Repository: gitlabhq/gitlab-runner Branch: main Commit: 4e9786fae886 Files: 1203 Total size: 11.6 MB Directory structure: gitextract_595h71_a/ ├── .dockerignore ├── .editorconfig ├── .flaky-tests.txt ├── .gitattributes ├── .gitignore ├── .gitlab/ │ ├── .argo/ │ │ └── mr_template_doc_ai.md │ ├── CODEOWNERS │ ├── changelog.yml │ ├── ci/ │ │ ├── _common.gitlab-ci.yml │ │ ├── _kubernetes.gitlab-ci.yml │ │ ├── _project_canonical.gitlab-ci.yml │ │ ├── _project_fork.gitlab-ci.yml │ │ ├── _rules.gitlab-ci.yml │ │ ├── build.gitlab-ci.yml │ │ ├── coverage.gitlab-ci.yml │ │ ├── deploy.gitlab-ci.yml │ │ ├── docs.gitlab-ci.yml │ │ ├── hosted-runners-bridge.gitlab-ci.yml │ │ ├── package.gitlab-ci.yml │ │ ├── postrelease.gitlab-ci.yml │ │ ├── prepare.gitlab-ci.yml │ │ ├── qa.gitlab-ci.yml │ │ ├── rebase.gitlab-ci.yml │ │ ├── release.gitlab-ci.yml │ │ ├── test-kubernetes-integration.gitlab-ci.yml │ │ └── test.gitlab-ci.yml │ ├── dependency_decisions.yml │ ├── duo/ │ │ ├── agent-config.yml │ │ └── mr-review-instructions.yaml │ ├── issue_templates/ │ │ ├── Bug.md │ │ ├── Default.md │ │ ├── Documentation.md │ │ ├── Feature Flag Cleanup.md │ │ ├── Feature Flag Roll Out.md │ │ ├── Feature Proposal.md │ │ ├── Request for test infra feature toggle.md │ │ ├── Security developer workflow.md │ │ ├── bump-golang.md │ │ ├── planning_issue.md │ │ └── trainee-backend-maintainer.md │ ├── merge.release.yml │ ├── merge_request_templates/ │ │ ├── Default.md │ │ ├── Documentation.md │ │ └── Security Release.md │ ├── renovate.json │ └── route-map.yml ├── .gitlab-ci.yml ├── .golangci.yml ├── .labkit_logging_todo.yml ├── .markdownlint-cli2.yaml ├── .mockery.yaml ├── .tool-versions ├── .vale.ini ├── AGENTS.md ├── CHANGELOG.md ├── CONTRIBUTING.md ├── Dangerfile ├── LICENSE ├── Makefile ├── Makefile.build.mk ├── Makefile.runner_helper.mk ├── NOTICE ├── PROCESS.md ├── PROVENANCE.md ├── Procfile ├── README.md ├── VERSION ├── Vagrantfile ├── apps/ │ └── gitlab-runner-helper/ │ └── main.go ├── 
argo_translation.yml ├── cache/ │ ├── adapter.go │ ├── adapter_test.go │ ├── azure/ │ │ ├── adapter.go │ │ ├── adapter_test.go │ │ ├── azure.go │ │ ├── azure_test.go │ │ ├── credentials_resolver.go │ │ ├── credentials_resolver_test.go │ │ └── mocks.go │ ├── cache.go │ ├── cache_test.go │ ├── cacheconfig/ │ │ ├── cacheconfig.go │ │ └── cacheconfig_test.go │ ├── cachekey/ │ │ ├── cachekey.go │ │ └── cachekey_test.go │ ├── credentials_adapter.go │ ├── credentials_adapter_test.go │ ├── gcs/ │ │ ├── adapter.go │ │ ├── adapter_test.go │ │ ├── credentials_resolver.go │ │ ├── credentials_resolver_test.go │ │ └── mocks.go │ ├── gcsv2/ │ │ ├── adapter.go │ │ └── adapter_test.go │ ├── mocks.go │ ├── s3/ │ │ ├── adapter.go │ │ ├── adapter_test.go │ │ ├── bucket_location_tripper.go │ │ ├── credentials_adapter.go │ │ ├── credentials_adapter_test.go │ │ ├── minio.go │ │ ├── minio_test.go │ │ └── mocks.go │ ├── s3v2/ │ │ ├── adapter.go │ │ ├── adapter_test.go │ │ ├── mocks.go │ │ ├── s3.go │ │ └── s3_test.go │ └── test/ │ └── adapter.go ├── certs/ │ ├── README.md │ ├── apple-developer-id-app-cert.cer │ └── gitlab-inc-ssl-com.crt ├── ci/ │ ├── .test-failures.servercore1809.txt │ ├── .test-failures.servercore21H2.txt │ ├── prebuilt_helper_image │ ├── release_dir │ ├── release_s3 │ ├── rpm_verify_fips │ ├── touch_git │ ├── touch_git.ps1 │ └── version ├── commands/ │ ├── builds_helper.go │ ├── builds_helper_integration_test.go │ ├── builds_helper_test.go │ ├── config.go │ ├── config_unix.go │ ├── config_windows.go │ ├── constants.go │ ├── fleeting/ │ │ ├── fleeting.go │ │ └── fleeting_integration_test.go │ ├── health_helper.go │ ├── helpers/ │ │ ├── archive/ │ │ │ ├── archive.go │ │ │ ├── archive_test.go │ │ │ ├── fastzip/ │ │ │ │ ├── options_test.go │ │ │ │ ├── zip_fastzip_archiver.go │ │ │ │ └── zip_fastzip_extractor.go │ │ │ ├── gziplegacy/ │ │ │ │ └── gzip_legacy_archiver.go │ │ │ ├── mocks.go │ │ │ ├── raw/ │ │ │ │ └── raw_archiver.go │ │ │ ├── tarzstd/ │ │ │ │ ├── ops_unix.go │ 
│ │ │ ├── ops_windows.go │ │ │ │ ├── tarzstd_archiver.go │ │ │ │ └── tarzstd_extractor.go │ │ │ └── ziplegacy/ │ │ │ ├── zip_legacy_archiver.go │ │ │ └── zip_legacy_extractor.go │ │ ├── archiver.go │ │ ├── archiver_test.go │ │ ├── artifact_metadata.go │ │ ├── artifact_metadata_test.go │ │ ├── artifacts_downloader.go │ │ ├── artifacts_downloader_test.go │ │ ├── artifacts_test.go │ │ ├── artifacts_uploader.go │ │ ├── artifacts_uploader_integration_test.go │ │ ├── artifacts_uploader_test.go │ │ ├── cache_archiver.go │ │ ├── cache_archiver_integration_test.go │ │ ├── cache_archiver_test.go │ │ ├── cache_client.go │ │ ├── cache_defaults.go │ │ ├── cache_defaults_test.go │ │ ├── cache_env.go │ │ ├── cache_env_test.go │ │ ├── cache_extractor.go │ │ ├── cache_extractor_test.go │ │ ├── cache_init.go │ │ ├── cache_init_integration_test.go │ │ ├── cache_metadata.go │ │ ├── cache_metadata_test.go │ │ ├── file_archiver.go │ │ ├── file_archiver_integration_test.go │ │ ├── file_archiver_test.go │ │ ├── health_check.go │ │ ├── health_check_integration_test.go │ │ ├── helpers_archiver_test.go │ │ ├── helpers_cache_archiver_test.go │ │ ├── internal/ │ │ │ └── store/ │ │ │ ├── store.go │ │ │ ├── store_test.go │ │ │ ├── store_unix.go │ │ │ ├── store_unix_test.go │ │ │ ├── store_windows.go │ │ │ └── store_windows_test.go │ │ ├── meter/ │ │ │ ├── formatters.go │ │ │ ├── formatters_test.go │ │ │ ├── meter.go │ │ │ ├── reader.go │ │ │ ├── reader_test.go │ │ │ ├── writer.go │ │ │ └── writer_test.go │ │ ├── mocks.go │ │ ├── proxy_exec.go │ │ ├── proxy_exec_test.go │ │ ├── read_logs.go │ │ ├── read_logs_test.go │ │ ├── retry_helper.go │ │ ├── retry_helper_test.go │ │ └── testdata/ │ │ └── test-artifacts/ │ │ ├── file-0 │ │ ├── file-1 │ │ ├── file-2 │ │ ├── file-3 │ │ └── file-4 │ ├── helpers_register_test.go │ ├── internal/ │ │ └── configfile/ │ │ ├── configfile.go │ │ ├── configfile_test.go │ │ ├── metrics.go │ │ ├── options.go │ │ ├── system_id_state.go │ │ ├── system_id_state_test.go │ │ 
└── validation.go │ ├── list.go │ ├── mocks.go │ ├── multi.go │ ├── multi_test.go │ ├── register.go │ ├── register_integration_test.go │ ├── register_test.go │ ├── register_windows_test.go │ ├── reset_token.go │ ├── service.go │ ├── service_darwin.go │ ├── service_integration_test.go │ ├── service_linux.go │ ├── service_portable.go │ ├── service_windows.go │ ├── single.go │ ├── single_test.go │ ├── steps/ │ │ ├── steps.go │ │ └── steps_test.go │ ├── testdata/ │ │ ├── .runner_system_id │ │ └── test-config.toml │ ├── tracing.go │ ├── tracing_test.go │ ├── unregister.go │ ├── unregister_test.go │ ├── user_mode_warning.go │ ├── verify.go │ ├── wrapper.go │ └── wrapper_test.go ├── common/ │ ├── allowed_images.go │ ├── allowed_images_test.go │ ├── build.go │ ├── build_settings.go │ ├── build_settings_test.go │ ├── build_step_dispatch.go │ ├── build_step_dispatch_test.go │ ├── build_test.go │ ├── buildlogger/ │ │ ├── build_logger.go │ │ ├── build_logger_test.go │ │ ├── internal/ │ │ │ ├── build_logger_fuzz.go │ │ │ ├── masker/ │ │ │ │ ├── masker.go │ │ │ │ └── masker_test.go │ │ │ ├── nopcloser.go │ │ │ ├── sync.go │ │ │ ├── tee.go │ │ │ ├── testdata/ │ │ │ │ └── corpus/ │ │ │ │ ├── ipsum │ │ │ │ ├── log-1 │ │ │ │ ├── log-2 │ │ │ │ ├── log-3 │ │ │ │ ├── log-4 │ │ │ │ ├── log-5 │ │ │ │ └── small-random │ │ │ ├── timestamper/ │ │ │ │ ├── timestamper.go │ │ │ │ └── timestamper_test.go │ │ │ ├── tokensanitizer/ │ │ │ │ ├── token_masker.go │ │ │ │ └── token_masker_test.go │ │ │ ├── unique.go │ │ │ └── urlsanitizer/ │ │ │ ├── urlsanitizer.go │ │ │ └── urlsanitizer_test.go │ │ └── mocks.go │ ├── buildtest/ │ │ ├── abort.go │ │ ├── binary.go │ │ ├── cleanup.go │ │ ├── job_output_limit.go │ │ ├── masking.go │ │ ├── sections.go │ │ ├── test.go │ │ └── variables.go │ ├── command.go │ ├── config/ │ │ └── runner/ │ │ ├── monitoring/ │ │ │ ├── job_queuing_durations.go │ │ │ └── job_queuing_durations_test.go │ │ └── monitoring.go │ ├── config.go │ ├── config_log_options_test.go │ ├── 
config_test.go │ ├── consts.go │ ├── executor.go │ ├── executor_test.go │ ├── exit_code.go │ ├── exit_code_test.go │ ├── failure_reason_mapper.go │ ├── failure_reason_mapper_test.go │ ├── labels.go │ ├── mocks.go │ ├── network.go │ ├── network_test.go │ ├── process_logger_adaptor.go │ ├── reset_token.go │ ├── secrets.go │ ├── secrets_test.go │ ├── shell.go │ ├── spec/ │ │ ├── inputs.go │ │ ├── inputs_metrics.go │ │ ├── inputs_metrics_test.go │ │ ├── inputs_test.go │ │ ├── mocks.go │ │ ├── spec.go │ │ ├── spec_test.go │ │ ├── variables.go │ │ └── variables_test.go │ ├── steps.go │ ├── support.go │ ├── test.go │ ├── trace.go │ ├── usage_log.go │ └── version.go ├── config.toml.example ├── dockerfiles/ │ ├── runner/ │ │ ├── Dockerfile │ │ └── docker-bake.hcl │ └── runner-helper/ │ ├── Dockerfile │ ├── Dockerfile.concrete │ └── docker-bake.hcl ├── docs/ │ ├── .markdownlint/ │ │ ├── .markdownlint-cli2.yaml │ │ └── rules/ │ │ └── unnecessary_traversal.js │ ├── .vale/ │ │ ├── gitlab_base/ │ │ │ ├── Ability.yml │ │ │ ├── AlertFormat.yml │ │ │ ├── BadPlurals.yml │ │ │ ├── British.yml │ │ │ ├── CIConfigFile.yml │ │ │ ├── CodeblockFences.yml │ │ │ ├── CommandStringsQuoted.yml │ │ │ ├── CurrentStatus.yml │ │ │ ├── DefaultBranch.yml │ │ │ ├── Dropdown.yml │ │ │ ├── EOLWhitespace.yml │ │ │ ├── ElementDescriptors.yml │ │ │ ├── FutureTense.yml │ │ │ ├── GitLabFlavoredMarkdown.yml │ │ │ ├── HeadingContent.yml │ │ │ ├── HeadingDepth.yml │ │ │ ├── HeadingLink.yml │ │ │ ├── InclusiveLanguage.yml │ │ │ ├── LatinTerms.yml │ │ │ ├── Level.yml │ │ │ ├── ListIndentation.yml │ │ │ ├── MeaningfulLinkWords.yml │ │ │ ├── MergeConflictMarkers.yml │ │ │ ├── MultiLineLinks.yml │ │ │ ├── NonStandardHyphens.yml │ │ │ ├── NonStandardListDashes.yml │ │ │ ├── NonStandardQuotes.yml │ │ │ ├── NonStandardSpaces.yml │ │ │ ├── Offerings.yml │ │ │ ├── OutdatedVersions.yml │ │ │ ├── OxfordComma.yml │ │ │ ├── Possessive.yml │ │ │ ├── PossessiveProperNouns.yml │ │ │ ├── Prerequisites.yml │ │ │ ├── 
ReadingLevel.yml │ │ │ ├── Repetition.yml │ │ │ ├── SelfReferential.yml │ │ │ ├── SentenceLength.yml │ │ │ ├── SentenceSpacing.yml │ │ │ ├── Simplicity.yml │ │ │ ├── Spelling.yml │ │ │ ├── SubstitutionWarning.yml │ │ │ ├── Substitutions.yml │ │ │ ├── TableDelimiterRows.yml │ │ │ ├── ToDo.yml │ │ │ ├── UnclearAntecedent.yml │ │ │ ├── Units.yml │ │ │ ├── Uppercase.yml │ │ │ ├── WordSlashWord.yml │ │ │ ├── Wordy.yml │ │ │ ├── Zip.yml │ │ │ └── spelling-exceptions.txt │ │ ├── gitlab_docs/ │ │ │ ├── Badges-Offerings.yml │ │ │ ├── Badges-Tiers.yml │ │ │ ├── FrontMatter.yml │ │ │ ├── HTMLShortcodes.yml │ │ │ ├── HistoryItems.yml │ │ │ ├── HistoryItemsOrder.yml │ │ │ ├── ImagesOld.yml │ │ │ ├── InternalLinkCase.yml │ │ │ ├── InternalLinkExtension.yml │ │ │ ├── InternalLinkFormat.yml │ │ │ ├── InternalLinksCode.yml │ │ │ ├── ReferenceLinks.yml │ │ │ ├── RelativeLinks.yml │ │ │ ├── ShortCodeFormat.yml │ │ │ ├── ShortCodeSyntax.yml │ │ │ ├── TabsLinks.yml │ │ │ └── UIText.yml │ │ ├── vale-json.tmpl │ │ └── vale.tmpl │ ├── _index.md │ ├── commands/ │ │ └── _index.md │ ├── configuration/ │ │ ├── _index.md │ │ ├── advanced-configuration.md │ │ ├── autoscale.md │ │ ├── configuring_runner_operator.md │ │ ├── feature-flags.md │ │ ├── gpus.md │ │ ├── init.md │ │ ├── macos_setup.md │ │ ├── oracle_cloud_performance.md │ │ ├── proxy.md │ │ ├── runner_autoscale_aws/ │ │ │ └── _index.md │ │ ├── runner_autoscale_aws_fargate/ │ │ │ └── _index.md │ │ ├── slot_based_cgroups.md │ │ ├── speed_up_job_execution.md │ │ └── tls-self-signed.md │ ├── development/ │ │ ├── _index.md │ │ ├── add-windows-version.md │ │ ├── internal/ │ │ │ ├── ci/ │ │ │ │ ├── kubernetes_integration_tests.md │ │ │ │ └── packages_iteration.md │ │ │ └── engineering/ │ │ │ └── executor_interface/ │ │ │ └── _index.md │ │ └── reviewing-gitlab-runner.md │ ├── executors/ │ │ ├── _index.md │ │ ├── custom.md │ │ ├── custom_examples/ │ │ │ ├── libvirt.md │ │ │ └── lxd.md │ │ ├── docker.md │ │ ├── docker_autoscaler.md │ │ ├── 
docker_machine.md │ │ ├── instance.md │ │ ├── kubernetes/ │ │ │ ├── _index.md │ │ │ ├── troubleshooting.md │ │ │ └── use_podman_with_kubernetes.md │ │ ├── parallels.md │ │ ├── shell.md │ │ ├── ssh.md │ │ └── virtualbox.md │ ├── faq/ │ │ └── _index.md │ ├── fleet_scaling/ │ │ ├── _index.md │ │ └── fleeting.md │ ├── grit/ │ │ └── _index.md │ ├── install/ │ │ ├── _index.md │ │ ├── bleeding-edge.md │ │ ├── docker.md │ │ ├── environment_variables_in_helm_charts.md │ │ ├── freebsd.md │ │ ├── gpg-keys/ │ │ │ ├── 49F16C5CC3A0F81F.pub.gpg │ │ │ ├── 9CE45ABC880721D4.pub.gpg │ │ │ └── A674BF8135DFA027.pub.gpg │ │ ├── kubernetes-agent.md │ │ ├── kubernetes.md │ │ ├── kubernetes_helm_chart_configuration.md │ │ ├── kubernetes_troubleshooting.md │ │ ├── linux-manually.md │ │ ├── linux-repository.md │ │ ├── operator.md │ │ ├── osx.md │ │ ├── requirements.md │ │ ├── step-runner.md │ │ ├── support-policy.md │ │ ├── windows.md │ │ └── z-os.md │ ├── monitoring/ │ │ └── _index.md │ ├── register/ │ │ └── _index.md │ ├── runner_autoscale/ │ │ ├── _index.md │ │ └── gitlab-runner-autoscaler.md │ ├── security/ │ │ └── _index.md │ └── shells/ │ └── _index.md ├── docs-locale/ │ ├── .markdownlint/ │ │ └── .markdownlint-cli2.yaml │ └── ja-jp/ │ ├── _index.md │ ├── commands/ │ │ └── _index.md │ ├── configuration/ │ │ ├── _index.md │ │ ├── advanced-configuration.md │ │ ├── autoscale.md │ │ ├── configuring_runner_operator.md │ │ ├── feature-flags.md │ │ ├── gpus.md │ │ ├── init.md │ │ ├── macos_setup.md │ │ ├── oracle_cloud_performance.md │ │ ├── proxy.md │ │ ├── runner_autoscale_aws/ │ │ │ └── _index.md │ │ ├── runner_autoscale_aws_fargate/ │ │ │ └── _index.md │ │ ├── slot_based_cgroups.md │ │ ├── speed_up_job_execution.md │ │ └── tls-self-signed.md │ ├── development/ │ │ ├── _index.md │ │ ├── add-windows-version.md │ │ ├── internal/ │ │ │ ├── ci/ │ │ │ │ └── packages_iteration.md │ │ │ └── engineering/ │ │ │ └── executor_interface/ │ │ │ └── _index.md │ │ └── reviewing-gitlab-runner.md │ ├── 
executors/ │ │ ├── _index.md │ │ ├── custom.md │ │ ├── custom_examples/ │ │ │ ├── libvirt.md │ │ │ └── lxd.md │ │ ├── docker.md │ │ ├── docker_autoscaler.md │ │ ├── docker_machine.md │ │ ├── instance.md │ │ ├── kubernetes/ │ │ │ ├── _index.md │ │ │ ├── troubleshooting.md │ │ │ └── use_podman_with_kubernetes.md │ │ ├── parallels.md │ │ ├── shell.md │ │ ├── ssh.md │ │ └── virtualbox.md │ ├── faq/ │ │ └── _index.md │ ├── fleet_scaling/ │ │ ├── _index.md │ │ └── fleeting.md │ ├── grit/ │ │ └── _index.md │ ├── install/ │ │ ├── _index.md │ │ ├── bleeding-edge.md │ │ ├── docker.md │ │ ├── environment_variables_in_helm_charts.md │ │ ├── freebsd.md │ │ ├── kubernetes-agent.md │ │ ├── kubernetes.md │ │ ├── kubernetes_helm_chart_configuration.md │ │ ├── kubernetes_troubleshooting.md │ │ ├── linux-manually.md │ │ ├── linux-repository.md │ │ ├── operator.md │ │ ├── osx.md │ │ ├── requirements.md │ │ ├── step-runner.md │ │ ├── support-policy.md │ │ ├── windows.md │ │ └── z-os.md │ ├── monitoring/ │ │ └── _index.md │ ├── register/ │ │ └── _index.md │ ├── runner_autoscale/ │ │ ├── _index.md │ │ └── gitlab-runner-autoscaler.md │ ├── security/ │ │ └── _index.md │ └── shells/ │ └── _index.md ├── executors/ │ ├── abstract.go │ ├── custom/ │ │ ├── api/ │ │ │ ├── config.go │ │ │ └── const.go │ │ ├── command/ │ │ │ ├── command.go │ │ │ ├── command_test.go │ │ │ ├── errors.go │ │ │ └── mocks.go │ │ ├── config.go │ │ ├── config_test.go │ │ ├── consts.go │ │ ├── custom.go │ │ ├── custom_test.go │ │ ├── integration_test.go │ │ ├── terminal.go │ │ ├── terminal_test.go │ │ └── testdata/ │ │ └── test_executor/ │ │ ├── .gitignore │ │ └── main.go │ ├── default_executor_provider.go │ ├── docker/ │ │ ├── autoscaler/ │ │ │ ├── autoscaler.go │ │ │ ├── autoscaler_integration_test.go │ │ │ ├── autoscaler_integration_unix_test.go │ │ │ └── autoscaler_integration_windows_test.go │ │ ├── config_updater.go │ │ ├── config_updater_test.go │ │ ├── consts.go │ │ ├── docker.go │ │ ├── docker_command.go │ │ ├── 
docker_command_integration_test.go │ │ ├── docker_log_options_integration_test.go │ │ ├── docker_steps_integration_test.go │ │ ├── docker_test.go │ │ ├── internal/ │ │ │ ├── exec/ │ │ │ │ ├── exec.go │ │ │ │ ├── exec_test.go │ │ │ │ └── mocks.go │ │ │ ├── labels/ │ │ │ │ ├── labels.go │ │ │ │ ├── labels_test.go │ │ │ │ └── mocks.go │ │ │ ├── networks/ │ │ │ │ ├── manager.go │ │ │ │ ├── manager_integration_test.go │ │ │ │ ├── manager_test.go │ │ │ │ ├── mocks.go │ │ │ │ └── utils.go │ │ │ ├── omitwriter/ │ │ │ │ ├── omit_writer.go │ │ │ │ └── omit_writer_test.go │ │ │ ├── prebuilt/ │ │ │ │ └── prebuilt.go │ │ │ ├── pull/ │ │ │ │ ├── manager.go │ │ │ │ ├── manager_test.go │ │ │ │ └── mocks.go │ │ │ ├── user/ │ │ │ │ ├── mocks.go │ │ │ │ ├── user.go │ │ │ │ └── user_test.go │ │ │ ├── volumes/ │ │ │ │ ├── manager.go │ │ │ │ ├── manager_integration_test.go │ │ │ │ ├── manager_integration_unix_test.go │ │ │ │ ├── manager_integration_windows_test.go │ │ │ │ ├── manager_test.go │ │ │ │ ├── manager_windows_test.go │ │ │ │ ├── mocks.go │ │ │ │ ├── parser/ │ │ │ │ │ ├── base_parser.go │ │ │ │ │ ├── errors.go │ │ │ │ │ ├── linux_parser.go │ │ │ │ │ ├── linux_parser_test.go │ │ │ │ │ ├── mocks.go │ │ │ │ │ ├── parser.go │ │ │ │ │ ├── volume.go │ │ │ │ │ ├── volume_test.go │ │ │ │ │ ├── windows_parser.go │ │ │ │ │ ├── windows_parser_test.go │ │ │ │ │ ├── windows_path.go │ │ │ │ │ ├── windows_path_test.go │ │ │ │ │ └── windows_path_windows.go │ │ │ │ ├── permission/ │ │ │ │ │ ├── linux_set.go │ │ │ │ │ ├── linux_set_integration_test.go │ │ │ │ │ ├── linux_set_test.go │ │ │ │ │ ├── mocks.go │ │ │ │ │ ├── set.go │ │ │ │ │ └── windows_set.go │ │ │ │ ├── utils.go │ │ │ │ ├── utils_test.go │ │ │ │ └── utils_windows_test.go │ │ │ └── wait/ │ │ │ ├── mocks.go │ │ │ ├── wait.go │ │ │ └── wait_test.go │ │ ├── labeler.go │ │ ├── machine/ │ │ │ ├── collector.go │ │ │ ├── collector_test.go │ │ │ ├── consts.go │ │ │ ├── coordinator.go │ │ │ ├── coordinator_test.go │ │ │ ├── data.go │ │ │ ├── 
details.go │ │ │ ├── details_test.go │ │ │ ├── idle_limit_strategy.go │ │ │ ├── idle_limit_strategy_test.go │ │ │ ├── machine.go │ │ │ ├── machine_test.go │ │ │ ├── name.go │ │ │ ├── name_test.go │ │ │ ├── provider.go │ │ │ ├── provider_test.go │ │ │ ├── shutdown.go │ │ │ ├── shutdown_test.go │ │ │ └── state.go │ │ ├── mocks.go │ │ ├── network.go │ │ ├── provider.go │ │ ├── pull.go │ │ ├── services.go │ │ ├── services_test.go │ │ ├── steps.go │ │ ├── terminal.go │ │ ├── terminal_integration_test.go │ │ ├── terminal_test.go │ │ ├── tty.go │ │ └── volume.go │ ├── environment.go │ ├── executors.go │ ├── init.go │ ├── instance/ │ │ ├── instance.go │ │ └── instance_integration_test.go │ ├── internal/ │ │ ├── autoscaler/ │ │ │ ├── acquisition.go │ │ │ ├── acquisition_test.go │ │ │ ├── executor.go │ │ │ ├── executor_test.go │ │ │ ├── logger/ │ │ │ │ ├── logger.go │ │ │ │ └── logger_test.go │ │ │ ├── nesting_init.go │ │ │ ├── nesting_init_test.go │ │ │ ├── provider.go │ │ │ └── provider_test.go │ │ └── readywriter/ │ │ ├── readywriter.go │ │ └── readywriter_test.go │ ├── kubernetes/ │ │ ├── autoscaler/ │ │ │ ├── metrics.go │ │ │ ├── pause_pod_manager.go │ │ │ ├── pause_pod_manager_test.go │ │ │ ├── policy.go │ │ │ ├── policy_test.go │ │ │ ├── provider.go │ │ │ └── provider_test.go │ │ ├── container_entrypoint_forwarder.go │ │ ├── container_entrypoint_forwarder_test.go │ │ ├── exec.go │ │ ├── exec_test.go │ │ ├── feature.go │ │ ├── feature_test.go │ │ ├── helpers_kubernetes_test.go │ │ ├── host_aliases.go │ │ ├── host_aliases_test.go │ │ ├── internal/ │ │ │ ├── pull/ │ │ │ │ ├── errors.go │ │ │ │ ├── manager.go │ │ │ │ ├── manager_test.go │ │ │ │ └── mocks.go │ │ │ └── watchers/ │ │ │ ├── informer_factory.go │ │ │ ├── mocks.go │ │ │ ├── pod.go │ │ │ ├── pod_integration_test.go │ │ │ └── pod_test.go │ │ ├── kubernetes.go │ │ ├── kubernetes_integration_test.go │ │ ├── kubernetes_test.go │ │ ├── log_processor.go │ │ ├── log_processor_test.go │ │ ├── mocks.go │ │ ├── 
overwrites.go │ │ ├── overwrites_test.go │ │ ├── provider.go │ │ ├── service_proxy.go │ │ ├── service_proxy_test.go │ │ ├── terminal.go │ │ ├── util.go │ │ └── util_test.go │ ├── mocks.go │ ├── parallels/ │ │ ├── parallels.go │ │ └── parallels_integration_test.go │ ├── shell/ │ │ ├── shell.go │ │ ├── shell_integration_test.go │ │ ├── shell_terminal.go │ │ └── shell_test.go │ ├── ssh/ │ │ ├── ssh.go │ │ └── ssh_test.go │ ├── virtualbox/ │ │ ├── virtualbox.go │ │ └── virtualbox_integration_test.go │ └── vm/ │ ├── vm.go │ └── vm_test.go ├── functions/ │ ├── concrete/ │ │ ├── builder/ │ │ │ ├── builder.go │ │ │ ├── builder_test.go │ │ │ ├── options.go │ │ │ └── variables/ │ │ │ ├── mocks.go │ │ │ ├── variables.go │ │ │ └── variables_test.go │ │ ├── concrete.go │ │ └── run/ │ │ ├── cacheprovider/ │ │ │ └── descriptor.go │ │ ├── env/ │ │ │ ├── env.go │ │ │ └── env_test.go │ │ ├── runner.go │ │ ├── runner_test.go │ │ └── stages/ │ │ ├── artifact_download.go │ │ ├── artifact_upload.go │ │ ├── cache_archive.go │ │ ├── cache_extract.go │ │ ├── cleanup.go │ │ ├── get_sources.go │ │ ├── get_sources_git_integration_test.go │ │ ├── get_sources_test.go │ │ ├── internal/ │ │ │ └── scriptwriter/ │ │ │ ├── scriptwriter.go │ │ │ └── scriptwriter_test.go │ │ ├── step.go │ │ └── step_test.go │ └── script_legacy/ │ ├── internal/ │ │ ├── command_formatter.go │ │ ├── command_formatter_test.go │ │ ├── command_processor.go │ │ ├── command_processor_test.go │ │ ├── escape.go │ │ ├── escape_test.go │ │ ├── executor.go │ │ ├── executor_test.go │ │ ├── script_generator.go │ │ ├── script_generator_test.go │ │ ├── script_header.go │ │ ├── script_header_test.go │ │ ├── shell.go │ │ ├── shell_test.go │ │ ├── trace_section.go │ │ └── trace_section_test.go │ ├── script_legacy.go │ └── script_legacy_test.go ├── go.mod ├── go.sum ├── helpers/ │ ├── ansi_colors.go │ ├── archives/ │ │ ├── gzip_create.go │ │ ├── gzip_create_test.go │ │ ├── os_unix.go │ │ ├── os_windows.go │ │ ├── path_check_helper.go │ │ 
├── path_check_helper_test.go │ │ ├── path_error_tracker.go │ │ ├── path_error_tracker_test.go │ │ ├── zip_create.go │ │ ├── zip_create_test.go │ │ ├── zip_create_unix_test.go │ │ ├── zip_create_windows_test.go │ │ ├── zip_extra.go │ │ ├── zip_extra_test.go │ │ ├── zip_extra_unix.go │ │ ├── zip_extra_windows.go │ │ ├── zip_extract.go │ │ └── zip_extract_test.go │ ├── aws/ │ │ └── service/ │ │ ├── aws_service.go │ │ ├── aws_service_test.go │ │ └── mocks.go │ ├── azure_key_vault/ │ │ └── service/ │ │ ├── azure_key_vault.go │ │ └── mocks.go │ ├── build_section.go │ ├── build_section_test.go │ ├── certificate/ │ │ ├── certificate.go │ │ ├── mocks.go │ │ ├── x509.go │ │ └── x509_test.go │ ├── cli/ │ │ ├── cpuprofile.go │ │ ├── fix_home.go │ │ ├── init_cli.go │ │ ├── init_cli_windows.go │ │ ├── runtime_platform.go │ │ ├── runtime_platform_test.go │ │ └── warn_on_bool.go │ ├── container/ │ │ ├── helperimage/ │ │ │ ├── info.go │ │ │ ├── info_test.go │ │ │ ├── linux_info.go │ │ │ ├── linux_info_test.go │ │ │ ├── mocks.go │ │ │ ├── windows_info.go │ │ │ └── windows_info_test.go │ │ ├── services/ │ │ │ ├── services.go │ │ │ ├── services_test.go │ │ │ └── test/ │ │ │ └── test.go │ │ └── windows/ │ │ ├── version.go │ │ └── version_test.go │ ├── converter.go │ ├── converter_test.go │ ├── dns/ │ │ ├── test/ │ │ │ └── test.go │ │ ├── utils.go │ │ └── utils_test.go │ ├── docker/ │ │ ├── auth/ │ │ │ ├── auth.go │ │ │ ├── auth_test.go │ │ │ ├── mocks.go │ │ │ └── testdata/ │ │ │ ├── docker-credential-bin.sh │ │ │ └── docker-credential-windows.cmd │ │ ├── client.go │ │ ├── credentials.go │ │ ├── errors/ │ │ │ └── errors.go │ │ ├── machine.go │ │ ├── machine_command.go │ │ ├── machine_command_test.go │ │ ├── mocks.go │ │ ├── official_docker_client.go │ │ ├── official_docker_client_test.go │ │ ├── options.go │ │ └── test/ │ │ └── error.go │ ├── fatal_panic.go │ ├── featureflags/ │ │ ├── flags.go │ │ └── flags_test.go │ ├── gcp_secret_manager/ │ │ └── service/ │ │ ├── 
gcp_secret_manager.go │ │ └── gcp_secret_manager_test.go │ ├── gitlab_secrets_manager/ │ │ └── service/ │ │ ├── gitlab_secrets_manager.go │ │ └── gitlab_secrets_manager_test.go │ ├── homedir/ │ │ ├── homedir.go │ │ └── homedir_test.go │ ├── integration_tests.go │ ├── limitwriter/ │ │ ├── limit_writer.go │ │ └── limit_writer_test.go │ ├── mocks.go │ ├── observability/ │ │ ├── multi_exporter.go │ │ └── multi_exporter_test.go │ ├── os/ │ │ ├── other.go │ │ └── windows.go │ ├── parallels/ │ │ └── control.go │ ├── path/ │ │ ├── unix_path.go │ │ ├── unix_path_test.go │ │ ├── windows_path.go │ │ └── windows_path_test.go │ ├── path.go │ ├── path_test.go │ ├── process/ │ │ ├── commander.go │ │ ├── commander_unix_test.go │ │ ├── ensure_subprocess_termination_integration_test.go │ │ ├── group_unix_test.go │ │ ├── group_windows_test.go │ │ ├── helpers_killer_test.go │ │ ├── job_unix.go │ │ ├── job_windows.go │ │ ├── killer.go │ │ ├── killer_integration_test.go │ │ ├── killer_test.go │ │ ├── killer_unix.go │ │ ├── killer_unix_integration_test.go │ │ ├── killer_unix_test.go │ │ ├── killer_windows.go │ │ ├── killer_windows_integration_test.go │ │ ├── logger.go │ │ ├── mocks.go │ │ └── testdata/ │ │ ├── ensure_subprocess_termination/ │ │ │ └── main.go │ │ └── sleep/ │ │ └── main.go │ ├── prometheus/ │ │ ├── failures_collector.go │ │ ├── failures_collector_test.go │ │ ├── log_hook.go │ │ └── log_hook_test.go │ ├── pull_policies/ │ │ ├── pull_policies.go │ │ └── pull_policies_test.go │ ├── random_uuid.go │ ├── retry/ │ │ ├── mocks.go │ │ ├── retry.go │ │ └── retry_test.go │ ├── runner_wrapper/ │ │ ├── api/ │ │ │ ├── 000_proto_generate.go │ │ │ ├── client/ │ │ │ │ ├── backoff.go │ │ │ │ ├── client.go │ │ │ │ ├── options.go │ │ │ │ ├── target.go │ │ │ │ └── target_test.go │ │ │ ├── errors.go │ │ │ ├── go.mod │ │ │ ├── go.sum │ │ │ ├── init_graceful_shutdown_request.go │ │ │ ├── mocks.go │ │ │ ├── proto/ │ │ │ │ ├── mocks.go │ │ │ │ ├── wrapper.pb.go │ │ │ │ ├── wrapper.proto │ │ │ │ 
└── wrapper_grpc.pb.go │ │ │ ├── server/ │ │ │ │ ├── mocks.go │ │ │ │ ├── server.go │ │ │ │ └── server_test.go │ │ │ ├── shutdown_callback.go │ │ │ ├── shutdown_callback_test.go │ │ │ └── status.go │ │ ├── commander.go │ │ ├── commander_test.go │ │ ├── commander_unix.go │ │ ├── commander_windows.go │ │ ├── mocks.go │ │ ├── testdata/ │ │ │ └── commander-binary/ │ │ │ └── main.go │ │ ├── wrapper.go │ │ ├── wrapper_test.go │ │ ├── wrapper_unix.go │ │ └── wrapper_windows.go │ ├── secrets/ │ │ ├── errors.go │ │ ├── errors_test.go │ │ └── resolvers/ │ │ ├── aws/ │ │ │ ├── aws_secrets_manager_resolver.go │ │ │ ├── aws_secrets_manager_resolver_integration_test.go │ │ │ ├── aws_secrets_manager_resolver_test.go │ │ │ └── mocks.go │ │ ├── azure_key_vault/ │ │ │ ├── azure_key_vault_resolver.go │ │ │ └── azure_key_vault_resolver_test.go │ │ ├── gcp_secret_manager/ │ │ │ ├── mocks.go │ │ │ ├── resolver.go │ │ │ ├── resolver_integration_test.go │ │ │ └── resolver_test.go │ │ ├── gitlab_secrets_manager/ │ │ │ ├── resolver.go │ │ │ └── resolver_test.go │ │ └── vault/ │ │ ├── resolver.go │ │ └── resolver_test.go │ ├── sentry/ │ │ ├── log_hook.go │ │ └── log_hook_test.go │ ├── service/ │ │ ├── logger.go │ │ ├── logger_test.go │ │ ├── mocks.go │ │ ├── scripts.go │ │ ├── service_factory.go │ │ ├── simple.go │ │ └── simple_test.go │ ├── shell_escape.go │ ├── shell_escape_test.go │ ├── shorten_token.go │ ├── shorten_token_test.go │ ├── ssh/ │ │ ├── consts.go │ │ ├── ssh_command.go │ │ ├── ssh_command_test.go │ │ ├── stub_ssh_server.go │ │ ├── stub_ssh_server_unix.go │ │ └── stub_ssh_server_windows.go │ ├── test/ │ │ └── helpers.go │ ├── timeperiod/ │ │ ├── period.go │ │ └── period_test.go │ ├── tls/ │ │ ├── ca_chain/ │ │ │ ├── builder.go │ │ │ ├── builder_test.go │ │ │ ├── helpers.go │ │ │ ├── helpers_test.go │ │ │ ├── mocks.go │ │ │ ├── resolver.go │ │ │ ├── resolver_chain.go │ │ │ ├── resolver_chain_test.go │ │ │ ├── resolver_url.go │ │ │ ├── resolver_url_test.go │ │ │ ├── 
resolver_verify.go │ │ │ └── resolver_verify_test.go │ │ └── consts.go │ ├── toml_test.go │ ├── trace/ │ │ ├── buffer.go │ │ ├── buffer_fd0_test.go │ │ └── buffer_test.go │ ├── transfer/ │ │ ├── content_range.go │ │ ├── content_range_test.go │ │ ├── parallel_download.go │ │ └── parallel_download_test.go │ ├── url/ │ │ ├── clean_url.go │ │ ├── clean_url_test.go │ │ ├── gitauth.go │ │ └── gitauth_test.go │ ├── usage_log/ │ │ ├── logrotate/ │ │ │ ├── options.go │ │ │ ├── writer.go │ │ │ └── writer_test.go │ │ ├── mocks.go │ │ ├── options.go │ │ ├── record.go │ │ ├── storage.go │ │ └── storage_test.go │ ├── vault/ │ │ ├── auth.go │ │ ├── auth_methods/ │ │ │ ├── data.go │ │ │ ├── data_test.go │ │ │ ├── jwt/ │ │ │ │ ├── auth.go │ │ │ │ └── auth_test.go │ │ │ ├── registry.go │ │ │ └── registry_test.go │ │ ├── client.go │ │ ├── client_test.go │ │ ├── internal/ │ │ │ └── registry/ │ │ │ ├── mocks.go │ │ │ ├── registry.go │ │ │ └── registry_test.go │ │ ├── mocks.go │ │ ├── result.go │ │ ├── result_test.go │ │ ├── secret_engine.go │ │ ├── secret_engines/ │ │ │ ├── generic/ │ │ │ │ ├── engine.go │ │ │ │ └── engine_test.go │ │ │ ├── kv_v2/ │ │ │ │ ├── engine.go │ │ │ │ └── engine_test.go │ │ │ ├── operations.go │ │ │ ├── operations_test.go │ │ │ ├── registry.go │ │ │ └── registry_test.go │ │ ├── service/ │ │ │ ├── mocks.go │ │ │ ├── vault.go │ │ │ └── vault_test.go │ │ ├── utils.go │ │ └── utils_test.go │ ├── virtualbox/ │ │ ├── control.go │ │ ├── control_test.go │ │ └── control_windows.go │ └── warning_panic.go ├── log/ │ ├── configuration.go │ ├── configuration_test.go │ ├── dump_unix.go │ ├── dump_unix_test.go │ ├── dump_windows.go │ ├── mocks.go │ ├── runner_formatter.go │ ├── runner_formatter_test.go │ ├── system_logger.go │ ├── system_logger_test.go │ └── test/ │ ├── hook.go │ └── hook_test.go ├── magefiles/ │ ├── build/ │ │ ├── blueprint.go │ │ ├── checker.go │ │ ├── components.go │ │ ├── exporter.go │ │ ├── mocks.go │ │ └── variables.go │ ├── ci/ │ │ └── variables.go │ 
├── docker/ │ │ └── buildx.go │ ├── docs/ │ │ └── writing_mage_targets.md │ ├── docutils/ │ │ ├── section_replacer.go │ │ ├── section_replacer_test.go │ │ └── testdata/ │ │ ├── source.md │ │ └── source_rewritten.md │ ├── env/ │ │ ├── mocks.go │ │ └── var.go │ ├── hosted_runners/ │ │ ├── bridge.go │ │ ├── bridge_test.go │ │ ├── testdata/ │ │ │ ├── table.md │ │ │ └── table_rewritten.md │ │ └── wiki_client.go │ ├── hosted_runners.go │ ├── k8s.go │ ├── kubernetes/ │ │ ├── docs/ │ │ │ ├── analyzer.go │ │ │ ├── analyzer_test.go │ │ │ ├── generate_permissions.go │ │ │ ├── mocks.go │ │ │ ├── role.yaml.tpl │ │ │ └── testdata/ │ │ │ ├── kubernetes_analyzer_api_declaration.go │ │ │ ├── kubernetes_analyzer_api_declaration_reassigned.go │ │ │ ├── kubernetes_analyzer_api_fn_arg.go │ │ │ ├── kubernetes_analyzer_api_nonpointer_call.go │ │ │ ├── kubernetes_analyzer_api_pointer_call.go │ │ │ └── kubernetes_analyzer_api_unnamed_field.go │ │ ├── docs.go │ │ ├── provision/ │ │ │ ├── manifests/ │ │ │ │ ├── rolebinding.yaml.tpl │ │ │ │ └── serviceaccount.yaml.tpl │ │ │ └── provisioner.go │ │ └── provisioner.go │ ├── magefile.go │ ├── mageutils/ │ │ └── mageutils.go │ ├── package.go │ ├── package_deb.go │ ├── package_rpm.go │ ├── packages/ │ │ ├── blueprint.go │ │ ├── create.go │ │ ├── docs.go │ │ ├── mocks.go │ │ ├── package.go │ │ ├── verify.go │ │ └── verify_test.go │ ├── pulp/ │ │ ├── mocks.go │ │ ├── push.go │ │ ├── push_test.go │ │ └── releases.go │ ├── pulp.go │ └── resources.go ├── main.go ├── main_test.go ├── network/ │ ├── api_requests_collector.go │ ├── api_requests_collector_test.go │ ├── client.go │ ├── client_test.go │ ├── gitlab.go │ ├── gitlab_test.go │ ├── mocks.go │ ├── patch_response.go │ ├── remote_job_state_response.go │ ├── remote_job_state_response_test.go │ ├── requester.go │ ├── retry_requester.go │ ├── retry_requester_test.go │ ├── trace.go │ └── trace_test.go ├── packaging/ │ ├── root/ │ │ └── usr/ │ │ └── share/ │ │ └── gitlab-runner/ │ │ ├── clear-docker-cache 
│ │ ├── post-install │ │ └── pre-remove │ └── scripts/ │ ├── postinst.deb │ ├── postinst.rpm │ ├── prerm.deb │ └── prerm.rpm ├── referees/ │ ├── metrics.go │ ├── metrics_test.go │ ├── mocks.go │ ├── prometheus_api.go │ ├── referees.go │ └── referees_test.go ├── router/ │ ├── client.go │ ├── client_conn_factory.go │ ├── client_test.go │ ├── internal/ │ │ └── wstunnel/ │ │ ├── client.go │ │ ├── netconn.go │ │ └── netconn_test.go │ ├── mocks.go │ ├── rpc/ │ │ ├── generate.go │ │ ├── mocks.go │ │ ├── rpc.pb.go │ │ ├── rpc.proto │ │ └── rpc_grpc.pb.go │ └── token_creds.go ├── scripts/ │ ├── check-test-directives/ │ │ └── main.go │ ├── common-pkcs11.sh │ ├── docs-i18n-verify-paths │ ├── envs/ │ │ ├── README.md │ │ ├── allowlist_common.env │ │ ├── allowlist_unix.env │ │ └── allowlist_windows.env │ ├── lint-docs │ ├── lint-i18n-docs │ ├── local-env │ ├── pusher/ │ │ ├── go.mod │ │ ├── go.sum │ │ ├── helper-images.json │ │ ├── main.go │ │ └── runner-images.json │ ├── security-harness │ ├── sign-macos-binaries │ ├── sign-windows-binaries │ ├── update-feature-flags-docs/ │ │ └── main.go │ └── vagrant/ │ └── provision/ │ ├── base.ps1 │ ├── enable_developer_mode.ps1 │ ├── enable_sshd.ps1 │ ├── install_PSWindowsUpdate.ps1 │ └── windows_update.ps1 ├── session/ │ ├── proxy/ │ │ ├── mocks.go │ │ ├── proxy.go │ │ └── proxy_test.go │ ├── server.go │ ├── server_test.go │ ├── session.go │ ├── session_test.go │ └── terminal/ │ ├── mocks.go │ └── terminal.go ├── shells/ │ ├── abstract.go │ ├── abstract_test.go │ ├── bash.go │ ├── bash_test.go │ ├── consts.go │ ├── git_credentials_helper_integration_test.go │ ├── mocks.go │ ├── powershell.go │ ├── powershell_integration_test.go │ ├── powershell_test.go │ ├── proxy_exec.go │ ├── shell_writer.go │ ├── shell_writer_integration_test.go │ ├── shell_writer_test.go │ ├── shellstest/ │ │ └── utils.go │ ├── trap_command_exit_status.go │ └── trap_command_exit_status_test.go ├── steps/ │ ├── execute.go │ ├── execute_test.go │ ├── mocks.go │ ├── 
steps.go │ ├── steps_test.go │ └── stepstest/ │ └── server.go └── tests/ ├── dockerfiles/ │ ├── README.md │ ├── alpine-entrypoint/ │ │ ├── Dockerfile │ │ ├── Dockerfile.pre-post-trap │ │ ├── Dockerfile.stderr │ │ ├── entrypoint-stderr.sh │ │ ├── entrypoint.pre-post-trap.sh │ │ └── entrypoint.sh │ ├── alpine-id-overflow/ │ │ └── Dockerfile │ ├── alpine-no-root/ │ │ └── Dockerfile │ ├── counter-service/ │ │ └── Dockerfile │ ├── docker-bake.hcl │ ├── gitlab-runner-helper-entrypoint/ │ │ ├── dockerfile │ │ ├── entrypoint.sh │ │ └── scripts/ │ │ └── gitlab-runner-build │ └── powershell-entrypoint/ │ ├── Dockerfile.pre-post-trap │ └── entrypoint.pre-post-trap.ps1 ├── test_installation.sh ├── test_script.sh └── ubuntu/ ├── Makefile └── Vagrantfile ================================================ FILE CONTENTS ================================================ ================================================ FILE: .dockerignore ================================================ .idea/ out/ builds/ ================================================ FILE: .editorconfig ================================================ # top-most EditorConfig file root = true # Unix-style newlines with a newline ending every file [*] end_of_line = lf trim_trailing_whitespace = true insert_final_newline = true indent_style = space charset = utf-8 [*.{md,markdown}] trim_trailing_whitespace = false [*.{go,mod}] indent_style = tab max_line_length = 120 [Makefile*] indent_style = tab [*.{yml,yaml}] indent_size = 2 max_line_length = 140 ================================================ FILE: .flaky-tests.txt ================================================ TestBuildCacheHelper/bash/cache_settings_provided,_no_job_cache_provided TestBuildCancel/bash/job_is_aborted TestBuildCancel/pwsh/job_is_aborted TestBuildLogLimitExceeded/bash/failed_job TestBuildLogLimitExceeded/bash/successful_job TestBuildLogLimitExceeded/canceled_job TestBuildLogLimitExceeded/failed_job TestBuildLogLimitExceeded/powershell/failed_job 
TestBuildLogLimitExceeded/pwsh/failed_job TestBuildPassingEnvsMultistep/bash TestBuildWithGitStrategyCloneWithLFS/pwsh TestClientInvalidTLSAuth TestCredSetup/FF_GIT_URLS_WITHOUT_TOKENS:true TestCredSetup/FF_GIT_URLS_WITHOUT_TOKENS:true/GIT_STRATEGY:clone TestCredSetup/FF_GIT_URLS_WITHOUT_TOKENS:true/GIT_STRATEGY:clone/bash TestCredSetup/FF_GIT_URLS_WITHOUT_TOKENS:true/GIT_STRATEGY:fetch TestCredSetup/FF_GIT_URLS_WITHOUT_TOKENS:true/GIT_STRATEGY:fetch/bash TestDockerBuildContainerGracefulShutdownWithInit/job_cancelled TestDockerCommandUsingCustomClonePath/uses_custom_clone_path TestDockerLogOptions/invalid_key_rejected_early TestDockerLogOptions/multiple_invalid_keys_rejected_early TestDockerLogOptions/service_container_with_invalid_options TestExecutor_Run/canceled_job_uses_new_process_termination/powershell TestKiller/command_terminated_via_job_object TestRunIntegrationTestsWithFeatureFlag/testDeletedPodSystemFailureDuringExecution/FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY:off TestRunIntegrationTestsWithFeatureFlag/testDeletedPodSystemFailureDuringExecution/FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY:off/step_ TestRunIntegrationTestsWithFeatureFlag/testDeletedPodSystemFailureDuringExecution/FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY:off/step_/delete_now TestRunIntegrationTestsWithFeatureFlag/testDeletedPodSystemFailureDuringExecution/FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY:on TestRunIntegrationTestsWithFeatureFlag/testDeletedPodSystemFailureDuringExecution/FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY:on/prepare_script TestRunIntegrationTestsWithFeatureFlag/testDeletedPodSystemFailureDuringExecution/FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY:on/prepare_script/evict_gracefully TestRunIntegrationTestsWithFeatureFlag/testKubernetesBuildLogLimitExceeded/FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY:off TestRunIntegrationTestsWithFeatureFlag/testKubernetesBuildLogLimitExceeded/FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY:off/failed_job 
TestRunIntegrationTestsWithFeatureFlag/testKubernetesBuildLogLimitExceeded/FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY:on TestRunIntegrationTestsWithFeatureFlag/testKubernetesBuildLogLimitExceeded/FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY:on/failed_job TestRunIntegrationTestsWithFeatureFlag/testKubernetesServiceContainerAlias/FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY:off TestRunIntegrationTestsWithFeatureFlag/testKubernetesServiceContainerAlias/FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY:off/service_container_with_multiple_similar_aliases Test_ServiceLabels ================================================ FILE: .gitattributes ================================================ *.sh text eol=lf ci/version text eol=lf ================================================ FILE: .gitignore ================================================ *.iml config.toml config.toml.lock .project out builds/ vendor/ commands/helpers/archive.zip dockerfiles/**/checksums-* dockerfiles/runner/*/install-deps dockerfiles/runner/alpine/gitlab-runner-linux-* dockerfiles/runner/ubuntu/gitlab-runner_*.deb dockerfiles/runner/ubi-fips/gitlab-runner-linux-* dockerfiles/runner/ubi-fips/gitlab-runner_*.rpm dockerfiles/runner-helper/binaries/ .DS_Store .idea/ tests/ubuntu/.vagrant artifacts tmp/gitlab-test /.tmp/ # Ignore all editorconfig files except the root one .editorconfig !/.editorconfig testsdefinitions.txt /.testoutput/ /.cover/ /.vagrant/ ci/.test-failures.servercore*.txt.updated # Ignore Visual Studio Code internals /.vscode /debug debug.test # Ignore the generated binary /gitlab-runner* node_modules ================================================ FILE: .gitlab/.argo/mr_template_doc_ai.md ================================================ # What does this MR do? This merge request contains translations of GitLab product documentation. The source files are from the `/doc` directory, and translations are returned to language-specific directories under `/doc-locale`. 
## Translation MR information - Argo Request: [{{argo_request_key}}: {{argo_request_name}}]({{argo_request_url}}) - Source: {{source_content_origin}} ## Review workflow For the full review workflow documentation, see the [Translation MR Review Workflow](https://gitlab.com/gitlab-com/localization/docs-site-localization/-/blob/main/translation_mr_review_workflow.md). ### Assignee checklist - [ ] Fix conflicts (check commit history of each file in `main` to identify target changes causing conflicts, such as translation changes on production or TW shortcode/linting updates) - [ ] Fix any pipeline issues - [ ] Rebase if needed - [ ] Check the review app for all impacted pages (Duo can help produce a list of URLs) - [ ] Remove the MR from Draft mode (this triggers the first review by GitLab Duo) - [ ] If the Duo review identified translation errors requiring review by [Japanese content maintainers](https://gitlab.com/gitlab-com/localization/maintainers/japanese), ping and add them as a reviewer. - [ ] Hand off for review to a [tech docs maintainer](https://gitlab.com/gitlab-com/localization/maintainers/tech-docs). 
The MR should be ready to merge at this point ### Review App | Review app | | ---------- | | | ### Reviewer checklist - [ ] Review changes - [ ] Verify build pipeline - [ ] Merge on approval /title Product Docs AI Translation: {{argo_request_key}} #{{translation_mr_number}} /draft /assign @gitlab-argo-bot /label ~documentation /label ~"gitlab-translation-service" /label ~"group::localization" /label ~"docs-only" /label ~"type::maintenance" ================================================ FILE: .gitlab/CODEOWNERS ================================================ # When adding a group as a code owner, make sure to invite the group to the # project here: https://gitlab.com/gitlab-org/gitlab-runner/-/project_members # As described in https://docs.gitlab.com/user/project/codeowners/ * @gitlab-com/runner-maintainers .editorconfig @gitlab-com/runner-group @gitlab-com/runner-maintainers .gitattributes @gitlab-com/runner-group @gitlab-com/runner-maintainers .gitignore @gitlab-com/runner-group @gitlab-com/runner-maintainers [Hosted Runners] @gitlab-org/production-engineering/runners-platform .gitlab/ci/hosted-runners-bridge.gitlab-ci.yml magefiles/hosted_runners.go magefiles/hosted_runners/ [Pipeline Security] helpers/vault/ @gitlab-com/pipeline-security-group/backend helpers/gcp_secret_manager/ @gitlab-com/pipeline-security-group/backend helpers/azure_key_vault/ @gitlab-com/pipeline-security-group/backend helpers/aws/ @gitlab-com/pipeline-security-group/backend helpers/gitlab_secrets_manager/ @gitlab-com/pipeline-security-group/backend helpers/secrets/ @gitlab-com/pipeline-security-group/backend [Documentation] .markdownlint.yml @gitlab-com/runner-docs-maintainers /docs/ @gitlab-com/runner-docs-maintainers ## Localization /docs-locale/ @gitlab-com/localization/maintainers/tech-docs /docs-locale/ja-jp @gitlab-com/localization/maintainers/japanese @gitlab-com/localization/maintainers/tech-docs /argo_translation.yml @gitlab-com/localization/maintainers/tech-docs 
================================================ FILE: .gitlab/changelog.yml ================================================ default_scope: other names: new-feature: New features security-fix: Security fixes fix: Bug fixes maintenance: Maintenance runner-distribution: GitLab Runner distribution documentation: Documentation changes other: Other changes order: - new-feature - security-fix - fix - maintenance - runner-distribution - documentation - other label_matchers: - labels: - runner-distribution scope: runner-distribution - labels: - type::feature scope: new-feature - labels: - feature::addition scope: new-feature - labels: - security scope: security-fix - labels: - type::bug scope: fix - labels: - type::maintenance scope: maintenance - labels: - feature::enhancement scope: maintenance - labels: - technical debt scope: maintenance - labels: - tooling::pipelines scope: maintenance - labels: - tooling::workflow scope: maintenance - labels: - documentation scope: documentation authorship_labels: - Community contribution skip_changelog_labels: - skip-changelog ================================================ FILE: .gitlab/ci/_common.gitlab-ci.yml ================================================ variables: # renovate: datasource=docker depName=golang allowedVersions=/1\.26\..+/ # When updating GO_VERSION, update Go versions in docs/development/_index.md # or the 'docs:check development docs Go version' job will fail RUBY_VERSION: "3.4.8" GO_VERSION: "1.26.1" GOLANGLINT_VERSION: "2.11.4" RUNNER_IMAGES_REGISTRY: registry.gitlab.com/gitlab-org/ci-cd/runner-tools/base-images RUNNER_IMAGES_VERSION: "0.0.41" RUNNER_IMAGES_WINDOWS_GO_URL: https://gitlab.com/api/v4/projects/gitlab-org%2fci-cd%2frunner-tools%2fbase-images/packages/generic/runner-images/v${RUNNER_IMAGES_VERSION}/golang-windows-amd64.zip CI_IMAGE: "${RUNNER_IMAGES_REGISTRY}/ci:${RUNNER_IMAGES_VERSION}" DOCS_LINT_IMAGE: 
registry.gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/lint-markdown:alpine-3.22-vale-3.13.0-markdownlint2-0.19.0-lychee-0.21.0 # Feature flags FF_SCRIPT_SECTIONS: "true" FF_USE_FASTZIP: "true" FF_USE_NEW_BASH_EVAL_STRATEGY: "true" FF_TIMESTAMPS: "true" # Following variables are used in some jobs to install specified software RELEASE_INDEX_GEN_VERSION: "latest" DOCKER_VERSION: 27.3.1 LICENSE_MANAGEMENT_SETUP_CMD: echo "Skip setup. Dependency already vendored" DOCS_GITLAB_REPO_SUFFIX: "runner" # We're overriding rules for the jobs that we want to run. # This will disable all other rules. DEPENDENCY_SCANNING_DISABLED: "true" TRANSFER_METER_FREQUENCY: "5s" CACHE_COMPRESSION_FORMAT: tarzstd GO111MODULE: "on" # renovate: datasource=docker depName=redhat/ubi9-micro versioning=redhat allowedVersions=/9\.4-[0-9]+/ PACKAGES_ITERATION: "1" ZSTD_VERSION: "1.5.7.20250308" ZSTD_CHECKSUM: "a96dc5417943c03fa231bf2d6a586b7ae7254fa52fdc15d302f296b5ff88e1ff0f07120a720149eb82ea0f0c65444393ebf05d2ee1bd1db341b803ff65a2e675" # This is the runner tag that will be used for Kubernetes jobs. The "kubernetes_runner" tag uses the general Kubernetes # runner. There are two deployments for the kubernetes runners - blue and green. Use either "kubernetes_runner_blue" or # "kubernetes_runner_green" if a specific one is desired. Usually one of the two colors will be paused in the CI/CD UI # and the general "kubernetes_runner" tag will pick up whichever isn't. # To not run tests inside Kubernetes, change the tag to gitlab-org. KUBERNETES_RUNNER_TAG: kubernetes_runner # The integration tag cannot be changed as easily as it uses the cluster to run the integration tests as pods. # If required the jobs can be skipped KUBERNETES_RUNNER_INTEGRATION_TAG: kubernetes_integration # Pilot runner toggle. Set USE_PILOT_RUNNERS to "false" to fall back to shared/instance runners # (e.g. to restore GitLab Duo functionality).
When "true" (default), jobs run on functions-pilot-* tagged runners. USE_PILOT_RUNNERS: "true" RUNNER_TAG_DEFAULT: "functions-pilot-linux-amd64" RUNNER_TAG_DOCKER: "functions-pilot-linux-amd64" RUNNER_TAG_2XLARGE: "functions-pilot-linux-amd64" RUNNER_TAG_MEDIUM: "functions-pilot-linux-amd64" RUNNER_TAG_WINDOWS_2019: "windows-1809" RUNNER_TAG_WINDOWS_2022: "windows-21h1" workflow: rules: !reference [".rules:kubernetes:tag:if-not-canonical", rules] default: image: $CI_IMAGE tags: - !reference [.instance-default] retry: max: 2 when: - runner_system_failure .no_cache: cache: {} .no_dependencies: dependencies: [] .no_cache_and_dependencies: extends: - .no_cache - .no_dependencies .docker: services: - docker:${DOCKER_VERSION}-dind variables: DOCKER_HOST: "unix:///certs/client/docker.sock" BUILDX_BAKE_ENTITLEMENTS_FS: 0 tags: - !reference [.instance-default-docker] .go-cache: variables: GODEBUG: gocachetest=1 GOCACHE: $CI_PROJECT_DIR/.gocache-$CI_COMMIT_REF_PROTECTED before_script: - mkdir -p "$GOCACHE" - ./ci/touch_git cache: paths: - $CI_PROJECT_DIR/.gocache-false/ key: "${CI_JOB_NAME}-${CI_COMMIT_REF_SLUG}" .go-cache-windows: variables: GODEBUG: gocachetest=1 GOCACHE: $CI_PROJECT_DIR\.gocache-$CI_COMMIT_REF_PROTECTED before_script: - New-Item -Path "$Env:GOCACHE" -Type Directory -Force - $env:GOCACHE = (Resolve-Path $env:GOCACHE).Path - ./ci/touch_git.ps1 cache: paths: - $CI_PROJECT_DIR\.gocache-false\ key: "${CI_JOB_NAME}-${CI_COMMIT_REF_SLUG}" .windows-dependency-checksums: variables: GIT_WINDOWS_AMD64_CHECKSUM: "36498716572394918625476ca207df3d5f8b535a669e9aad7a99919d0179848c" GIT_LFS_WINDOWS_AMD64_CHECKSUM: "94435072f6b3a6f9064b277760c8340e432b5ede0db8205d369468b9be52c6b6" PWSH_WINDOWS_AMD64_CHECKSUM: "ED331A04679B83D4C013705282D1F3F8D8300485EB04C081F36E11EAF1148BD0" .windows1809_variables: variables: WINDOWS_VERSION: servercore1809 WINDOWS_PREBUILT: servercore-ltsc2019 .windows1809: extends: - .windows1809_variables tags: - !reference [.instance-windows-2019] 
.windows1809_nano: extends: - .windows1809 variables: WINDOWS_VERSION: nanoserver1809 WINDOWS_PREBUILT: nanoserver-ltsc2019 .windows21H2_variables: variables: WINDOWS_VERSION: servercore21H2 WINDOWS_PREBUILT: servercore-ltsc2022 .windows21H2: extends: - .windows21H2_variables tags: - !reference [.instance-windows-2022] .windows21H2_nano: extends: - .windows21H2 variables: WINDOWS_VERSION: nanoserver21H2 WINDOWS_PREBUILT: nanoserver-ltsc2022 # .stage_done is used as a sentinel at stage n for stage n-1 completion, so we can kick off builds in later stages # without explicitly waiting for the completion of the n-1 stage .stage_done: extends: - .no_cache_and_dependencies - .rules:merge_request_pipelines image: alpine:latest variables: GIT_STRATEGY: none script: - exit 0 ================================================ FILE: .gitlab/ci/_kubernetes.gitlab-ci.yml ================================================ .kubernetes runner: interruptible: true timeout: 30m tags: - $KUBERNETES_RUNNER_TAG .unit tests kubernetes limits: variables: # The default limits are defined in https://gitlab.com/gitlab-org/ci-cd/runner-tools/runner-kubernetes-infra # The helper container CPU request is 1, with the build container 3 CPU requests # the scheduler should allocate 4 CPUs for this pod KUBERNETES_CPU_REQUEST: "3" KUBERNETES_MEMORY_REQUEST: "6Gi" KUBERNETES_MEMORY_LIMIT: "6Gi" .check generated files kubernetes limits: variables: KUBERNETES_MEMORY_REQUEST: "6Gi" KUBERNETES_MEMORY_LIMIT: "6Gi" ================================================ FILE: .gitlab/ci/_project_canonical.gitlab-ci.yml ================================================ .instance-default: $RUNNER_TAG_DEFAULT .instance-default-docker: $RUNNER_TAG_DOCKER .instance-2xlarge: $RUNNER_TAG_2XLARGE .instance-medium: $RUNNER_TAG_MEDIUM .instance-windows-2019: $RUNNER_TAG_WINDOWS_2019 .instance-windows-2022: $RUNNER_TAG_WINDOWS_2022 ================================================ FILE: .gitlab/ci/_project_fork.gitlab-ci.yml 
================================================ .instance-default: gitlab-org .instance-default-docker: gitlab-org-docker .instance-2xlarge: gitlab-org-docker .instance-medium: gitlab-org-docker # we don't use windows instances on forks, but these # still need to be present because they're referenced, # so we just provide a bogus tag name .instance-windows-2019: not-intended-for-forks .instance-windows-2022: not-intended-for-forks ================================================ FILE: .gitlab/ci/_rules.gitlab-ci.yml ================================================ #################### # Changes patterns # #################### .code-backstage-patterns: &code-backstage-patterns - ".gitlab-ci.yml" - ".golangci.yml" - ".gitlab/ci/**/*" - ".gitattributes" - "Makefile*" - "**/*.go" - "{ci,dockerfiles,packaging,scripts,tests}/**/*" - "**/testdata/**/*" - "go.*" - "tmp/gitlab-test/**" - "VERSION" .docs-patterns: &docs-patterns - ".vale.ini" - ".markdownlint.yml" - "docs/**/*" - "scripts/lint-docs" .docs-i18n-patterns: &docs-i18n-patterns - ".vale.ini" - ".markdownlint.yml" - "docs-locale/**/*" - "scripts/lint-i18n-docs" .docs-all-patterns: &docs-all-patterns - ".vale.ini" - ".markdownlint.yml" - "docs/**/*" - "docs-locale/**/*" - "scripts/lint-docs" - "scripts/lint-i18n-docs" ############## # Conditions # ############## .if-not-canonical-namespace: &if-not-canonical-namespace if: '$CI_PROJECT_NAMESPACE !~ /^gitlab-org($|\/)/' .if-security-project-path: &if-security-project-path if: '$CI_PROJECT_PATH == "gitlab-org/security/gitlab-runner"' .if-default-branch: &if-default-branch if: "$CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH" .if-release-candidate-tag: &if-release-candidate-tag if: '$CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+-rc[0-9]+/' .if-stable-release-tag: &if-stable-release-tag if: '$CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/' .if-merge-request-pipeline: &if-merge-request-pipeline if: $CI_PIPELINE_SOURCE == "merge_request_event" .if-not-web-or-push-pipeline: 
&if-not-web-or-push-pipeline if: $CI_PIPELINE_SOURCE != "web" && $CI_PIPELINE_SOURCE != "push" .if-runner-merge-request-pipeline: &if-runner-merge-request-pipeline if: $CI_PIPELINE_SOURCE == "merge_request_event" && $CI_PROJECT_PATH == "gitlab-org/gitlab-runner" .if-runner-security-merge-request-pipeline: &if-runner-security-merge-request-pipeline if: $CI_PIPELINE_SOURCE == "merge_request_event" && $CI_PROJECT_PATH == "gitlab-org/security/gitlab-runner" .if-not-canonical-namespace-merge-request-pipeline: &if-not-canonical-namespace-merge-request-pipeline if: $CI_PIPELINE_SOURCE == "merge_request_event" && $CI_PROJECT_NAMESPACE !~ /^gitlab-org($|\/)/ .if-runner-default-branch: &if-runner-default-branch if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PROJECT_PATH == "gitlab-org/gitlab-runner" .if-security-runner-default-branch: &if-security-runner-default-branch if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PROJECT_PATH == "gitlab-org/security/gitlab-runner" .if-runner-or-security-runner-default-branch: &if-runner-or-security-runner-default-branch if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && ($CI_PROJECT_PATH == "gitlab-org/gitlab-runner" || $CI_PROJECT_PATH == "gitlab-org/security/gitlab-runner") .if-runner-or-security-runner-stable-ref: &if-runner-or-security-runner-stable-ref if: $CI_COMMIT_REF_NAME =~ /\A[0-9]+-[0-9]+-stable\z/ && ($CI_PROJECT_PATH == "gitlab-org/gitlab-runner" || $CI_PROJECT_PATH == "gitlab-org/security/gitlab-runner") .if-runner-or-security-runner-feature-ref: &if-runner-or-security-runner-feature-ref if: $CI_COMMIT_REF_NAME =~ /feature\/.+/ && ($CI_PROJECT_PATH == "gitlab-org/gitlab-runner" || $CI_PROJECT_PATH == "gitlab-org/security/gitlab-runner") .if-runner-release-ref: &if-runner-release-ref if: $CI_COMMIT_REF_NAME =~ /\Av[0-9]+\.[0-9]+\.[0-9]+(-rc[0-9]+)?\z/ && $CI_PROJECT_PATH == "gitlab-org/gitlab-runner" .if-runner-stable-release-ref: &if-runner-stable-release-ref if: $CI_COMMIT_REF_NAME =~ /\Av[0-9]+\.[0-9]+\.[0-9]+?\z/ && 
$CI_PROJECT_PATH == "gitlab-org/gitlab-runner" .if-security-runner-release-ref: &if-security-runner-release-ref if: $CI_COMMIT_REF_NAME =~ /\Av[0-9]+\.[0-9]+\.[0-9]+?\z/ && $CI_PROJECT_PATH == "gitlab-org/security/gitlab-runner" .if-runner-bleeding-edge-release-ref: &if-runner-bleeding-edge-release-ref if: $CI_COMMIT_REF_NAME =~ /\Av[0-9]+\.[0-9]+\.[0-9]+-rc[0-9]+\z/ && $CI_PROJECT_PATH == "gitlab-org/gitlab-runner" .if-runner-or-security-bleeding-edge-release-ref: &if-runner-or-security-bleeding-edge-release-ref if: $CI_COMMIT_REF_NAME =~ /\Av[0-9]+\.[0-9]+\.[0-9]+-rc[0-9]+\z/ && ($CI_PROJECT_PATH == "gitlab-org/gitlab-runner" || $CI_PROJECT_PATH == "gitlab-org/security/gitlab-runner") .if-not-gitlab-runner-community-path: &if-not-gitlab-runner-community-path if: $CI_MERGE_REQUEST_SOURCE_PROJECT_PATH != null && $CI_MERGE_REQUEST_SOURCE_PROJECT_PATH !~ /^gitlab-org($|\/)/ ######################## # Default branch rules # ######################## .rules:default-branch-only:no_docs: rules: - <<: *if-runner-or-security-runner-default-branch changes: *code-backstage-patterns .rules:default-branch-only:no_docs:always: rules: - <<: *if-runner-or-security-runner-default-branch changes: *code-backstage-patterns when: always ####################### # Merge Request rules # ####################### .rules:merge_request_pipelines: rules: - <<: *if-merge-request-pipeline - <<: *if-runner-or-security-runner-default-branch - <<: *if-runner-or-security-runner-stable-ref - <<: *if-runner-or-security-runner-feature-ref - <<: *if-runner-release-ref - <<: *if-security-runner-release-ref .rules:merge_request_pipelines:no_docs: rules: - <<: *if-merge-request-pipeline changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-default-branch changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-stable-ref changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-feature-ref changes: *code-backstage-patterns - <<: *if-runner-release-ref changes: 
*code-backstage-patterns - <<: *if-security-runner-release-ref changes: *code-backstage-patterns .rules:merge_request_pipelines:docs: rules: - <<: *if-merge-request-pipeline changes: *docs-patterns - <<: *if-runner-or-security-runner-default-branch changes: *docs-patterns - <<: *if-runner-or-security-runner-stable-ref changes: *docs-patterns - <<: *if-runner-or-security-runner-feature-ref changes: *docs-patterns - <<: *if-runner-release-ref changes: *docs-patterns - <<: *if-security-runner-release-ref changes: *docs-patterns .rules:merge_request_pipelines:docs-i18n: rules: - <<: *if-merge-request-pipeline changes: *docs-i18n-patterns - <<: *if-runner-or-security-runner-default-branch changes: *docs-i18n-patterns - <<: *if-runner-or-security-runner-stable-ref changes: *docs-i18n-patterns - <<: *if-runner-or-security-runner-feature-ref changes: *docs-i18n-patterns - <<: *if-runner-release-ref changes: *docs-i18n-patterns - <<: *if-security-runner-release-ref changes: *docs-i18n-patterns .rules:merge_request_pipelines:docs-all: rules: - <<: *if-merge-request-pipeline changes: *docs-all-patterns - <<: *if-runner-or-security-runner-default-branch changes: *docs-all-patterns - <<: *if-runner-or-security-runner-stable-ref changes: *docs-all-patterns - <<: *if-runner-or-security-runner-feature-ref changes: *docs-all-patterns - <<: *if-runner-release-ref changes: *docs-all-patterns - <<: *if-security-runner-release-ref changes: *docs-all-patterns .rules:merge_request_pipelines:no_docs:no-community-mr: rules: - <<: *if-not-canonical-namespace when: never - <<: *if-merge-request-pipeline changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-default-branch changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-stable-ref changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-feature-ref changes: *code-backstage-patterns - <<: *if-runner-release-ref changes: *code-backstage-patterns - <<: *if-security-runner-release-ref changes: 
*code-backstage-patterns .rules:merge_request_pipelines:no_docs:no-community-mr:no-security-mr: rules: - <<: *if-not-canonical-namespace when: never - <<: *if-security-project-path when: never - <<: *if-merge-request-pipeline changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-default-branch changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-stable-ref changes: *code-backstage-patterns - <<: *if-runner-release-ref changes: *code-backstage-patterns - <<: *if-security-runner-release-ref changes: *code-backstage-patterns # Rules cannot be merged; instead, opt for creating a new rule like this one .rules:merge_request_pipelines:no_docs:only_canonical: rules: - <<: *if-not-canonical-namespace when: never - <<: *if-runner-merge-request-pipeline changes: *code-backstage-patterns - <<: *if-runner-default-branch changes: *code-backstage-patterns - <<: *if-runner-stable-release-ref changes: *code-backstage-patterns - <<: *if-runner-release-ref changes: *code-backstage-patterns .rules:merge_request_pipelines:no_docs:unit_test: rules: - <<: *if-not-canonical-namespace-merge-request-pipeline changes: *code-backstage-patterns allow_failure: true - <<: *if-merge-request-pipeline changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-default-branch changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-stable-ref changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-feature-ref changes: *code-backstage-patterns - <<: *if-runner-release-ref changes: *code-backstage-patterns - <<: *if-security-runner-release-ref changes: *code-backstage-patterns .rules:merge_request_pipelines:no_docs:always: rules: - <<: *if-merge-request-pipeline changes: *code-backstage-patterns when: always - <<: *if-runner-or-security-runner-default-branch changes: *code-backstage-patterns when: always - <<: *if-runner-or-security-runner-stable-ref changes: *code-backstage-patterns when: always - <<: *if-runner-release-ref
changes: *code-backstage-patterns when: always - <<: *if-security-runner-release-ref changes: *code-backstage-patterns when: always ################# # Release rules # ################# .rules:release:all: rules: - <<: *if-not-canonical-namespace when: never - <<: *if-default-branch - <<: *if-release-candidate-tag - <<: *if-stable-release-tag .rules:release:bleeding-edge: rules: - <<: *if-not-gitlab-runner-community-path when: never - <<: *if-runner-default-branch changes: *code-backstage-patterns - <<: *if-runner-bleeding-edge-release-ref changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-stable-ref changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-feature-ref changes: *code-backstage-patterns .rules:release-or-security:bleeding-edge-or-stable: rules: - <<: *if-not-gitlab-runner-community-path when: never - <<: *if-runner-or-security-runner-default-branch changes: *code-backstage-patterns - <<: *if-runner-or-security-bleeding-edge-release-ref changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-stable-ref changes: *code-backstage-patterns - <<: *if-runner-or-security-runner-feature-ref changes: *code-backstage-patterns .rules:release:stable-or-rc: rules: - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+(-rc[0-9]+)?$/ && $CI_PROJECT_PATH == "gitlab-org/gitlab-runner" changes: *code-backstage-patterns when: on_success - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+(-rc[0-9]+)?$/ && $CI_PROJECT_PATH == "gitlab-org/security/gitlab-runner" changes: *code-backstage-patterns when: manual .rules:release:stable:branch: rules: - <<: *if-runner-stable-release-ref changes: *code-backstage-patterns - <<: *if-security-runner-release-ref changes: *code-backstage-patterns .rules:release:stable:branch:ignore-changes: rules: - <<: *if-runner-stable-release-ref - <<: *if-security-runner-release-ref .rules:release:development:merge-requests: rules: - <<: *if-runner-merge-request-pipeline changes: *code-backstage-patterns - <<: 
*if-runner-security-merge-request-pipeline changes: *code-backstage-patterns .rules:release:development:merge-requests:no-community-mr: rules: - <<: *if-not-canonical-namespace when: never - <<: *if-runner-merge-request-pipeline changes: *code-backstage-patterns - <<: *if-runner-security-merge-request-pipeline changes: *code-backstage-patterns .rules:runner-only:release:development:merge-requests: rules: - <<: *if-runner-merge-request-pipeline changes: *code-backstage-patterns .rules:build:test:images:merge-requests: rules: - <<: *if-runner-merge-request-pipeline changes: - tests/dockerfiles/* .rules:prepare:test-ci-scripts:merge-requests: rules: - <<: *if-runner-merge-request-pipeline ############## # Docs rules # ############## .rules:docs:skip: rules: - changes: *docs-patterns when: never - when: on_success .rules:docs:review: rules: - <<: *if-not-canonical-namespace when: never - <<: *if-merge-request-pipeline when: manual ############## # Tags rules # ############## .rules:kubernetes:tag:if-not-canonical: rules: - <<: *if-not-canonical-namespace variables: # !reference doesn't work in variables KUBERNETES_RUNNER_TAG: gitlab-org - if: '$USE_PILOT_RUNNERS == "false"' variables: RUNNER_TAG_DEFAULT: "gitlab-org" RUNNER_TAG_DOCKER: "gitlab-org-docker" RUNNER_TAG_2XLARGE: "saas-linux-2xlarge-amd64" RUNNER_TAG_MEDIUM: "saas-linux-medium-amd64" RUNNER_TAG_WINDOWS_2019: "windows-1809" RUNNER_TAG_WINDOWS_2022: "windows-21h1" - when: always ######################## # Binary signing rules # ######################## .rules:sign_binaries: variables: GCLOUD_PROJECT: gitlab-ci-runners-signing SERVICE_ACCOUNT: gitlab-runner-signer@gitlab-ci-runners-signing.iam.gserviceaccount.com WI_POOL_PROVIDER: //iam.googleapis.com/projects/8522242139/locations/global/workloadIdentityPools/gitlab-pool-oidc-$CI_PROJECT_ID/providers/gitlab-jwt-$CI_PROJECT_ID ================================================ FILE: .gitlab/ci/build.gitlab-ci.yml ================================================ 
# Builds the helper container images for every supported OS/arch target.
helper images:
  tags:
    - !reference [.instance-2xlarge]
  extends:
    - .docker
    - .rules:merge_request_pipelines:no_docs:no-community-mr
  stage: build
  needs:
    - "binaries"
  script:
    - ./ci/touch_git
    - make helper-images
    - ls -alh out/helper-images/
  retry: 2
  artifacts:
    paths:
      - out/helper-images/
    expire_in: 7d
  parallel:
    matrix:
      - TARGETS:
          - alpine alpine-pwsh ubuntu ubuntu-pwsh ubi-fips concrete
          - windows-nanoserver-ltsc2019 windows-servercore-ltsc2019
          - windows-nanoserver-ltsc2022 windows-servercore-ltsc2022
          - windows-servercore-ltsc2025
          - windows-servercore-ltsc2025-arm64

# Packages the Linux helper images as compressed tarballs for distribution.
prebuilt helper images:
  tags:
    - !reference [.instance-2xlarge]
  extends:
    - .docker
    - .rules:merge_request_pipelines:no_docs:no-community-mr
  stage: build
  image: "${RUNNER_IMAGES_REGISTRY}/ci:${RUNNER_IMAGES_VERSION}-prebuilt-images"
  needs:
    - "helper images: [alpine alpine-pwsh ubuntu ubuntu-pwsh ubi-fips concrete]"
  script:
    - make prebuilt-helper-images
    - ls -alh out/helper-images/
  artifacts:
    paths:
      - out/helper-images/*.tar.xz
      - out/helper-images/*.tar.zst
    expire_in: 7d

prebuilt helper images windows 2019:
  tags:
    - !reference [.instance-2xlarge]
  extends:
    - prebuilt helper images
    - .rules:merge_request_pipelines:no_docs:no-community-mr
  needs:
    - "helper images: [windows-nanoserver-ltsc2019 windows-servercore-ltsc2019]"

prebuilt helper images windows 2022:
  tags:
    - !reference [.instance-2xlarge]
  extends:
    - prebuilt helper images
    - .rules:merge_request_pipelines:no_docs:no-community-mr
  needs:
    - "helper images: [windows-nanoserver-ltsc2022 windows-servercore-ltsc2022]"

prebuilt helper images windows 2025:
  tags:
    - !reference [.instance-2xlarge]
  extends:
    - prebuilt helper images
    - .rules:merge_request_pipelines:no_docs:no-community-mr
  needs:
    - "helper images: [windows-servercore-ltsc2025]"

prebuilt helper images windows 2025 arm64:
  tags:
    - !reference [.instance-2xlarge]
  extends:
    - prebuilt helper images
    - .rules:merge_request_pipelines:no_docs:no-community-mr
  needs:
    - "helper images: [windows-servercore-ltsc2025-arm64]"

# Builds the main gitlab-runner container images for the Linux flavors.
runner images:
  tags:
    - !reference [.instance-2xlarge]
  extends:
    - .docker
    - .rules:merge_request_pipelines:no_docs:no-community-mr
  stage: build
  needs:
    - "binaries"
  script:
    - ./ci/touch_git
    - TARGETS="ubuntu alpine ubi-fips" make runner-images
    - ls -alh out/runner-images/
  retry: 2
  artifacts:
    paths:
      - out/runner-images/
    expire_in: 7d

test images:
  extends:
    - .docker
    - .rules:build:test:images:merge-requests
  stage: build
  needs:
    - "binaries"
  script:
    - docker buildx create --name builder --use --driver docker-container default || true
    - echo "${CI_REGISTRY_PASSWORD}" | docker login --username "${CI_REGISTRY_USER}" --password-stdin "${CI_REGISTRY}"
    - cd tests/dockerfiles && docker buildx bake --progress plain tests-images --set *.output="type=registry,compression=zstd"
    - docker logout "${CI_REGISTRY}"

# Compiles the runner and helper binaries (regular and FIPS variants), and
# signs the Windows/macOS binaries on canonical tag pipelines only.
binaries:
  image: "${RUNNER_IMAGES_REGISTRY}/ubi-go:${RUNNER_IMAGES_VERSION}"
  tags:
    - !reference [.instance-2xlarge]
  extends:
    - .rules:merge_request_pipelines:no_docs:no-community-mr
    - .rules:sign_binaries
    - .google-oidc:auth
    - .go-cache
  stage: build
  needs: []
  before_script:
    - !reference [.go-cache, before_script]
    - !reference [.google-oidc:auth, before_script]
  script:
    - go mod download
    - make -j$(($(nproc) * 2)) helper-bin helper-bin-fips runner-bin runner-bin-fips
    - |
      if [[ "$CI_SERVER_HOST" == "gitlab.com" && ("$CI_PROJECT_PATH" == "gitlab-org/gitlab-runner" || "$CI_PROJECT_PATH" == "gitlab-org/security/gitlab-runner") && -n "$CI_COMMIT_TAG" ]]; then
        echo "Signing binaries..."
        scripts/sign-windows-binaries
        scripts/sign-macos-binaries
      else
        echo "Not signing binaries"
      fi
  artifacts:
    paths:
      - out/binaries/gitlab-runner*
    expire_in: 7d

# Clones the test fixture repository used by integration tests; retries up to
# three times because the clone goes over the network.
clone test repo:
  extends:
    - .rules:merge_request_pipelines:no_docs
    - .no_cache_and_dependencies
    - .kubernetes runner
  stage: build
  image: alpine:latest
  needs: []
  variables:
    GIT_STRATEGY: none
  script:
    - apk add git
    - mkdir tmp
    - succeed=0
    - for i in {1..3}; do git clone https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-pipeline-tests/gitlab-test tmp/gitlab-test && succeed=1 && break; echo "retrying"; done
    - '[[ "$succeed" -eq 1 ]]'
  artifacts:
    paths:
      - tmp/gitlab-test
    expire_in: 7d

rpm verify fips:
  stage: build
  extends:
    - .rules:merge_request_pipelines:no_docs:no-community-mr
    - .kubernetes runner
  image: "registry.gitlab.com/gitlab-org/cloud-native/container-dependencies-finder/cdf:main"
  needs:
    - "runner images"
    - "helper images: [alpine alpine-pwsh ubuntu ubuntu-pwsh ubi-fips concrete]"
  variables:
    # VERBOSE: 1
    OCI_TARS: |-
      runner=out/runner-images/ubi-fips.tar
      helper=out/helper-images/ubi-fips-x86_64.tar
    # See https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6370
    # Disable mtime checks within `rpm --verify`
    RPM_VERIFY_NOMTIME: "true"
  script:
    - ci/rpm_verify_fips
  artifacts:
    paths:
      - logs/
    expire_in: 7d

# prepare done is used as a sentinel for "Prepare" stage completion, so we can kick off builds in later stages
# without waiting for the completion of the Prebuild stage
prepare done:
  stage: build
  extends:
    - .stage_done

================================================
FILE: .gitlab/ci/coverage.gitlab-ci.yml
================================================
.coverage_job:
  extends:
    - .rules:merge_request_pipelines:no_docs:always
  stage: coverage

# Aggregates coverage from the (optional) unit/integration test jobs and
# checks quarantined Windows test results against the known-flaky lists.
test coverage report:
  extends:
    - .coverage_job
  coverage: /regular total:\s+\(statements\)\s+\d+.\d+\%/
  needs:
    - job: unit test
      optional: true
    - job: integration test
      optional: true
    - job: windows 1809 integration tests
      optional: true
    - job: windows 21H2 integration tests
      optional: true
    - job: windows 1809 unit tests
      optional: true
    - job: windows 21H2 unit tests
      optional: true
  script:
    - make cobertura_report
    - test -z "$(find .splitic -name 'junit_servercore1809_*.xml' -maxdepth 1 -print -quit)" || .tmp/bin/splitic junit-check -quarantined ci/.test-failures.servercore1809.txt .splitic/junit_servercore1809_*.xml
    - test -z "$(find .splitic -name 'junit_servercore21H2_*.xml' -maxdepth 1 -print -quit)" || .tmp/bin/splitic junit-check -quarantined ci/.test-failures.servercore21H2.txt .splitic/junit_servercore21H2_*.xml
  artifacts:
    reports:
      coverage_report:
        coverage_format: cobertura
        path: out/cobertura/cobertura-*coverage.xml
    paths:
      - out/coverage/
    expire_in: 7d
    expose_as: "Code Coverage"

# Disable this for now since
# https://gitlab.com/gitlab-org/gitlab/-/issues/365885 blocks us from upgrading
# to go 1.18.x. Re-enable this when the above ticket is fixed.
#code navigation:
#  # See https://docs.gitlab.com/user/project/code_intelligence/#configuration
#  extends:
#    - .coverage_job
#  allow_failure: true # recommended
#  needs:
#    - prepare done
#  image: sourcegraph/lsif-go:v1.9.0
#  script:
#    - lsif-go
#  artifacts:
#    reports:
#      lsif: dump.lsif

================================================
FILE: .gitlab/ci/deploy.gitlab-ci.yml
================================================
# This job should only run if the UBI images downstream pipeline is successful.
# It does not depend on it since the pipeline incorrectly assumes that it has passed
# when the trigger job is just created. Instead it depends on the whole postreleases stage.
trigger deploy to kubernetes:
  stage: deploy
  variables:
    UPSTREAM_CI_COMMIT_REF: $CI_COMMIT_BRANCH
    UPSTREAM_CI_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
  rules:
    # Override rules in .trigger-downstream-pipeline-ref to remove MR pipelines
    - if: '$CI_PROJECT_NAMESPACE !~ /^gitlab-org($|\/)/'
      when: never
    - if: $CI_PROJECT_PATH != "gitlab-org/gitlab-runner"
      when: never
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: never
    # The KUBERNETES_DEPLOY_BRANCH variable would usually be "main"
    # but in some cases we might want to deploy from a different branch
    # For example, a feature branch.
    - if: $CI_COMMIT_BRANCH == $KUBERNETES_DEPLOY_BRANCH || ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $KUBERNETES_DEPLOY_BRANCH == "")
      changes: !reference [.code-backstage-patterns]
      variables:
        UPSTREAM_CI_COMMIT_REF: $CI_COMMIT_REF_NAME
    - if: $CI_COMMIT_REF_NAME =~ /\Av[0-9]+\.[0-9]+\.[0-9]+?\z/
      changes: !reference [.code-backstage-patterns]
      variables:
        UPSTREAM_CI_COMMIT_REF: $CI_COMMIT_REF_NAME
    - if: $CI_COMMIT_REF_NAME =~ /\Av[0-9]+\.[0-9]+\.[0-9]+(-rc[0-9]+)?\z/
      changes: !reference [.code-backstage-patterns]
      variables:
        UPSTREAM_CI_COMMIT_REF: $CI_COMMIT_REF_NAME
  trigger:
    project: gitlab-org/ci-cd/runner-tools/runner-kubernetes-infra

================================================
FILE: .gitlab/ci/docs.gitlab-ci.yml
================================================
.review-docs:
  stage: docs
  extends:
    - .rules:docs:review
    - .no_cache_and_dependencies
    - .kubernetes runner
  image: ruby:${RUBY_VERSION}-alpine
  needs: []
  before_script:
    - gem install gitlab --no-doc
    # We need to download the script rather than clone the repo since the
    # review-docs-cleanup job will not be able to run when the branch gets
    # deleted (when merging the MR).
    - apk add --update openssl
    - wget https://gitlab.com/gitlab-org/gitlab/-/raw/master/scripts/trigger-build.rb
    - chmod 755 trigger-build.rb
  variables:
    GIT_STRATEGY: none
    # By default, deploy the Review App using the `main` branch of the `gitlab-org/gitlab-docs` project
    DOCS_BRANCH: main
  allow_failure: true

# Trigger a docs build in docs-gitlab-com project
# Useful to preview the docs changes live
# https://docs.gitlab.com/development/documentation/review_apps/
review-docs-deploy:
  extends:
    - .review-docs
  environment:
    name: review-docs/mr-${CI_MERGE_REQUEST_IID}
    auto_stop_in: 2 weeks
    url: https://docs.gitlab.com/upstream-review-mr-${DOCS_GITLAB_REPO_SUFFIX}-${CI_MERGE_REQUEST_IID}/${DOCS_GITLAB_REPO_SUFFIX}
    on_stop: review-docs-cleanup
  script:
    - ./trigger-build.rb docs deploy

# Cleanup remote environment of docs-gitlab-com
review-docs-cleanup:
  extends:
    - .review-docs
  environment:
    name: review-docs/mr-${CI_MERGE_REQUEST_IID}
    action: stop
  script:
    - ./trigger-build.rb docs cleanup

================================================
FILE: .gitlab/ci/hosted-runners-bridge.gitlab-ci.yml
================================================
.hosted-runners-bridge:
  stage: postrelease
  extends:
    - .kubernetes runner
  variables:
    GITLAB_TOKEN: $HOSTED_RUNNERS_BRIDGE_TOKEN
  script:
    - mage hostedRunners:bridge

hosted runners bridge bleeding edge:
  extends:
    - .hosted-runners-bridge
  rules:
    - if: !reference [.if-not-canonical-namespace, if]
      when: never
    - if: !reference [.if-not-web-or-push-pipeline, if]
      when: never
    - if: !reference [.if-runner-default-branch, if]
      changes: !reference [.code-backstage-patterns]
    # NOTE(review): reconstructed from a whitespace-mangled source; `when: manual`
    # is attached to this last rule item — confirm against the original file.
    - if: !reference [.if-security-runner-default-branch, if]
      changes: !reference [.code-backstage-patterns]
      when: manual
  needs:
    - job: bleeding edge docker images
      artifacts: false
    - job: unstable pulp
      artifacts: false

hosted runners bridge stable:
  extends:
    - .hosted-runners-bridge
  rules:
    - if: !reference [.if-not-canonical-namespace, if]
      when: never
    - if: !reference [.if-not-web-or-push-pipeline, if]
      when: never
    - if: !reference [.if-runner-stable-release-ref, if]
      changes: !reference [.code-backstage-patterns]
    - if: !reference [.if-security-runner-release-ref, if]
      changes: !reference [.code-backstage-patterns]
      when: manual
  needs:
    - job: stable docker images
      artifacts: false
    - job: stable pulp
      artifacts: false

================================================
FILE: .gitlab/ci/package.gitlab-ci.yml
================================================
# Shared base for the DEB/RPM packaging jobs; imports the GPG key when present
# so the resulting packages can be signed.
.packages:
  extends:
    - .rules:merge_request_pipelines:no_docs:no-community-mr
    - .kubernetes runner
  stage: package
  environment:
    name: package/$PKGS/$CI_COMMIT_REF_NAME
  needs:
    - "prebuilt helper images"
    - "binaries"
  before_script:
    - |
      # checking GPG signing support
      if [ -f "$GPG_KEY_PATH" ]; then
        cat ${GPG_KEY_PATH} | gpg --batch --no-tty --allow-secret-key-import --import -
        export GPG_KEYID=$(gpg --with-colon --list-secret-keys | head -n1 | cut -d : -f 5)
        export GPG_PASSPHRASE=$(cat ${GPG_PASSPHRASE_PATH})
      else
        echo -e "\033[0;31m****** GPG signing disabled ******\033[0m"
      fi
  script:
    - mage package:prepare
    - mage package:verifyIterationVariable
    - mage package:$PKGS
  artifacts:
    paths:
      - out/deb/
      - out/rpm/
    expire_in: 7d

package-deb:
  extends:
    - .packages
  parallel:
    matrix:
      - PKGS:
          - deb64
          - debArm64
          - deb32
          - debArm32
          - debIbm
          - debRiscv64
          - debLoong64

package-rpm:
  extends:
    - .packages
  parallel:
    matrix:
      - PKGS:
          - rpm64
          - rpmArm64
          - rpm32
          - rpmArm32
          - rpmIbm
          - rpmRiscv64
          - rpmFips
          - rpmLoong64

package-helpers:
  extends:
    - .packages
  script:
    - mage package:prepare
    - mage package:verifyIterationVariable
    - mage package:helpersDeb
    - mage package:helpersRpm

================================================
FILE: .gitlab/ci/postrelease.gitlab-ci.yml
================================================
.trigger-downstream-pipeline-ref:
  stage: postrelease
  variables:
    UPSTREAM_CI_COMMIT_REF: $CI_COMMIT_BRANCH
    UPSTREAM_CI_COMMIT_SHORT_SHA: $CI_COMMIT_SHORT_SHA
  rules:
    # copy of .rules:merge_request_pipelines:no_docs:only_canonical, slightly modified for variables
    - if: '$CI_PROJECT_NAMESPACE !~ /^gitlab-org($|\/)/'
      when: never
    - if: $CI_PROJECT_PATH != "gitlab-org/gitlab-runner"
      when: never
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes: !reference [.code-backstage-patterns]
      variables:
        UPSTREAM_CI_COMMIT_REF: "refs/merge-requests/${CI_MERGE_REQUEST_IID}/merge"
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      changes: !reference [.code-backstage-patterns]
      variables:
        UPSTREAM_CI_COMMIT_REF: $CI_COMMIT_REF_NAME
    - if: $CI_COMMIT_REF_NAME =~ /\Av[0-9]+\.[0-9]+\.[0-9]+?\z/
      changes: !reference [.code-backstage-patterns]
      variables:
        UPSTREAM_CI_COMMIT_REF: $CI_COMMIT_REF_NAME
    - if: $CI_COMMIT_REF_NAME =~ /\Av[0-9]+\.[0-9]+\.[0-9]+(-rc[0-9]+)?\z/
      changes: !reference [.code-backstage-patterns]
      variables:
        UPSTREAM_CI_COMMIT_REF: $CI_COMMIT_REF_NAME

trigger UBI images build:
  extends:
    - .trigger-downstream-pipeline-ref
  needs:
    - job: "development S3"
      optional: true
    - job: "bleeding edge S3"
      optional: true
    - job: "stable S3"
      optional: true
  variables:
    BUILD_RELEASE: $CI_COMMIT_REF_NAME
    BUILD_RELEASE_SHA: $CI_COMMIT_SHORT_SHA
  trigger:
    strategy: depend
    project: gitlab-org/ci-cd/gitlab-runner-ubi-images

trigger runner-incept tests:
  extends:
    - .trigger-downstream-pipeline-ref
  # make sure the needs jobs here match the ones in the downstream pipeline, or jobs in the downstream pipeline will
  # fail with: "This job could not start because it could not retrieve the needed artifacts."
  needs:
    - binaries
    - "helper images"
    - "prebuilt helper images"
    - job: "development docker images"
      optional: true
    - job: "bleeding edge docker images"
      optional: true
    - job: "stable docker images"
      optional: true
  trigger:
    project: gitlab-org/ci-cd/tests/runner-incept
    # strategy: depend

# Computes the from/to version pair consumed by the package-tests downstream
# pipeline and exports it via a dotenv-style artifact file.
package test variables:
  extends:
    - .trigger-downstream-pipeline-ref
    - .rules:release:bleeding-edge
    - .kubernetes runner
  needs:
    - "unstable pulp"
  image: alpine:latest
  artifacts:
    paths:
      - package_test_vars.env
  before_script:
    - apk add git bash
  script:
    - rm -f package_test_vars.env
    - echo "export RUNNER_FROM_VERSION=\"$(git tag | sort -rV | sed "1q;d" | cut -c2-)\"" >> package_test_vars.env
    - echo "export RUNNER_FROM_BRANCH=\"gitlab-runner\"" >> package_test_vars.env
    - echo "export RUNNER_TO_VERSION=\"$(ci/version)\"" >> package_test_vars.env
    - echo "export RUNNER_TO_BRANCH=\"unstable\"" >> package_test_vars.env

trigger runner package tests:
  extends:
    - .trigger-downstream-pipeline-ref
    - .rules:release:bleeding-edge
  needs:
    - "package test variables"
  trigger:
    # strategy: depend
    project: gitlab-org/ci-cd/runner-tools/gitlab-runner-package-tests

# Fails the pipeline when code_quality produced any findings.
static QA:
  extends:
    - .rules:merge_request_pipelines:no_docs
    - .no_cache
    - .kubernetes runner
  stage: postrelease
  image: alpine:latest
  needs:
    - code_quality
  script: |
    if [ "$(cat gl-code-quality-report.json)" != "[]" ] ; then
      apk add -U --no-cache jq > /dev/null
      jq -C . gl-code-quality-report.json
      exit 1
    fi

.verify-resources:
  extends:
    - .rules:merge_request_pipelines:no_docs
    - .docker
  stage: postrelease
  script:
    - mage resources:verifyAll

verify development resources:
  extends:
    - .verify-resources
    - .rules:release:development:merge-requests
  needs:
    - job: "development docker images"
      artifacts: true

verify bleeding edge resources:
  extends:
    - .verify-resources
    - .rules:release:bleeding-edge
  needs:
    - job: "bleeding edge docker images"
      artifacts: true

verify stable resources:
  extends:
    - .verify-resources
    - .rules:release:stable:branch
  needs:
    - job: "stable docker images"
      artifacts: true

# Creates the GitLab release entry for a stable tag, linking every published
# binary and package from the S3 release bucket.
stable gitlab release:
  stage: postrelease
  extends:
    - .rules:release:stable-or-rc
    - .kubernetes runner
  dependencies: []
  image: registry.gitlab.com/gitlab-org/release-cli:latest
  variables:
    CHANGELOG: https://gitlab.com/gitlab-org/gitlab-runner/blob/$CI_COMMIT_TAG/CHANGELOG.md
    S3: https://gitlab-runner-downloads.s3.amazonaws.com/$CI_COMMIT_TAG
    # Setting the CI_PROJECT_ID variable explicitly because we're running this job
    # also from the https://gitlab.com/gitlab-org/security/gitlab-runner fork. But it
    # should still create the release entry in the canonical one.
    CI_PROJECT_ID: 250833
  environment:
    name: stable/gitlab
    url: https://gitlab.com/gitlab-org/gitlab-runner/-/releases
  before_script: []
  script:
    - echo "Releasing to $S3"
  release:
    name: "$CI_COMMIT_TAG"
    description: |
      See [the changelog]($CHANGELOG) :rocket:

      GitLab Runner documentation can be found at https://docs.gitlab.com/runner/.
    tag_name: "$CI_COMMIT_TAG"
    ref: "$CI_COMMIT_TAG"
    assets:
      links:
        # binaries
        - name: "binary: Linux amd64"
          url: "$S3/binaries/gitlab-runner-linux-amd64"
          filepath: "/binaries/gitlab-runner-linux-amd64"
        - name: "binary: Linux amd64-fips"
          url: "$S3/binaries/gitlab-runner-linux-amd64-fips"
          filepath: "/binaries/gitlab-runner-linux-amd64-fips"
        - name: "binary: Linux 386"
          url: "$S3/binaries/gitlab-runner-linux-386"
          filepath: "/binaries/gitlab-runner-linux-386"
        - name: "binary: Linux arm"
          url: "$S3/binaries/gitlab-runner-linux-arm"
          filepath: "/binaries/gitlab-runner-linux-arm"
        - name: "binary: Linux ppc64el"
          url: "$S3/binaries/gitlab-runner-linux-ppc64el"
          filepath: "/binaries/gitlab-runner-linux-ppc64el"
        - name: "binary: Linux s390x"
          url: "$S3/binaries/gitlab-runner-linux-s390x"
          filepath: "/binaries/gitlab-runner-linux-s390x"
        - name: "binary: Linux riscv64"
          url: "$S3/binaries/gitlab-runner-linux-riscv64"
          filepath: "/binaries/gitlab-runner-linux-riscv64"
        - name: "binary: Linux loong64"
          url: "$S3/binaries/gitlab-runner-linux-loong64"
          filepath: "/binaries/gitlab-runner-linux-loong64"
        - name: "binary: macOS amd64"
          url: "$S3/binaries/gitlab-runner-darwin-amd64"
          filepath: "/binaries/gitlab-runner-darwin-amd64"
        - name: "binary: macOS arm64"
          url: "$S3/binaries/gitlab-runner-darwin-arm64"
          filepath: "/binaries/gitlab-runner-darwin-arm64"
        - name: "binary: FreeBSD amd64"
          url: "$S3/binaries/gitlab-runner-freebsd-amd64"
          filepath: "/binaries/gitlab-runner-freebsd-amd64"
        - name: "binary: FreeBSD arm"
          url: "$S3/binaries/gitlab-runner-freebsd-arm"
          filepath: "/binaries/gitlab-runner-freebsd-arm"
        - name: "binary: FreeBSD 386"
          url: "$S3/binaries/gitlab-runner-freebsd-386"
          filepath: "/binaries/gitlab-runner-freebsd-386"
        - name: "binary: Windows amd64"
          url: "$S3/binaries/gitlab-runner-windows-amd64.zip"
          filepath: "/binaries/gitlab-runner-windows-amd64.zip"
        - name: "binary: Windows arm64"
          url: "$S3/binaries/gitlab-runner-windows-arm64.zip"
          filepath: "/binaries/gitlab-runner-windows-arm64.zip"
        - name: "binary: Windows i386"
          url: "$S3/binaries/gitlab-runner-windows-386.zip"
          filepath: "/binaries/gitlab-runner-windows-386.zip"
        # DEB packages
        - name: "package: DEB amd64"
          url: "$S3/deb/gitlab-runner_amd64.deb"
          filepath: "/packages/deb/gitlab-runner_amd64.deb"
        - name: "package: DEB i686"
          url: "$S3/deb/gitlab-runner_i686.deb"
          filepath: "/packages/deb/gitlab-runner_i686.deb"
        - name: "package: DEB armel"
          url: "$S3/deb/gitlab-runner_armel.deb"
          filepath: "/packages/deb/gitlab-runner_armel.deb"
        - name: "package: DEB armhf"
          url: "$S3/deb/gitlab-runner_armhf.deb"
          filepath: "/packages/deb/gitlab-runner_armhf.deb"
        - name: "package: DEB aarch64"
          url: "$S3/deb/gitlab-runner_aarch64.deb"
          filepath: "/packages/deb/gitlab-runner_aarch64.deb"
        - name: "package: DEB arm64"
          url: "$S3/deb/gitlab-runner_arm64.deb"
          filepath: "/packages/deb/gitlab-runner_arm64.deb"
        - name: "package: DEB ppc64el"
          url: "$S3/deb/gitlab-runner_ppc64el.deb"
          filepath: "/packages/deb/gitlab-runner_ppc64el.deb"
        - name: "package: DEB s390x"
          url: "$S3/deb/gitlab-runner_s390x.deb"
          filepath: "/packages/deb/gitlab-runner_s390x.deb"
        - name: "package: DEB riscv64"
          url: "$S3/deb/gitlab-runner_riscv64.deb"
          filepath: "/packages/deb/gitlab-runner_riscv64.deb"
        # RPM packages
        - name: "package: RPM amd64"
          url: "$S3/rpm/gitlab-runner_amd64.rpm"
          filepath: "/packages/rpm/gitlab-runner_amd64.rpm"
        - name: "package: RPM amd64-fips"
          url: "$S3/rpm/gitlab-runner_amd64-fips.rpm"
          filepath: "/packages/rpm/gitlab-runner_amd64-fips.rpm"
        - name: "package: RPM i686"
          url: "$S3/rpm/gitlab-runner_i686.rpm"
          filepath: "/packages/rpm/gitlab-runner_i686.rpm"
        - name: "package: RPM arm"
          url: "$S3/rpm/gitlab-runner_arm.rpm"
          filepath: "/packages/rpm/gitlab-runner_arm.rpm"
        - name: "package: RPM armhf"
          url: "$S3/rpm/gitlab-runner_armhf.rpm"
          filepath: "/packages/rpm/gitlab-runner_armhf.rpm"
        - name: "package: RPM arm64"
          url: "$S3/rpm/gitlab-runner_arm64.rpm"
          filepath: "/packages/rpm/gitlab-runner_arm64.rpm"
        - name: "package: RPM aarch64"
          url: "$S3/rpm/gitlab-runner_aarch64.rpm"
          filepath: "/packages/rpm/gitlab-runner_aarch64.rpm"
        - name: "package: RPM ppc64le"
          url: "$S3/rpm/gitlab-runner_ppc64le.rpm"
          filepath: "/packages/rpm/gitlab-runner_ppc64le.rpm"
        - name: "package: RPM s390x"
          url: "$S3/rpm/gitlab-runner_s390x.rpm"
          filepath: "/packages/rpm/gitlab-runner_s390x.rpm"
        - name: "package: RPM riscv64"
          url: "$S3/rpm/gitlab-runner_riscv64.rpm"
          filepath: "/packages/rpm/gitlab-runner_riscv64.rpm"
        # Other files
        - name: "checksums"
          url: "$S3/release.sha256"
          filepath: "/release.sha256"
        - name: "checksums GPG signature"
          url: "$S3/release.sha256.asc"
          filepath: "/release.sha256.asc"
        - name: "other release artifacts"
          url: "$S3/index.html"
          filepath: "/index.html"

================================================
FILE: .gitlab/ci/prepare.gitlab-ci.yml
================================================
# Shared base for the CI-image build jobs; each concrete job only sets the
# BUILD_IMAGE / BUILD_DOCKERFILE variables consumed by ci/build_ci_image.
.image_builder:
  extends:
    - .docker
  stage: prepare
  image: docker:${DOCKER_VERSION}-git
  script:
    - apk add --no-cache --upgrade curl
    - source ./ci/build_ci_image

prepare ci image:
  extends:
    - .image_builder
    - .rules:prepare:ci:image:merge-requests
  variables:
    BUILD_IMAGE: $CI_IMAGE
    BUILD_DOCKERFILE: ./dockerfiles/ci/Dockerfile
    PWSH_VERSION: "7.4.6-1"

prepare alpine-no-root image:
  extends:
    - .image_builder
    - .rules:prepare:alpine-no-root:image:merge-requests
  variables:
    BUILD_IMAGE: registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest
    BUILD_DOCKERFILE: ./tests/dockerfiles/alpine-no-root/Dockerfile

prepare alpine-entrypoint image:
  extends:
    - .image_builder
    - .rules:prepare:alpine-entrypoint:image:merge-requests
  variables:
    BUILD_IMAGE: registry.gitlab.com/gitlab-org/gitlab-runner/alpine-entrypoint:latest
    BUILD_DOCKERFILE: ./tests/dockerfiles/alpine-entrypoint/Dockerfile

prepare alpine-entrypoint-stderr image:
  extends:
    - .image_builder
    - .rules:prepare:alpine-entrypoint:image:merge-requests
  variables:
    BUILD_IMAGE: registry.gitlab.com/gitlab-org/gitlab-runner/alpine-entrypoint-stderr:latest
    BUILD_DOCKERFILE: ./tests/dockerfiles/alpine-entrypoint/Dockerfile.stderr

prepare alpine-entrypoint-pre-post-trap image:
  extends:
    - .image_builder
    - .rules:prepare:alpine-entrypoint:image:merge-requests
  variables:
    BUILD_IMAGE: registry.gitlab.com/gitlab-org/gitlab-runner/alpine-entrypoint-pre-post-trap:latest
    BUILD_DOCKERFILE: ./tests/dockerfiles/alpine-entrypoint/Dockerfile.pre-post-trap

prepare powershell-entrypoint-pre-post-trap image:
  extends:
    - .image_builder
    - .rules:prepare:powershell-entrypoint:image:merge-requests
  variables:
    BUILD_IMAGE: registry.gitlab.com/gitlab-org/gitlab-runner/powershell-entrypoint-pre-post-trap:latest
    BUILD_DOCKERFILE: ./tests/dockerfiles/powershell-entrypoint/Dockerfile.pre-post-trap

prepare alpine-id-overflow image:
  extends:
    - .image_builder
    - .rules:prepare:alpine-id-overflow:image:merge-requests
  variables:
    BUILD_IMAGE: registry.gitlab.com/gitlab-org/gitlab-runner/alpine-id-overflow:latest
    BUILD_DOCKERFILE: ./tests/dockerfiles/alpine-id-overflow/Dockerfile

prepare helper-entrypoint image:
  extends:
    - .docker
    - .rules:prepare:gitlab-runner-helper-entrypoint:image:merge-requests
  stage: prepare
  variables:
    BUILD_DOCKERFILE_BASEDIR: ./tests/dockerfiles/gitlab-runner-helper-entrypoint
    BUILD_IMAGE: registry.gitlab.com/gitlab-org/gitlab-runner/helper-entrypoint:latest
    BUILD_DOCKERFILE: "$BUILD_DOCKERFILE_BASEDIR/dockerfile"
  script:
    - make helper-bin-host
    - mkdir -p "$BUILD_DOCKERFILE_BASEDIR/binaries/"
    - cp out/binaries/gitlab-runner-helper/gitlab-runner-helper.x86_64 "$BUILD_DOCKERFILE_BASEDIR/binaries/gitlab-runner-helper"
    - source ./ci/build_ci_image

prepare go fips:
  extends:
    - .docker
    - .rules:prepare:go-fips:image:merge-requests
  stage: prepare
  image: docker:${DOCKER_VERSION}-git
  variables:
    BUILD_IMAGE: $GO_FIPS_IMAGE
    GO_VERSION: $GO_FIPS_VERSION
    GO_FIPS_BASE_IMAGE: "redhat/${GO_FIPS_UBI_VERSION}-minimal:latest"
  script:
    - apk add --no-cache --upgrade curl make bash
    - make go-fips-docker

prepare ubi base:
  extends:
    - .docker
    - .rules:prepare:ubi-base:image:merge-requests
  timeout: 4h
  stage: prepare
  image: docker:${DOCKER_VERSION}-git
  script:
    - apk add --no-cache --upgrade curl make bash
    - make ubi-fips-base-docker

test ci scripts:
  stage: prepare
  extends:
    - .rules:prepare:test-ci-scripts:merge-requests
  needs:
    - job: "prepare ci image"
      optional: true
  script:
    - make test_go_scripts

================================================
FILE: .gitlab/ci/qa.gitlab-ci.yml
================================================
check version definition:
  stage: qa
  extends:
    - .rules:merge_request_pipelines
    - .kubernetes runner
  needs: []
  script:
    - grep -E '^[0-9]+\.[0-9]+\.[0-9]+$' VERSION

check modules:
  stage: qa
  extends:
    - .rules:merge_request_pipelines:no_docs
    - .no_cache_and_dependencies
    - .kubernetes runner
  needs:
    - "prepare done"
  script:
    - make check_modules

check generated files:
  stage: qa
  extends:
    - .rules:merge_request_pipelines:no_docs
    - .no_cache_and_dependencies
    - .kubernetes runner
    - .check generated files kubernetes limits
  needs:
    - "prepare done"
  script:
    - make check_generated_files

check magefiles:
  stage: qa
  extends:
    - .rules:merge_request_pipelines:no_docs
    - .no_cache_and_dependencies
    - .kubernetes runner
  needs:
    - "prepare done"
  script:
    - make check_magefiles

check test directives:
  stage: qa
  extends:
    - .rules:merge_request_pipelines:no_docs
    - .no_cache_and_dependencies
    - .kubernetes runner
  needs:
    - "prepare done"
  script:
    - make check_test_directives

code_quality:
  stage: qa
  extends:
    - .rules:merge_request_pipelines:no_docs
    - .no_dependencies
    # Use GOCACHE instead of GOLANGCI_LINT_CACHE
    # to avoid [false lint positives](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/2187#note_373008672)
    - .go-cache
  image: registry.gitlab.com/gitlab-org/ci-cd/runner-tools/runner-linters:${GOLANGLINT_VERSION}-go${GO_VERSION}
  needs: []
  allow_failure: true
  variables:
    REPORT_FILE: gl-code-quality-report.json
    LINT_FLAGS: "--color=never --timeout=15m"
    OUT_FORMAT: "--output.code-climate.path=gl-code-quality-report.json"
  before_script:
    - !reference [.go-cache, before_script]
    # Ensure the goargs linter plugin is available at .tmp/bin/goargs.so to suit .golangci.yml
    - mkdir -p .tmp/bin && ln -s /usr/lib/goargs.so .tmp/bin/goargs.so
    # Copy the Docker image's golangci-lint to the location expected by `make lint` so that it is not built
    - cp $(which golangci-lint) .tmp/bin/golangci-lint
  script:
    - make --silent lint
  timeout: 20 minutes
  artifacts:
    reports:
      codequality: ${REPORT_FILE}
    paths:
      - ${REPORT_FILE}
    when: always
    expire_in: 7d

mage tests:
  extends:
    - .rules:merge_request_pipelines:no_docs
    - .kubernetes runner
  stage: qa
  needs:
    - "prepare done"
  script:
    - make mage-test

# Perform documentation linting and link checking on Markdown files
docs:lint markdown:
  image: $DOCS_LINT_IMAGE
  stage: qa
  extends:
    - .rules:merge_request_pipelines:docs
    - .no_cache_and_dependencies
    - .kubernetes runner
  needs: []
  script:
    # Makefiles in this project expect Go to be available
    - apk add --quiet go
    # Lint content and Markdown, and check links
    - make lint-docs VALE_MIN_ALERT_LEVEL=error

docs:lint i18n markdown:
  image: $DOCS_LINT_IMAGE
  stage: qa
  extends:
    - .rules:merge_request_pipelines:docs-i18n
    - .no_cache_and_dependencies
  needs: []
  allow_failure: true
  script:
    # Makefiles in this project expect Go to be available
    - apk add --quiet go
    # Lint i18n content and Markdown, and check links
    - make lint-i18n-docs VALE_MIN_ALERT_LEVEL=error

# Verify localized documentation files have corresponding English versions
docs:lint i18n paths:
  image: $DOCS_LINT_IMAGE
  stage: qa
  extends:
    - .rules:merge_request_pipelines:docs-i18n
    - .no_cache_and_dependencies
    - .kubernetes runner
  needs: []
  allow_failure: true
  script:
    # Run the i18n path verification script
    - ./scripts/docs-i18n-verify-paths

# Regenerates the feature-flags doc and fails if it differs from the committed copy.
docs:check feature flags:
  stage: qa
  extends:
    - .rules:merge_request_pipelines
    - .no_cache_and_dependencies
    - .kubernetes runner
  needs: []
  script:
    - cp docs/configuration/feature-flags.md docs/configuration/feature-flags.md.orig
    - make update_feature_flags_docs
    - |
      diff docs/configuration/feature-flags.md.orig docs/configuration/feature-flags.md || (
        echo
        echo "Feature Flags list in documentation is not up-to-date"
        echo "Run 'make update_feature_flags_docs' to update it"
        echo
        exit 1
      )

docs:check development docs Go version:
  extends:
    - .rules:merge_request_pipelines:docs
    - .no_cache_and_dependencies
    - .kubernetes runner
  needs: []
  stage: qa
  script:
    - export GO_VERSION=$(cat .gitlab/ci/_common.gitlab-ci.yml | yq '.variables.GO_VERSION')
    - export EXIT_CODE=0
    - grep $GO_VERSION docs/development/_index.md || EXIT_CODE=$?
    - if [ $EXIT_CODE -ne 0 ]; then echo "Make sure to update all Go versions in docs/development/_index.md to $GO_VERSION"; exit 1; fi

docs:check Kubernetes API docs:
  extends:
    - .rules:merge_request_pipelines:docs
    - .no_cache_and_dependencies
    - .kubernetes runner
  needs: []
  stage: qa
  script:
    - cp docs/executors/kubernetes/_index.md docs/executors/kubernetes/_index.md.orig
    - mage k8s:generatePermissionsDocs
    - |
      diff docs/executors/kubernetes/_index.md.orig docs/executors/kubernetes/_index.md || (
        echo
        echo "Kubernetes API list in documentation is not up-to-date"
        echo "Run 'mage k8s:generatePermissionsDocs' to update it"
        echo
        exit 1
      )

# This job is triggered weekly and needs either a PRIVATE_TOKEN or CI_JOB_TOKEN variable.
docs:check supported distros package docs: rules: - if: $CI_PIPELINE_SOURCE == "schedule" && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PROJECT_PATH == "gitlab-org/gitlab-runner" stage: qa script: - cp docs/install/linux-repository.md docs/install/linux-repository.md.orig - mage package:docs - | diff docs/install/linux-repository.md.orig docs/install/linux-repository.md || ( echo echo "Supported distributions documentation is not up-to-date" echo "Run 'mage package:docs' to update it" echo exit 1 ) docs:check Hugo build: image: registry.gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/docs-gitlab-com-builder:hugo-0.150.1 extends: - .rules:merge_request_pipelines:docs-all - .no_cache_and_dependencies - .kubernetes runner needs: [] stage: qa variables: DOCS_BRANCH: "main" before_script: # Check if this a release branch, which would be the case for a backport. # If this is a backport MR, we need to checkout the appropriate version # of the Docs website. - | if [[ $CI_MERGE_REQUEST_TARGET_BRANCH_NAME =~ [0-9]+-[0-9]+-stable ]]; then BRANCH_NAME=$CI_MERGE_REQUEST_TARGET_BRANCH_NAME echo "Detected merge request to stable branch: $BRANCH_NAME" # Check if we're directly on a stable branch (direct push/commit) elif [[ $CI_COMMIT_BRANCH =~ [0-9]+-[0-9]+-stable ]]; then BRANCH_NAME=$CI_COMMIT_BRANCH echo "Detected direct commit to stable branch: $BRANCH_NAME" fi # Extract version info if we found a stable branch if [[ -n $BRANCH_NAME ]]; then MAJOR=$(echo $BRANCH_NAME | cut -d '-' -f 1) MINOR=$(echo $BRANCH_NAME | cut -d '-' -f 2) # Convert GitLab Runner style (17-9-stable) to GitLab Docs style (17.9) DOCS_BRANCH_CANDIDATE="$MAJOR.$MINOR" # Check if the branch exists in the Docs website repo, fallback to main if not if git ls-remote --heads --exit-code https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com.git "refs/heads/$DOCS_BRANCH_CANDIDATE" >/dev/null 2>&1; then DOCS_BRANCH="$DOCS_BRANCH_CANDIDATE" echo "Using docs-gitlab-com branch $DOCS_BRANCH for 
release branch" else DOCS_BRANCH="main" echo "Branch $DOCS_BRANCH_CANDIDATE does not exist, falling back to main" fi fi # Clone the GitLab Docs project - echo "Cloning Docs site $DOCS_BRANCH branch..." - git clone --depth 1 --filter=tree:0 --branch $DOCS_BRANCH https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com.git - cd docs-gitlab-com - make add-latest-icons script: # Test that Hugo will build - hugo --gc --printPathWarnings --panicOnWarning --environment test # Test for invalid index pages # See https://gitlab.com/gitlab-org/technical-writing/docs-gitlab-com/-/blob/main/scripts/check-index-filenames.sh - make check-index-pages SEARCH_DIR="../docs" - make check-index-pages SEARCH_DIR="../docs-locale" yaml:lint: stage: qa image: node:alpine needs: [] extends: - .rules:merge_request_pipelines - .kubernetes runner script: - npm install -g prettier - echo "Checking YAML formatting in .gitlab/ci/ directory..." - prettier --check ".gitlab/ci/**/*.{yml,yaml}" --log-level warn ================================================ FILE: .gitlab/ci/rebase.gitlab-ci.yml ================================================ # Rebase branches in $REPO_REBASE_BRANCHES, separated by a comma on top of main rebase on main: extends: - .no_cache_and_dependencies stage: rebase rules: - if: $REPO_REBASE_BRANCHES != "" && $REPO_REBASE_BRANCHES != null && $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PROJECT_PATH == "gitlab-org/gitlab-runner" allow_failure: true script: - | git config --global user.email "gitlab-runner-ci-rebase@gitlab.com" git config --global user.name "GitLab Runner CI Rebase" git fetch origin $CI_DEFAULT_BRANCH git checkout -B $CI_DEFAULT_BRANCH origin/$CI_DEFAULT_BRANCH git remote add push-remote https://oauth2:${REPO_REBASE_PUSH_TOKEN}@${CI_SERVER_HOST}/${CI_PROJECT_PATH}.git IFS=',' read -ra BRANCHES <<< "$REPO_REBASE_BRANCHES" for BRANCH in "${BRANCHES[@]}"; do echo "Processing branch: $BRANCH" git fetch origin $BRANCH git checkout $BRANCH if ! 
git rebase $CI_DEFAULT_BRANCH; then echo "Rebase failed for branch $BRANCH" exit 1 fi # --force-with-lease doesn't work on shallow clones git push push-remote $BRANCH --force git checkout $CI_DEFAULT_BRANCH done ================================================ FILE: .gitlab/ci/release.gitlab-ci.yml ================================================ # S3 Releases ############# .release_s3: stage: release dependencies: - "prebuilt helper images" - "binaries" - "package-deb" - "package-rpm" - "package-helpers" before_script: - ./ci/touch_git - | # checking GPG signing support if [ -f "$GPG_KEY_PATH" ]; then export GPG_KEY=$(cat ${GPG_KEY_PATH}) export GPG_PASSPHRASE=$(cat ${GPG_PASSPHRASE_PATH}) else echo -e "\033[0;31m****** GPG signing disabled ******\033[0m" fi script: - make release_s3 tags: - !reference [.instance-medium] .release_pulp: stage: release dependencies: - "package-deb" - "package-rpm" - "package-helpers" before_script: - ./ci/touch_git - mage pulp:createConfig script: - mage pulp:push deb "$CI_JOB_NAME" "$DIST_FLAVOR" - mage pulp:push rpm "$CI_JOB_NAME" "$DIST_FLAVOR" .release_artifacts: artifacts: paths: - out/release_artifacts/* development S3: extends: - .release_s3 - .rules:runner-only:release:development:merge-requests environment: name: development/s3/${CI_COMMIT_REF_NAME} url: https://gitlab-runner-downloads.s3.amazonaws.com/${CI_COMMIT_REF_NAME}/index.html bleeding edge S3: extends: - .release_s3 - .rules:release:bleeding-edge environment: name: bleeding_edge/s3 url: https://gitlab-runner-downloads.s3.amazonaws.com/${CI_COMMIT_REF_NAME}/index.html stable S3: extends: - .release_s3 - .rules:release:stable:branch environment: name: stable/s3 url: https://gitlab-runner-downloads.s3.amazonaws.com/${CI_COMMIT_REF_NAME}/index.html unstable pulp: extends: - .release_pulp - .rules:release:bleeding-edge environment: name: bleeding_edge/pulp url: https://pulp.gitlab.com/runner/unstable parallel: matrix: - DIST_FLAVOR: - debian - ubuntu - el - fedora - 
amazon - sles - opensuse stable pulp: extends: - .release_pulp - .rules:release:stable:branch environment: name: stable/pulp url: https://pulp.gitlab.com/runner/gitlab-runner parallel: matrix: - DIST_FLAVOR: - debian - ubuntu - raspbian - linuxmint - el - ol - fedora - amazon - sles - opensuse # Image Registry Releases ######################### .overwrite_security_docker_variables: &overwrite_security_docker_variables | if [[ $CI_COMMIT_REF_NAME =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]] && [[ $CI_PROJECT_PATH == "gitlab-org/security/gitlab-runner" ]]; then export CI_REGISTRY="registry.gitlab.com" export CI_REGISTRY_IMAGE="registry.gitlab.com/gitlab-org/gitlab-runner" export CI_REGISTRY_USER=$CI_REGISTRY_USER_CANONICAL export CI_REGISTRY_PASSWORD=$CI_REGISTRY_PASSWORD_CANONICAL fi development docker images: stage: release extends: - .docker - .go-cache - .rules:release:development:merge-requests needs: - "helper images" - "runner images" artifacts: paths: - out/release_artifacts/* script: - *overwrite_security_docker_variables - echo "${CI_REGISTRY_PASSWORD}" | docker login --username "${CI_REGISTRY_USER}" --password-stdin "${CI_REGISTRY}" - (cd scripts/pusher && go build) - tags=$(make print_image_tags) # dev gitlab registry images - (cd scripts/pusher && ./pusher runner-images.json "${CI_REGISTRY_IMAGE}/gitlab-runner-dev" $tags) - (cd scripts/pusher && ./pusher helper-images.json "${CI_REGISTRY_IMAGE}/gitlab-runner-helper-dev" $tags) bleeding edge docker images: stage: release extends: - .docker - .go-cache - .rules:release:bleeding-edge environment: name: bleeding_edge/docker_images/linux url: https://hub.docker.com/r/gitlab/gitlab-runner/tags/ needs: - "helper images" - "runner images" artifacts: paths: - out/release_artifacts/* script: - *overwrite_security_docker_variables - echo "${CI_REGISTRY_PASSWORD}" | docker login --username "${CI_REGISTRY_USER}" --password-stdin "${CI_REGISTRY}" - echo "${DOCKER_HUB_PASSWORD}" | docker login --username "${DOCKER_HUB_USER}" 
--password-stdin "registry.hub.docker.com/gitlab" - (cd scripts/pusher && go build) - tags=$(make print_image_tags) # bleeding gitlab registry images - (cd scripts/pusher && ./pusher runner-images.json "${CI_REGISTRY_IMAGE}" $tags) - (cd scripts/pusher && ./pusher helper-images.json "${CI_REGISTRY_IMAGE}/gitlab-runner-helper" $tags) # bleeding docker hub registry images - (cd scripts/pusher && ./pusher runner-images.json "registry.hub.docker.com/gitlab/gitlab-runner" $tags) - (cd scripts/pusher && ./pusher helper-images.json "registry.hub.docker.com/gitlab/gitlab-runner-helper" $tags) stable docker images: stage: release variables: # Setting the CI_PROJECT_ID and CI_REGISTRY_IMAGE variable explicitly because we're # running this job also from the https://gitlab.com/gitlab-org/security/gitlab-runner # fork. But it should still create the release entry in the canonical one. CI_REGISTRY: registry.gitlab.com CI_REGISTRY_IMAGE: registry.gitlab.com/gitlab-org/gitlab-runner extends: - .docker - .go-cache - .rules:release:stable:branch environment: name: stable/docker_images/linux url: https://hub.docker.com/r/gitlab/gitlab-runner/tags/ dependencies: - "helper images" - "runner images" artifacts: paths: - out/release_artifacts/* script: - *overwrite_security_docker_variables - echo "${CI_REGISTRY_PASSWORD}" | docker login --username "${CI_REGISTRY_USER}" --password-stdin "${CI_REGISTRY}" - echo "${DOCKER_HUB_PASSWORD}" | docker login --username "${DOCKER_HUB_USER}" --password-stdin "registry.hub.docker.com/gitlab" - aws --region us-east-1 ecr-public get-login-password | docker login --username "AWS" --password-stdin "public.ecr.aws/gitlab" - (cd scripts/pusher && go build) - tags=$(make print_image_tags) # stable gitlab registry images - (cd scripts/pusher && ./pusher runner-images.json "${CI_REGISTRY_IMAGE}" $tags) - (cd scripts/pusher && ./pusher helper-images.json "${CI_REGISTRY_IMAGE}/gitlab-runner-helper" $tags) # stable docker hub registry images - (cd scripts/pusher 
&& ./pusher runner-images.json "registry.hub.docker.com/gitlab/gitlab-runner" $tags) - (cd scripts/pusher && ./pusher helper-images.json "registry.hub.docker.com/gitlab/gitlab-runner-helper" $tags) # stable aws registry images - (cd scripts/pusher && ./pusher runner-images.json "public.ecr.aws/gitlab/gitlab-runner" $tags) - (cd scripts/pusher && ./pusher helper-images.json "public.ecr.aws/gitlab/gitlab-runner-helper" $tags) ================================================ FILE: .gitlab/ci/test-kubernetes-integration.gitlab-ci.yml ================================================ .integration kubernetes: extends: - .rules:merge_request_pipelines:no_docs:no-community-mr:no-security-mr tags: - $KUBERNETES_RUNNER_INTEGRATION_TAG stage: test kubernetes integration needs: - "provision integration kubernetes" - job: "development docker images" optional: true - job: "bleeding edge docker images" optional: true - job: "stable docker images" optional: true variables: KUBERNETES_SERVICE_ACCOUNT_OVERWRITE: "k8s-runner-integration-tests-runner-$CI_PIPELINE_ID" before_script: - go install gotest.tools/gotestsum@latest script: # Note: We use hide-summary=output due to https://github.com/gotestyourself/gotestsum/issues/423 - > gotestsum --format=testname --format-hide-empty-pkg --rerun-fails=3 \ --hide-summary=output --packages=gitlab.com/gitlab-org/gitlab-runner/executors/kubernetes \ --junitfile=junit_report.xml --junitfile-hide-empty-pkg -- \ -timeout=10m -parallel=20 $EXTRA_GO_TEST_FLAGS \ -tags=integration,kubernetes ./executors/kubernetes/... 
artifacts: when: always paths: - junit_report.xml reports: junit: junit_report.xml provision integration kubernetes: extends: - .integration kubernetes needs: - "prepare done" variables: KUBERNETES_SERVICE_ACCOUNT_OVERWRITE: "k8s-runner-integration-tests-provisioner" script: - mage k8s:provisionIntegrationKubernetes $CI_PIPELINE_ID integration kubernetes exec legacy: extends: - .integration kubernetes resource_group: "$CI_COMMIT_REF_SLUG-k8s-integration-exec-legacy" variables: CI_RUNNER_TEST_FEATURE_FLAG: "FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY" CI_RUNNER_TEST_FEATURE_FLAG_VALUE: "true" EXTRA_GO_TEST_FLAGS: "-run=TestRunIntegrationTestsWithFeatureFlag" integration kubernetes attach: extends: - .integration kubernetes resource_group: "$CI_COMMIT_REF_SLUG-k8s-integration-attach" variables: CI_RUNNER_TEST_FEATURE_FLAG: "FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY" CI_RUNNER_TEST_FEATURE_FLAG_VALUE: "false" EXTRA_GO_TEST_FLAGS: "-run=TestRunIntegrationTestsWithFeatureFlag" integration kubernetes: extends: - .integration kubernetes resource_group: "$CI_COMMIT_REF_SLUG-k8s-integration" variables: EXTRA_GO_TEST_FLAGS: "-skip=TestRunIntegrationTestsWithFeatureFlag" destroy integration kubernetes: extends: - .integration kubernetes needs: - job: "integration kubernetes" optional: true - job: "integration kubernetes exec legacy" optional: true - job: "integration kubernetes attach" optional: true variables: KUBERNETES_SERVICE_ACCOUNT_OVERWRITE: "k8s-runner-integration-tests-provisioner" script: - mage k8s:destroyIntegrationKubernetes $CI_PIPELINE_ID ================================================ FILE: .gitlab/ci/test.gitlab-ci.yml ================================================ include: - component: ${CI_SERVER_FQDN}/components/dependency-scanning/main@1.1.1 - component: ${CI_SERVER_FQDN}/components/sast/sast@3.4.0 inputs: run_advanced_sast: true - template: Security/Coverage-Fuzzing.latest.gitlab-ci.yml # Overriding security scanning jobs from templates, because # 
we need to replace the rules with our own, the same # as in `.merge_request_pipelines` template. dependency-scanning: rules: !reference [".rules:merge_request_pipelines:no_docs", rules] variables: FF_SCRIPT_TO_STEP_MIGRATION: "false" # Disable the FF because it breaks the component gitlab-advanced-sast: rules: !reference [".rules:merge_request_pipelines:no_docs", rules] .linux test: extends: - .go-cache stage: test artifacts: paths: - .splitic/ when: always expire_in: 7d reports: junit: .splitic/junit_*.xml unit test: extends: - .linux test - .rules:merge_request_pipelines:no_docs:unit_test - .kubernetes runner - .unit tests kubernetes limits needs: - "clone test repo" - "prepare done" script: - make splitic - > .tmp/bin/splitic test -race \ -flaky .flaky-tests.txt \ -junit-report .splitic/junit_report.xml \ -cover-report .splitic/cover_0.profile -cover -coverpkg gitlab.com/gitlab-org/gitlab-runner/... \ -tags !integration -env-passthrough ./scripts/envs/allowlist_common.env -env-passthrough ./scripts/envs/allowlist_unix.env ./... \ -- -ldflags "$(make print_test_ldflags)" .linux integration test: extends: - .docker .with outer token: variables: OUTER_CI_JOB_TOKEN: ${CI_JOB_TOKEN} integration test: extends: - .linux test - .rules:merge_request_pipelines:no_docs:no-community-mr - .linux integration test - .with outer token needs: - "clone test repo" - "prebuilt helper images" - "prepare done" script: - docker import out/helper-images/prebuilt-alpine-latest-x86_64.tar.xz registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:x86_64-latest - go install gitlab.com/gitlab-org/fleeting/fleeting-plugin-static/cmd/fleeting-plugin-static@latest - make splitic - > .tmp/bin/splitic test \ -flaky .flaky-tests.txt \ -junit-report .splitic/junit_${CI_NODE_INDEX}.xml \ -cover-report .splitic/cover_${CI_NODE_INDEX}.profile -cover -coverpkg gitlab.com/gitlab-org/gitlab-runner/... 
\ -env-passthrough ./scripts/envs/allowlist_common.env -env-passthrough ./scripts/envs/allowlist_unix.env \ -tags integration \ ./... \ -- -ldflags "$(make print_test_ldflags)" -timeout 25m parallel: 4 integration test (docker, steps): extends: - integration test variables: RUNNER_TEST_FEATURE_FLAGS: "FF_SCRIPT_TO_STEP_MIGRATION" script: - docker import out/helper-images/prebuilt-alpine-latest-x86_64.tar.xz registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:x86_64-latest - go install gitlab.com/gitlab-org/fleeting/fleeting-plugin-static/cmd/fleeting-plugin-static@latest - make splitic - > .tmp/bin/splitic test \ -flaky .flaky-tests.txt \ -junit-report .splitic/junit_${CI_NODE_INDEX}_docker_steps.xml \ -cover-report .splitic/cover_${CI_NODE_INDEX}_docker_steps.profile -cover -coverpkg gitlab.com/gitlab-org/gitlab-runner/... \ -env-passthrough ./scripts/envs/allowlist_common.env -env-passthrough ./scripts/envs/allowlist_unix.env \ -tags integration \ ./executors/docker \ -- -ldflags "$(make print_test_ldflags)" -timeout 1h parallel: 1 when: manual allow_failure: true integration test (docker, concrete, steps): extends: - integration test variables: RUNNER_TEST_FEATURE_FLAGS: "FF_CONCRETE" script: - docker import out/helper-images/prebuilt-alpine-latest-x86_64.tar.xz registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:x86_64-latest - go install gitlab.com/gitlab-org/fleeting/fleeting-plugin-static/cmd/fleeting-plugin-static@latest - make splitic - > .tmp/bin/splitic test \ -flaky .flaky-tests.txt \ -junit-report .splitic/junit_${CI_NODE_INDEX}_docker_concrete_steps.xml \ -cover-report .splitic/cover_${CI_NODE_INDEX}_docker_concrete_steps.profile -cover -coverpkg gitlab.com/gitlab-org/gitlab-runner/... 
\ -env-passthrough ./scripts/envs/allowlist_common.env -env-passthrough ./scripts/envs/allowlist_unix.env \ -tags integration \ ./executors/docker \ -- -ldflags "$(make print_test_ldflags)" -timeout 1h parallel: 1 when: manual allow_failure: true integration test with race: extends: - integration test - .go-cache variables: CGO_ENABLED: "1" script: - make splitic - > .tmp/bin/splitic test \ -flaky .flaky-tests.txt \ -race \ -junit-report .splitic/junit_${CI_NODE_INDEX}.xml \ -cover-report .splitic/cover_${CI_NODE_INDEX}.profile -cover -coverpkg gitlab.com/gitlab-org/gitlab-runner/... \ -env-passthrough ./scripts/envs/allowlist_common.env -env-passthrough ./scripts/envs/allowlist_unix.env \ -tags integration \ ./... \ -- -ldflags "$(make print_test_ldflags)" -timeout 40m fuzz variable mask: extends: - .fuzz_base - .no_dependencies - .rules:merge_request_pipelines:no_docs - .kubernetes runner image: golang:$GO_VERSION stage: test variables: COVFUZZ_SEED_CORPUS: "./common/buildlogger/internal/testdata/corpus" script: - apt update && apt install -y clang - go install github.com/dvyukov/go-fuzz/go-fuzz@latest && go install github.com/dvyukov/go-fuzz/go-fuzz-build@latest && go get github.com/dvyukov/go-fuzz/go-fuzz-dep@latest - go-fuzz-build -libfuzzer -o fuzz_variable_mask.a -preserve crypto/internal/bigmod ./common/buildlogger/internal - clang -fsanitize=fuzzer fuzz_variable_mask.a -o fuzz_variable_mask - ./gitlab-cov-fuzz run -- ./fuzz_variable_mask -only_ascii=1 -max_len=128 -max_total_time=300 allow_failure: false .windows test: extends: - .rules:merge_request_pipelines:no_docs - .go-cache-windows stage: test parallel: 6 before_script: - !reference [.go-cache-windows, before_script] - start-service docker - | @( @{DisableRealtimeMonitoring = $true} ) | Foreach-Object { Set-MpPreference @_ } - git config --system core.longpaths true - New-ItemProperty -Path "HKLM:\SYSTEM\CurrentControlSet\Control\FileSystem" ` -Name "LongPathsEnabled" -Value 1 -PropertyType DWORD 
-Force - $ProgressPreference = 'SilentlyContinue' - (Measure-Command { curl -o golang-windows-amd64.zip $env:RUNNER_IMAGES_WINDOWS_GO_URL }).TotalSeconds - if (Test-Path "C:\Program Files\Go") { Remove-Item -Path "C:\Program Files\Go" -Recurse -Force } - New-Item -Path "C:\Program Files\Go" -ItemType Directory > $null - (Measure-Command { 7z x .\golang-windows-amd64.zip -o"C:\Program Files\Go" }).TotalSeconds - rm golang-windows-amd64.zip - $env:Path = "C:\Program Files\Go\bin;$env:Path" - go version - echo $env:GOCACHE - go env GOCACHE - go install gitlab.com/ajwalker/splitic@latest - $env:Path += ";$(go env GOPATH)/bin" artifacts: paths: - .splitic/ when: always expire_in: 7d reports: junit: .splitic/junit_*.xml allow_failure: exit_codes: 99 .windows unit test: extends: - .windows test parallel: 2 needs: - "clone test repo" #- 'prepare done' script: - splitic test -flaky .flaky-tests.txt -junit-report .splitic/junit_${WINDOWS_VERSION}_${CI_NODE_INDEX}.xml -quarantined ci/.test-failures.${WINDOWS_VERSION}.txt -cover-report .splitic/cover_windows_${CI_NODE_INDEX}.profile -cover -coverpkg gitlab.com/gitlab-org/gitlab-runner/... -env-passthrough ./scripts/envs/allowlist_common.env -env-passthrough ./scripts/envs/allowlist_windows.env -fail-exit-code 99 ./... 
-- -timeout 30m .windows integration test: extends: - .windows test - .with outer token parallel: 4 needs: - "clone test repo" - "prepare done" script: # pre-pull windows images before starting tests - choco install -y zstandard --version=$ZSTD_VERSION --checksum64=$ZSTD_CHECKSUM - zstd -d out/helper-images/prebuilt-windows-${WINDOWS_PREBUILT}-x86_64.docker.tar.zst - $output = docker load --input "out/helper-images/prebuilt-windows-${WINDOWS_PREBUILT}-x86_64.docker.tar" 2>&1 - $image_id = ($output | Select-String "Loaded image ID:").ToString().Split()[3] - docker tag ${image_id} gitlab/gitlab-runner-helper:x86_64-bleeding-${WINDOWS_VERSION} - docker tag ${image_id} registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:x86_64-latest-${WINDOWS_VERSION} - docker pull registry.gitlab.com/gitlab-org/ci-cd/tests/liveness:0.1.0 - docker network create -d "nat" test-network - docker network rm test-network - go install gitlab.com/gitlab-org/fleeting/fleeting-plugin-static/cmd/fleeting-plugin-static@latest - splitic test -flaky .flaky-tests.txt -junit-report .splitic/junit_${WINDOWS_VERSION}_${CI_NODE_INDEX}.xml -quarantined ci/.test-failures.${WINDOWS_VERSION}.txt -cover-report .splitic/cover_windows_${CI_NODE_INDEX}.profile -cover -coverpkg gitlab.com/gitlab-org/gitlab-runner/... -env-passthrough ./scripts/envs/allowlist_common.env -env-passthrough ./scripts/envs/allowlist_windows.env -fail-exit-code 99 -tags integration ./... -- -timeout 55m windows 1809 compile tests: extends: - .windows unit test - .windows1809 - .rules:merge_request_pipelines:no_docs:no-community-mr parallel: null script: - go test -count=1 --tags=integration,kubernetes -run=nope ./... - go test -count=1 --tags=integration,steps -run=nope ./... - go test -count=1 --tags=integration -run=nope ./... - go test -count=1 -run=nope ./... 
windows 21H2 compile tests: extends: - .windows unit test - .windows21H2 - .rules:merge_request_pipelines:no_docs:no-community-mr parallel: null script: - go test -count=1 --tags=integration,kubernetes -run=nope ./... - go test -count=1 --tags=integration,steps -run=nope ./... - go test -count=1 --tags=integration -run=nope ./... - go test -count=1 -run=nope ./... # Only test the oldest supported version in merge requests, # but test all versions in the default branch. windows 1809 unit tests: extends: - .windows unit test - .windows1809 - .rules:merge_request_pipelines:no_docs:no-community-mr windows 21H2 unit tests: extends: - .windows unit test - .windows21H2 - .rules:merge_request_pipelines:no_docs:no-community-mr windows 1809 integration tests: extends: - .windows integration test - .windows1809 - .rules:merge_request_pipelines:no_docs:no-community-mr needs: - "clone test repo" - "prepare done" - "prebuilt helper images windows 2019" windows 21H2 integration tests: extends: - .windows integration test - .windows21H2 - .rules:merge_request_pipelines:no_docs:no-community-mr needs: - "clone test repo" - "prepare done" - "prebuilt helper images windows 2022" logging-field-validator: stage: test image: golang:latest extends: - .no_dependencies - .rules:merge_request_pipelines:no_docs - .go-cache script: - make validate-log-fields ================================================ FILE: .gitlab/dependency_decisions.yml ================================================ --- - - :license - github.com/ayufan/golang-kardianos-service - zlib - :who: :why: :versions: [] :when: 2019-05-03 07:43:31.343341000 Z - - :license - github.com/pmezard/go-difflib - New BSD - :who: :why: :versions: [] :when: 2019-05-03 07:56:01.347974000 Z - - :license - github.com/pkg/errors - BSD-2-Clause - :who: :why: :versions: [] :when: 2019-05-03 07:56:16.618717000 Z - - :license - github.com/howeyc/gopass - ISC - :who: :why: :versions: [] :when: 2019-05-03 07:56:49.679855000 Z - - :license - 
github.com/Nvveen/Gotty - BSD-2-Clause - :who: :why: :versions: [] :when: 2019-05-03 07:57:00.398541000 Z - - :license - github.com/gorilla/websocket - BSD-2-Clause - :who: :why: :versions: [] :when: 2019-05-03 07:57:11.021835000 Z - - :license - github.com/gorhill/cronexpr - Apache-2.0 - :who: :why: :versions: [] :when: 2019-05-03 07:57:23.329188000 Z - - :license - github.com/golang/glog - Apache-2.0 - :who: :why: :versions: [] :when: 2019-05-03 07:57:34.443986000 Z - - :license - github.com/go-ini/ini - Apache-2.0 - :who: :why: :versions: [] :when: 2019-05-03 07:57:59.782275000 Z - - :license - github.com/davecgh/go-spew - ISC - :who: :why: :versions: [] :when: 2019-05-03 07:58:11.728785000 Z - - :license - golang.org/x/crypto/ssh/terminal - BSD-3-clause - :who: :why: :versions: [] :when: 2019-05-03 07:58:23.789185000 Z ================================================ FILE: .gitlab/duo/agent-config.yml ================================================ # Update this tag when go.mod or GOLANGLINT_VERSION in the Makefile changes. image: registry.gitlab.com/gitlab-org/ci-cd/runner-tools/runner-linters:2.11.4-go1.26.1 setup_script: - export GOTOOLCHAIN=local # GOMODCACHE must be project-relative so the cache archiver can reach it. - export GOMODCACHE="${CI_PROJECT_DIR}/.cache/gomod" # Remove image's pre-installed node to avoid version conflicts. - rm -rf /root/.nvm/versions/node/ || true - NODE_VERSION=20.20.0 - NODE_SHA256=92dfd59fb4837230abba5d6dd717b882ca897e22fde2f9268e1aac2c4bde0f5b - NODE_DIR="${CI_PROJECT_DIR}/.cache/node" - | if ! "${NODE_DIR}/bin/node" --version 2>/dev/null | grep -qF "v${NODE_VERSION}"; then echo "Installing Node.js ${NODE_VERSION}..." 
curl -fsSL "https://nodejs.org/dist/v${NODE_VERSION}/node-v${NODE_VERSION}-linux-x64.tar.gz" -o /tmp/node.tar.gz echo "${NODE_SHA256} /tmp/node.tar.gz" | sha256sum -c - rm -rf "${NODE_DIR}" mkdir -p "${NODE_DIR}" tar -xzf /tmp/node.tar.gz -C "${NODE_DIR}" --strip-components=1 --no-same-owner rm /tmp/node.tar.gz fi - export PATH="${NODE_DIR}/bin:${PATH}" - node --version && npm --version - npm config set prefix "${CI_PROJECT_DIR}/.cache/npm-global" - export PATH="${CI_PROJECT_DIR}/.cache/npm-global/bin:${PATH}" cache: key: files: - go.sum prefix: gitlab-runner-duo-v2 paths: - .cache/node/ # Node.js runtime — version-checked, skips download on hit - .cache/gomod/ # Go module cache (GOMODCACHE redirected here) - .cache/npm-global/ # duo-cli and other global npm packages - .tmp/ # make tools: golangci-lint, mockery, etc. - tmp/ # make development_setup: test git fixtures ================================================ FILE: .gitlab/duo/mr-review-instructions.yaml ================================================ --- # Custom instructions for GitLab Duo Code Review # Based on GitLab's official code review guidelines # References: # - Field Standardisation in Observability: https://handbook.gitlab.com/handbook/engineering/architecture/design-documents/observability_field_standardisation/ # This file defines custom review criteria that will be applied to specific files # during merge request reviews. Instructions are grouped by name and can target # multiple file patterns using glob syntax. instructions: - name: Log Field Standards fileFilters: - "**/*.go" - "!**/*_test.go" instructions: | Backend engineers should be complying with the new field standardisation in observability best practices 1. 
For any log lines that have been altered: - Ask: "If you are adding or modifying fields that aren't service specific, please ensure that the field is defined within the LabKit Go Fields package" - Remind: "All logging fields should be defined within the LabKit fields package and imported from there, provided that they aren't specific to this service. An example is GitLabUserID. Link: https://gitlab.com/gitlab-org/labkit/-/tree/master/fields?ref_type=heads" 2. For any new fields being added to log messages: - Check to ensure that these fields are not dynamically generated - Remind: "We're aiming to standardise the fields that we emit across all of our services at GitLab through this approach. Read more about this in https://handbook.gitlab.com/handbook/engineering/architecture/design-documents/observability_field_standardisation/" ================================================ FILE: .gitlab/issue_templates/Bug.md ================================================ ## Summary ## Steps to reproduce
.gitlab-ci.yml ```yml Add the job definition that is failing here ```
## Actual behavior ## Expected behavior ## Relevant logs and/or screenshots
job log ```sh Add the job log ```
## Environment description
config.toml contents ```toml Add your configuration here ```
### Used GitLab Runner version ## Possible fixes /label ~bug ~"group::runner" ~"Category:Runner Core" ================================================ FILE: .gitlab/issue_templates/Default.md ================================================ If you experience a problem with CI/CD on GitLab.com, please raise an issue in https://gitlab.com/gitlab-com/support-forum/issues Before raising an issue here, please read through our guide to help determine the best place to post: * https://about.gitlab.com/getting-help/ Select the "Bug" or "Feature Proposal" template from the "Description" selector and provide as much information as possible. Thank you for helping to make GitLab Runner a better product! :heart: /label ~"group::runner" ================================================ FILE: .gitlab/issue_templates/Documentation.md ================================================ ### Problem to solve ### Further details ### Proposal ### Who can address the issue ### Other links/references /label ~documentation ~group::runner ~devops::verify ================================================ FILE: .gitlab/issue_templates/Feature Flag Cleanup.md ================================================ ## Summary This issue is to clean up the `` feature flag, after the feature flag has been enabled by default for an appropriate amount of time in production. ## Owners - Team: GitLab Runner - Most appropriate Slack channel to reach out to: `#g_runner` - Best individual to reach out to: NAME - PM: NAME ## Stakeholders ## Expectations ### What might happen if this goes wrong? Please list here all the steps that must be taken if something goes wrong: - Any MRs that need to be rolled back? - Communication that needs to happen? - What are some things you can think of that could go wrong in the context of GitLab Runner and the existing setups? - What settings needs to be changed back, e.g. Feature Flag, or `config.toml` settings ? 
### Cleaning up the feature flag In most use cases, removing a feature flag will be a breaking change. This breaking change must be planned in accordance with GitLab's policy on breaking changes. - [ ] Specify in the issue description if this feature will be removed completely or will be productized as part of the Feature Flag cleanup - [ ] Create a merge request to remove `` feature flag. Ask for review and merge it. - [ ] Remove all references to the feature flag from the codebase. - [ ] Remove the documentation for the feature from the repository. - [ ] Remove the documentation for the feature from related repositories (GitLab, GitLab Runner Helm Chart, GitLab Runner Operator). - [ ] Ensure that the cleanup MR has been deployed at the code cutoff. - [ ] Close [the feature issue](ISSUE LINK) to indicate the feature will be released in the current milestone. - [ ] Close this feature flag cleanup issue. /label ~"feature flag" ~"section::ci" ~"group::runner" ~"DevOps::verify" ~"Category:Runner Core" ~"runner::core" ================================================ FILE: .gitlab/issue_templates/Feature Flag Roll Out.md ================================================ ## Summary This issue is to roll out [the feature]() on production, that is currently behind the `` feature flag. ## Owners - Most appropriate Slack channel to reach out to: `#` - Best individual to reach out to: @ ## Expectations ### What are we expecting to happen? ### What can go wrong and how would we detect it? ## Rollout Steps ### Rollout on non-production environments - Verify the MR that adds the feature flag is merged to `main` and has been deployed after code freeze, for the GitLab Runner context, to the privately managed runners. This might require a synchronisation with the appropriate team to make sure that the `config.toml` used by those runners is updated to include the newly added feature flag.
Some feature flags are executor specific and deploying them on the private runners would only make sense if these executors are used. A recommendation is to make sure that there is an existing runner that uses the relevant executor and actively runs jobs (GitLab Runner pipeline jobs, for example). - [ ] Deploy the feature flag at a percentage (recommended percentage: 50%) on the concerned private runners managed by the GitLab Runner team - [ ] Monitor that the error rates did not increase (repeat with a different percentage as necessary). - [ ] Enable the feature globally on all private runners managed by the GitLab Runner team - [ ] Verify that the feature works as expected. - [ ] If the feature flag causes end-to-end tests to fail, disable the feature flag on private runners to avoid blocking pipelines For assistance with end-to-end test failures, please reach out via the [`#g_runner` Slack channel](https://gitlab.enterprise.slack.com/archives/CBQ76ND6W). ### Rollout on production ## Rollback Steps /label /label ~"feature flag" ~"section::ci" ~"group::runner" ~"DevOps::verify" ~"Category:Runner Core" ~"runner::core" /assign @ /due in 12 weeks ================================================ FILE: .gitlab/issue_templates/Feature Proposal.md ================================================ ## Description ## Proposal ## Links to related issues and merge requests / references /label ~feature ~"group::runner" ================================================ FILE: .gitlab/issue_templates/Request for test infra feature toggle.md ================================================ # Summary # Feature source merge request # Infra merge request /label ~"devops::verify" /label ~"group::runner" /label ~"section::ci" /label ~"Runner Kubernetes Dogfooding" /label ~"Runner Kubernetes Dogfooding::Feature Toggle" ================================================ FILE: .gitlab/issue_templates/Security developer workflow.md
================================================ ## Prior to starting the security release work - [ ] Read the [security process for developers] if you are not familiar with it. - [ ] Mark this [issue as related] to the [upcoming Security Release Tracking Issue](https://gitlab.com/gitlab-org/gitlab-runner/-/issues?scope=all&utf8=%E2%9C%93&state=opened&label_name[]=security&label_name[]=upcoming%20security%20release). - Fill out the [Links section](#links): - [ ] Next to **Issue on GitLab**, add a link to the `gitlab-org/gitlab-runner` issue that describes the security vulnerability. ## Development - [ ] Run `scripts/security-harness` in your local repository to prevent accidentally pushing to any remote branch besides `gitlab.com/gitlab-org/security`. - [ ] Create a new branch, prefixing it with `security-`. - [ ] Create a merge request targeting `main` on `gitlab.com/gitlab-org/security/gitlab-runner` and use the [Security Release merge request template]. After your merge request has been approved according to our [approval guidelines] and by a team member of the AppSec team, you're ready to prepare the backports. ## Backports - [ ] Once the MR is ready to be merged, create MRs targeting the latest 3 stable branches. * At this point, it might be easier to squash the commits from the MR into one. - [ ] Create each MR targeting the stable branch `X-Y-stable`, using the [Security Release merge request template]. * Every merge request has its own set of TODOs, so make sure to complete those. - [ ] On the "Related merge requests" section, ensure all MRs are linked to this issue. * This section should only list the merge requests created for this issue: One targeting `main` and the 3 backports. ## Documentation and final details - [ ] Ensure the [Links section](#links) is completed. 
- [ ] Add the GitLab Runner [versions](https://gitlab.com/gitlab-org/release/docs/-/blob/master/general/security/developer.md#versions-affected) and editions affected to the [details section](#details). * The Git history of the files affected may help you associate the issue with a [release](https://about.gitlab.com/releases/). - [ ] Fill in any upgrade notes that users may need to take into account in the [details section](#details). - [ ] Add Yes/No and further details if needed to the migration and settings columns in the [details section](#details). - [ ] Add the nickname of the external user who found the issue (and/or HackerOne profile) to the Thanks row in the [details section](#details). ## Summary ### Links | Description | Link | | -------- | -------- | | Issue on [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-runner/issues) | #TODO | ### Details | Description | Details | Further details| | -------- | -------- | -------- | | Versions affected | X.Y | | | Upgrade notes | | | | GitLab Runner config updated | Yes/No| | | Thanks | | | [security process for developers]: https://gitlab.com/gitlab-org/release/docs/blob/master/general/security/developer.md [security Release merge request template]: https://gitlab.com/gitlab-org/security/gitlab-runner/blob/main/.gitlab/merge_request_templates/Security%20Release.md [approval guidelines]: https://docs.gitlab.com/development/code_review/#approval-guidelines [issue as related]: https://docs.gitlab.com/user/project/issues/related_issues/#add-a-linked-issue /label ~security ~"Category:Runner" ~"devops::verify" ~"group::runner" ================================================ FILE: .gitlab/issue_templates/bump-golang.md ================================================ ### Steps 1. [ ] bump golang in [goargs](https://gitlab.com/gitlab-org/language-tools/go/linters/goargs) example MR: - https://gitlab.com/gitlab-org/language-tools/go/linters/goargs/-/merge_requests/8 1. 
[ ] bump golang in [runner-linters](https://gitlab.com/gitlab-org/ci-cd/runner-tools/runner-linters) example MR: - https://gitlab.com/gitlab-org/ci-cd/runner-tools/runner-linters/-/merge_requests/7 1. [ ] bump golang et al in [gitlab-runner](https://gitlab.com/gitlab-org/gitlab-runner) Things we want to bump: - the golang version itself - the version of the runner-linters image - Update `GO_FIPS_VERSION_SUFFIX`, get the suffix from [here](https://github.com/golang-fips/go/releases) - Poke some files to force rebuild of images: ``` find . -name '*.rebuild' | xargs -r -n1 "$SHELL" -c 'date -u > "$1"' -- ``` example MRs: - https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4838/ - https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/3889 ================================================ FILE: .gitlab/issue_templates/planning_issue.md ================================================ ## :paperclips: Cross-Functional Programs ## :runner: Runner Core #### :bug: Bugs ~"Runner::P1" ```glql --- display: table fields: title, epic, assignees, healthStatus, state --- project="gitlab-org/gitlab-runner" and milestone = "%%.%" and label = ("type::bug", "Category:Runner Core") ``` #### :sparkles: Features ~"Runner::P1" ```glql --- display: table fields: title, epic, assignees, healthStatus, state --- project="gitlab-org/gitlab-runner" and milestone = "%%.%" and label= "type::feature" and label="Category:Runner Core" ``` #### :tools: Maintenance ~"Runner::P1" ```glql --- display: table fields: title, epic, assignees, healthStatus, state --- project="gitlab-org/gitlab-runner" and milestone = "%%.%" and label= "type::maintenance" and label="Category:Runner Core" ``` ~Stretch ```glql --- display: table fields: title, epic, assignees, healthStatus, state --- project="gitlab-org/gitlab-runner" and milestone = "%%.%" and label= "stretch" and label="Category:Runner Core" ``` ## :roller_coaster: Runner Fleet #### :bug: Bugs ~"Runner::P1" ```glql --- display: table fields: 
title, epic, assignees, healthStatus, state --- project="gitlab-org/gitlab" and milestone = "%%.%" and label= "type::bug" and label="Fleet Visibility" ``` #### :sparkles: Features ~"Runner::P1" ```glql --- display: table fields: title, epic, assignees, healthStatus, state --- project="gitlab-org/gitlab" and milestone = "%%.%" and label= "type::feature" and label="Category:Fleet Visibility" ``` #### :tools: Maintenance ~"Runner::P1" ```glql --- display: table fields: title, epic, assignees, healthStatus, state --- project="gitlab-org/gitlab" and milestone = "%%.%" and label= "type::maintenance" and label="Category:Fleet Visibility" ``` ~Stretch ```glql --- display: table fields: title, epic, assignees, healthStatus, state --- project="gitlab-org/gitlab" and milestone = "%%.%" and label= "stretch" and label="Category:Fleet Visibility" ``` ================================================ FILE: .gitlab/issue_templates/trainee-backend-maintainer.md ================================================ ## Basic setup 1. [ ] Read the [Becoming a maintainer for one of Runner team projects](https://about.gitlab.com/handbook/engineering/development/ci-cd/verify/runner/#becoming-a-maintainer-for-one-of-our-projects). 1. [ ] Read the [code review page in the handbook](https://about.gitlab.com/handbook/engineering/workflow/code-review/) and the [code review guidelines](https://docs.gitlab.com/development/code_review/). 1. [ ] Understand [how to become a maintainer](https://about.gitlab.com/handbook/engineering/workflow/code-review/#how-to-become-a-maintainer). 1. [ ] Add yourself as a [trainee maintainer](https://about.gitlab.com/handbook/engineering/workflow/code-review/#trainee-maintainer) on the [team page](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/team.yml). 1. [ ] Ask your manager to set up a check-in on this issue every six weeks or so. ## Working towards becoming a maintainer There is no checklist here, only guidelines. 
There is no specific timeline on this, but historically most backend trainee maintainers have become maintainers five to seven months after starting their training. You are free to discuss your progress with your manager or any maintainer at any time. As in the list above, your manager should review this issue with you roughly every six weeks; this is useful to track your progress, and see if there are any changes you need to make to move forward. It is up to you to ensure that you are getting enough MRs to review, and of varied types. All engineers are reviewers, so you should already be receiving regular reviews from Reviewer Roulette. You could also seek out more reviews from your team, or #backend Slack channels. Your reviews should aim to cover maintainer responsibilities as well as reviewer responsibilities. Your approval means you think it is ready to merge. After each MR is merged or closed, add a discussion to this issue using this template: ```markdown ### (Merge request title): (Merge request URL) During review: - (List anything of note, or a quick summary. "I suggested/identified/noted...") Post-review: - (List anything of note, or a quick summary. "I missed..." or "Merged as-is") (Maintainer who reviewed this merge request) Please add feedback, and compare this review to the average maintainer review. ``` **Note:** Do not include reviews of security MRs because review feedback might reveal security issue details. ## When you're ready to make it official When reviews have accumulated, you can confidently address the majority of the MRs assigned to you, and recent reviews consistently fulfill maintainer responsibilities, then you can propose yourself as a new maintainer for the relevant application. Remember that even when you are a maintainer, you can still request help from other maintainers if you come across an MR that you feel is too complex or requires a second opinion. 1. 
[ ] Create a merge request for [team page](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/team.yml) proposing yourself as a maintainer for the relevant application, assigned to your manager. 1. [ ] Ask a maintainer to add you as an Owner to the relevant maintainers list in 1. [ ] Keep reviewing, start merging :metal: /label ~"trainee maintainer" ~"devops::verify" ~"group::runner" ================================================ FILE: .gitlab/merge.release.yml ================================================ actions: - write: file: VERSION contents: "{{ .Release.VersionObject.NextMinor.StringNoPrefix }}" - commit: files: [VERSION] message: Bump version to {{ .Release.VersionObject.NextMinor }} ================================================ FILE: .gitlab/merge_request_templates/Default.md ================================================ ## What does this MR do? %{first_multiline_commit} ## Why was this MR needed? ## What's the best way to test this MR? ## What are the relevant issue numbers? ================================================ FILE: .gitlab/merge_request_templates/Documentation.md ================================================ ## What does this MR do? ## Related issues ## Author's checklist - [ ] Optional. Consider taking [the GitLab Technical Writing Fundamentals course](https://university.gitlab.com/courses/gitlab-technical-writing-fundamentals). - [ ] Follow the: - [Documentation process](https://docs.gitlab.com/development/documentation/workflow/). - [Documentation guidelines](https://docs.gitlab.com/development/documentation/). - [Style Guide](https://docs.gitlab.com/development/documentation/styleguide/). - [ ] If you're adding or changing the main heading of the page (H1), ensure that the [product availability details](https://docs.gitlab.com/development/documentation/styleguide/availability_details/) are added. 
- [ ] If you are a GitLab team member, [request a review](https://docs.gitlab.com/development/code_review/#dogfooding-the-reviewers-feature) based on: - The documentation page's [metadata](https://docs.gitlab.com/development/documentation/metadata/). - The [associated Technical Writer](https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments). If you are a GitLab team member and only adding documentation, do not add any of the following labels: - `~"frontend"` - `~"backend"` - `~"type::bug"` - `~"database"` These labels cause the MR to be added to code verification QA issues. ## Reviewer's checklist Documentation-related MRs should be reviewed by a Technical Writer for a non-blocking review, based on [Documentation Guidelines](https://docs.gitlab.com/development/documentation/) and the [Style Guide](https://docs.gitlab.com/development/documentation/styleguide/). If you aren't sure which tech writer to ask, use [roulette](https://gitlab-org.gitlab.io/gitlab-roulette/?sortKey=stats.avg30&order=-1&hourFormat24=true&visible=maintainer%7Cdocs) or ask in the [#docs](https://gitlab.slack.com/archives/C16HYA2P5) Slack channel. - [ ] If the content requires it, ensure the information is reviewed by a subject matter expert. - Technical writer review items: - [ ] Ensure docs metadata is present and up-to-date. - [ ] Ensure the appropriate [labels](https://docs.gitlab.com/development/documentation/workflow/#labels) are added to this MR. - [ ] Ensure a release milestone is set. - If relevant to this MR, ensure [content topic type](https://docs.gitlab.com/development/documentation/topic_types/) principles are in use, including: - [ ] The headings should be something you'd do a Google search for. Instead of `Default behavior`, say something like `Default behavior when you close an issue`. - [ ] The headings (other than the page title) should be active. Instead of `Configuring GDK`, say something like `Configure GDK`. 
- [ ] Any task steps should be written as a numbered list. - If the content still needs to be edited for topic types, you can create a follow-up issue with the ~"docs-technical-debt" label. - [ ] Review by assigned maintainer, who can always request/require the above reviews. Maintainer's review can occur before or after a technical writer review. /label ~documentation ~"devops::verify" ~"group::runner-core" ~"Category:Runner" ~"type::maintenance" ~"maintenance::refactor" /assign me ================================================ FILE: .gitlab/merge_request_templates/Security Release.md ================================================ ## Related issues ## Developer checklist - [ ] **In the "Related issues" section, write down the [GitLab Runner Security] issue it belongs to (i.e. `Related to `).** - [ ] Merge request targets `main`, or a versioned stable branch (`X-Y-stable`). - [ ] Milestone is set for the version this merge request applies to. A closed milestone can be assigned via [quick actions]. - [ ] Title of this merge request is the same as for all backports. - [ ] For the MR targeting `main`: - [ ] Assign to a reviewer and maintainer, per our [Code Review process]. - [ ] Ensure it's approved according to our [Approval Guidelines]. - [ ] Ensure it's approved by an AppSec engineer. - If you're unsure who should approve, find the AppSec engineer associated to the issue in the [Canonical repository], or ask #sec-appsec on Slack. - [ ] When approving, the AppSec engineer should mention this MR on the [security release tracking issue] in the `gitlab-org/gitlab` project for awareness - [ ] Merge request _must_ close the corresponding security issue. - [ ] Ensure that a backport MR targeting a versioned stable branch (`X-Y-stable`) is approved by a maintainer. **Note:** Reviewer/maintainer should not be a Release Manager ## Maintainer checklist - [ ] Correct milestone is applied and the title is matching across all backports. 
- [ ] Assign the merge request to the release manager of the [upcoming security release](https://gitlab.com/gitlab-org/gitlab-runner/-/issues?scope=all&utf8=%E2%9C%93&state=opened&label_name[]=security&label_name[]=upcoming%20security%20release) with passing CI pipelines and **when all backports including the MR targeting main are ready.** ## AppSec checklist - [ ] Assign the right [AppSecWeight](https://handbook.gitlab.com/handbook/security/product-security/application-security/milestone-planning/#weight-labels) label /label ~security ~"Category:Runner" ~"devops::verify" ~"group::runner" /label ~"Division::Security" ~"Department::Product Security" ~"Application Security Team" /label ~"AppSecWorkflow::planned" ~"AppSecWorkType::VulnFixVerification" /label ~"AppSecPriority::1" [GitLab Runner Security]: https://gitlab.com/gitlab-org/security/gitlab-runner [quick actions]: https://docs.gitlab.com/user/project/quick_actions/#quick-actions-for-issues-merge-requests-and-epics [Code Review process]: https://docs.gitlab.com/development/code_review/ [Approval Guidelines]: https://docs.gitlab.com/development/code_review/#approval-guidelines [Canonical repository]: https://gitlab.com/gitlab-org/gitlab-runner [security release tracking issue]: https://gitlab.com/gitlab-org/gitlab/-/issues/?scope=all&utf8=%E2%9C%93&state=opened&label_name%5B%5D=upcoming%20security%20release ================================================ FILE: .gitlab/renovate.json ================================================ { "extends": [ ":disableMajorUpdates" ], "regexManagers": [ { "fileMatch": [ "\\.gitlab\\/ci\\/_common\\.gitlab-ci\\.yml", "\\.tool-versions", "dockerfiles\\/ci\\/Dockerfile" ], "matchStrings": [ "# renovate: (datasource=(?<datasource>\\S+))?\\s?(depName=(?<depName>\\S+))?\\s?(registryUrl=(?<registryUrl>\\S+))?\\s?(versioning=(?<versioning>\\S+))?\\s?(allowedVersions=(?<allowedVersions>\\S+))?\\s?.*?_VERSION:\\s?\\\"?(?<currentValue>[\\w+\\.\\-]*)", "# renovate: 
(datasource=(?<datasource>\\S+))?\\s?(depName=(?<depName>\\S+))?\\s?(registryUrl=(?<registryUrl>\\S+))?\\s?(versioning=(?<versioning>\\S+))?\\s?(allowedVersions=(?<allowedVersions>\\S+))?\\s\\w+\\s(?<currentValue>[\\w+\\.\\-]*)", "# renovate: (datasource=(?<datasource>\\S+))?\\s?(depName=(?<depName>\\S+))?\\s?(registryUrl=(?<registryUrl>\\S+))?\\s?(versioning=(?<versioning>\\S+))?\\s?(allowedVersions=(?<allowedVersions>\\S+))?\\sFROM\\s\\w+\\:(?<currentValue>[\\w+\\.\\-]*)" ], "allowedVersionsTemplate": "{{allowedVersions}}" } ], "enabledManagers": ["regex"], "reviewers": ["ggeorgiev_gitlab"], "recreateClosed": true } ================================================ FILE: .gitlab/route-map.yml ================================================ # Documentation - source: /docs/(.+?/)_index\.md/ # docs/configuration/_index.md public: '\1' # configuration/ - source: /docs/(.+?)\.md/ # docs/configuration/page.md public: '\1/' # configuration/page/ ================================================ FILE: .gitlab-ci.yml ================================================ stages: - build - qa - test - coverage - package - release - test kubernetes integration - postrelease - deploy - rebase - docs include: - local: /.gitlab/ci/_project_canonical.gitlab-ci.yml rules: - if: $CI_PROJECT_PATH == "gitlab-org/gitlab-runner" || $CI_PROJECT_PATH == "gitlab-org/security/gitlab-runner" when: always - local: /.gitlab/ci/_project_fork.gitlab-ci.yml rules: - if: $CI_PROJECT_PATH == "gitlab-org/gitlab-runner" || $CI_PROJECT_PATH == "gitlab-org/security/gitlab-runner" when: never - if: $CI_PROJECT_PATH - local: /.gitlab/ci/_common.gitlab-ci.yml - local: /.gitlab/ci/_rules.gitlab-ci.yml - local: /.gitlab/ci/_kubernetes.gitlab-ci.yml - local: /.gitlab/ci/build.gitlab-ci.yml - local: /.gitlab/ci/qa.gitlab-ci.yml - local: /.gitlab/ci/test.gitlab-ci.yml - local: /.gitlab/ci/test-kubernetes-integration.gitlab-ci.yml - local: /.gitlab/ci/coverage.gitlab-ci.yml - local: /.gitlab/ci/package.gitlab-ci.yml - local: /.gitlab/ci/release.gitlab-ci.yml - local: /.gitlab/ci/postrelease.gitlab-ci.yml - local: /.gitlab/ci/deploy.gitlab-ci.yml - local: 
/.gitlab/ci/docs.gitlab-ci.yml - local: /.gitlab/ci/rebase.gitlab-ci.yml - local: /.gitlab/ci/hosted-runners-bridge.gitlab-ci.yml - component: ${CI_SERVER_FQDN}/gitlab-org/components/danger-review/danger-review@2.1.0 inputs: job_stage: qa rules: - if: '$CI_SERVER_HOST == "gitlab.com" && ($CI_MERGE_REQUEST_SOURCE_PROJECT_PATH == "gitlab-org/gitlab-runner" || $CI_MERGE_REQUEST_SOURCE_PROJECT_PATH == "gitlab-org/security/gitlab-runner" || $CI_MERGE_REQUEST_SOURCE_PROJECT_PATH == "gitlab-community/gitlab-org/gitlab-runner")' - project: gitlab-com/gl-security/security-operations/infrastructure-security-public/oidc-modules ref: 3.4.0 file: templates/gcp_auth.yaml ================================================ FILE: .golangci.yml ================================================ version: "2" run: concurrency: 8 linters: default: none enable: - bodyclose - dogsled - errcheck - errorlint - gocognit - goconst - gocritic - goprintffuncname - govet - ineffassign - misspell - nakedret - nestif - revive - staticcheck - unconvert - unparam - usetesting - whitespace settings: errcheck: check-type-assertions: true errorlint: asserts: false comparison: false gocognit: min-complexity: 15 gocritic: enabled-checks: - appendCombine - boolExprSimplify - commentedOutCode - dupImport - emptyFallthrough - emptyStringTest - equalFold - evalOrder - hexLiteral - indexAlloc - initClause - methodExprCall - nestingReduce - nilValReturn - ptrToRefParam - rangeExprCopy - regexpPattern - sloppyReassign - stringXbytes - truncateCmp - typeAssertChain - typeUnparen - unnecessaryBlock - weakCond - yodaStyleExpr gocyclo: min-complexity: 10 revive: rules: - name: unused-parameter disabled: true staticcheck: checks: ["all", "-ST1000", "-ST1003", "-ST1005", "-ST1012", "-ST1016", "-ST1020", "-ST1021", "-ST1022", "-QF1001", "-QF1008", "-QF1011"] usetesting: os-create-temp: false os-mkdir-temp: false os-setenv: false os-temp-dir: false os-chdir: false context-background: true context-todo: true exclusions: 
generated: lax presets: - comments - common-false-positives - legacy - std-error-handling rules: - linters: - gocyclo path: helpers/shell_escape.go - linters: - gocyclo path: executors/kubernetes/kubernetes_test.go - linters: - gocyclo path: executors/kubernetes/util_test.go - linters: - gocyclo path: executors/kubernetes/exec_test.go - linters: - gocyclo path: executors/parallels/ - linters: - gocyclo path: executors/virtualbox/ - linters: - revive text: don't use ALL_CAPS in Go names; use CamelCase - linters: - revive text: don't use an underscore in package name - linters: - bodyclose - gocognit - goconst path: .*_test.go - linters: - errcheck path: .*_test.go text: Error return value is not checked - linters: - errcheck - gocritic path: .*_test.go text: regexpMust - linters: - gocritic path: .*_test.go text: typeUnparen - linters: - unused path: executors/docker/docker_command_test.go # Ignore until https://gitlab.com/gitlab-org/gitlab-runner/-/issues/25385 is solved paths: - mock_*.go - third_party$ - builtin$ - examples$ formatters: enable: - goimports exclusions: generated: lax paths: - mock_*.go - third_party$ - builtin$ - examples$ ================================================ FILE: .labkit_logging_todo.yml ================================================ # LabKit Logging Field Standardization TODO # AUTO-GENERATED FILE. DO NOT EDIT MANUALLY. # # This file tracks deprecated logging fields that need to be migrated to # standard fields defined in gitlab.com/gitlab-org/labkit/v2/fields. # Each offense represents a file using a deprecated field name. # # How to fix: # Replace the string literal with the constant from the fields package. # e.g. log.WithField("source_ip", ...) → log.WithField(fields.RemoteIP, ...) 
# # Adding offenses when an immediate fix is not possible: # go get gitlab.com/gitlab-org/labkit/v2/cmd/validate-log-fields # go run gitlab.com/gitlab-org/labkit/v2/cmd/validate-log-fields -update-todo # go mod tidy # # Regenerate entire TODO: # Delete this file, then run the command above. --- offenses: - callsite: common/build.go deprecated_field: error standard_field: fields.ErrorMessage - callsite: executors/docker/docker.go deprecated_field: error standard_field: fields.ErrorMessage - callsite: executors/docker/machine/provider.go deprecated_field: duration standard_field: fields.DurationS - callsite: network/retry_requester.go deprecated_field: duration standard_field: fields.DurationS ================================================ FILE: .markdownlint-cli2.yaml ================================================ --- # Base Markdownlint configuration # Extended Markdownlint configuration in docs/.markdownlint/ # See https://github.com/DavidAnson/markdownlint/blob/main/doc/Rules.md for explanations of each rule customRules: - "./docs/.markdownlint/rules/unnecessary_traversal.js" config: # First, set the default default: true # Per-rule settings in alphabetical order code-block-style: # MD046 style: "fenced" emphasis-style: false # MD049 header-style: # MD003 style: "atx" hr-style: # MD035 style: "---" line-length: # MD013 code_blocks: false tables: false headings: true heading_line_length: 100 line_length: 800 no-duplicate-heading: # MD024 siblings_only: true no-emphasis-as-heading: false # MD036 no-inline-html: false # MD033 no-trailing-punctuation: # MD026 punctuation: ".,;:!。,;:!" 
no-trailing-spaces: false # MD009 ol-prefix: # MD029 style: "one" reference-links-images: false # MD052 ul-style: # MD004 style: "dash" table-column-style: false # MD060 # Keep this item last due to length proper-names: # MD044 code_blocks: false html_elements: false names: [ "Akismet", "Alertmanager", "AlmaLinux", "API", "Asana", "Auth0", "Azure", "Bamboo", "Bitbucket", "Bugzilla", "CAS", "CentOS", "Consul", "Debian", "DevOps", "Docker", "DockerSlim", "Elasticsearch", "Facebook", "fastlane", "fluent-plugin-redis-slowlog", "GDK", "Geo", "Git LFS", "git-annex", "git-credential-oauth", "git-sizer", "Git", "Gitaly", "GitHub", "gitlab-duo", "gitlab.vim", "GitLab chart", "GitLab Geo", "GitLab Monitor", "GitLab Operator", "GitLab Pages", "GitLab Rails", "GitLab Runner", "GitLab Shell", "GitLab Workhorse", "GitLab", "Gitleaks", "Gmail", "Google", "Grafana", "Gzip", "Helm", "HipChat", "ID", "IP", "Ingress", "jasmine-jquery", "JavaScript", "Jaeger", "Jenkins", "Jira", "Jira Cloud", "Jira Server", "jQuery", "JSON", "JupyterHub", "Karma", "Kerberos", "Knative", "Kubernetes", "LDAP", "Let's Encrypt", "Markdown", "markdownlint", "Mattermost", "Microsoft", "minikube", "MinIO", "ModSecurity", "Neovim", "NGINX Ingress", "NGINX", "OAuth", "OAuth 2", "OmniAuth", "OpenID", "OpenShift", "PgBouncer", "Postfix", "PostgreSQL", "PowerShell", "Praefect", "Prometheus", "Puma", "puma-worker-killer", "Python", "Rake", "Redis", "Redmine", "reCAPTCHA", "Ruby", "runit", "Salesforce", "SAML", "Sendmail", "Sentry", "Service Desk", "Sidekiq", "Shibboleth", "Slack", "SMTP", "SpotBugs", "SSH", "Tiller", "Tiptap", "TOML", "Trello", "Trello Power-Ups", "TypeScript", "Twitter", "Ubuntu", "Ultra Auth", "Unicorn", "unicorn-worker-killer", "URL", "WebdriverIO", "YAML", "YouTrack" ] ================================================ FILE: .mockery.yaml ================================================ all: true dir: '{{.InterfaceDir}}' filename: 'mocks.go' structname: "{{.Mock}}{{.InterfaceName | firstUpper}}" 
pkgname: '{{.SrcPackageName}}' template: testify template-data: unroll-variadic: true packages: gitlab.com/gitlab-org/gitlab-runner: config: exclude-subpkg-regex: - executors/internal/autoscaler recursive: true ================================================ FILE: .tool-versions ================================================ # renovate: datasource=docker depName=golang allowedVersions=/1\.26\..+/ golang 1.26.1 yq 4.44.3 mockery 2.53.3 mage 1.15.0 # For linting documentation markdownlint-cli2 0.19.0 lychee 0.21.0 vale 3.13.0 ================================================ FILE: .vale.ini ================================================ # Vale configuration file. # # For more information, see https://vale.sh/docs/vale-ini. StylesPath = docs/.vale MinAlertLevel = suggestion IgnoredScopes = code, text.frontmatter.redirect_to [*.md] BasedOnStyles = gitlab_base, gitlab_docs # Ignore SVG markup TokenIgnores = (\*\*\{\w*\}\*\*) ================================================ FILE: AGENTS.md ================================================ # GitLab Runner — AI Agent Instructions This file provides context for AI agents operating on this repository. All agent reasoning, analysis, and action plans should be written to stdout. Do not post comments to issues or merge requests during the fix process. ## Codebase overview **Language:** Go | **Min version:** see `go.mod` | **Default branch:** `main` | Package | Purpose | |---|---| | `executors/` | Executor implementations: `docker`, `kubernetes`, `shell`, `ssh`, `instance`, `custom` | | `commands/` | CLI entry points: `run`, `register`, `exec`, `artifacts-downloader`, etc. 
| | `network/` | GitLab API client: job polling, artifact upload, trace streaming | | `helpers/` | Shared utilities, retry logic, process management, file operations | | `shells/` | Shell script generation: Bash, PowerShell, CMD | | `cache/` | Cache backends: S3, GCS, Azure | | `common/` | Core types: `Config`, `Runner`, `Build`, `JobResponse`, `Network`, `Executor` | | `referees/` | Metric collection during job execution | ## Deprecated features — do not invest in fixes - **docker+machine executor** — deprecated GitLab 17.5, removal GitLab 20.0 (May 2027) - **`gitlab-runner exec` command** — deprecated, scheduled for removal When asked to fix a bug in these features: log the deprecation status and migration path to stdout, then exit. Do not create branches or MRs. ## Coding standards - Follow existing patterns in the file you are editing - Error wrapping: `fmt.Errorf("context: %w", err)` not `errors.Wrap` - Logging: use the structured logger (`logrus`) already imported in the file - Tests: table-driven tests using `testify/require` and `testify/assert` - Mocks: generated with `mockery` — check `//go:generate` directives before writing manual mocks - Do not modify `go.mod` / `go.sum` unless the fix genuinely requires a new dependency - Do not refactor code unrelated to the bug being fixed ### Fix the root cause, not a downstream symptom Before writing any code, identify the specific function, call site, and ordering where the invariant breaks. State it in your commit message. Do not patch a proxy or downstream layer when the root cause is accessible — downstream patches leave the original bug in place and create two code paths to maintain. ### Fix the general case, not just the reported input When a bug is reported for one specific value (e.g. a variable name with a dash, or one specific `GIT_STRATEGY`), examine the full input domain and fix the general case. 
Patching only the reported example creates false confidence and deferred failures for inputs in the same class. Your fix must be at least as broad as the problem domain. ### Reuse existing helpers — search before adding Before writing a new helper function, search the package for an existing one: `grep -rn "concept\|related_term" ./package/`. If a correct helper already exists, use it. If you do introduce a new function, state in a comment why the existing helpers were insufficient. Duplicating logic is a maintenance liability and a code smell. ### Match the nil-vs-zero-value return contract In Go, `nil` and `&ZeroStruct{}` are semantically distinct. Before adding a return statement to an existing function, audit every other return site and confirm the contract: does the caller distinguish error from success via a nil check or by inspecting fields? Returning a non-nil zero-value struct where callers expect nil on error can trigger downstream panics (e.g. `if result.ID == 0 { logrus.Panicln(...) }`). When in doubt, return `nil` on failure. ## Verification After making changes, always run these in order before pushing. All must pass: - `make tools` — installs golangci-lint and other dev tools into `.tmp/bin/` (required before linting) - `make development_setup` — sets up local git repo fixtures needed by some tests (idempotent) - `go build ./...` — must compile clean - `go vet ./...` — must pass clean - `go test -race ./... -count=1 -timeout 30m` — fix any failures your changes introduced; `-race` adds ~10× overhead so the timeout must be generous - `make lint` — runs golangci-lint via the Makefile (version is pinned in `GOLANGLINT_VERSION` in the Makefile; always use `make lint` rather than calling the binary directly so the pinned version is used) Do not log `CI_JOB_TOKEN`, API tokens, or any secret value to stdout. If you need to diagnose an authentication failure, log the HTTP status code and response body only. 
Some tests require a live Docker daemon, Kubernetes cluster, or real GitLab instance and will fail in CI. These are expected — log them explicitly and continue. Do not treat pre-existing infrastructure-dependent failures as blockers. ## Commit and branch conventions - Branch name: `fix/issue-{IID}-short-kebab-description` - Commit message: `fix: imperative description (closes #{IID})` - MR description must explain root cause and the fix, not just what changed ## Bug triage — when to stop Stop and log reasoning without creating an MR when: - The bug affects a deprecated executor or command (see above) - The root cause cannot be determined from available context - The fix would require changes across more than 5 files or touches core architecture - The issue has a `security` label — these require human review - The issue has a `customer` or `priority::1` / `priority::2` label — flag for @adebayo_a ## Focus discipline **Do not fix unrelated CI pipeline failures.** If the repository's CI pipeline is failing for a reason unrelated to the issue you were assigned, note it in the MR description and continue with the assigned fix. Do not open branches to repair CI unless the failing pipeline was explicitly introduced by your own changes. ## Patterns from past fixes Use this section during research to recognise familiar bug classes before diving into code. ### Context handling - Always return the deadline context error, not the parent context error. In retry/backoff loops, `ctx.Err()` on the wrong context is a recurring mistake. - Replace `time.After` in loops with `time.NewTimer` + explicit `Stop()`. `time.After` leaks timers until they fire; in retry loops this causes unnecessary allocations and delayed cancellation. (MR !6064) ### Data races - Shared state accessed from goroutines must be protected with a mutex or communicated via channels. The WebSocket tunnel and the runner fleet scheduler are known areas where races have occurred. 
Always run `go test -race` before pushing. (MR !6237) ### String encoding and filenames - File names passed to archive headers (gzip, zip, tar) must be sanitised before use. Non-ASCII characters cause latin-1 encoding errors in gzip headers. Use the existing sanitisation helper in `helpers/` rather than passing raw paths. (MR !6487) ### Configuration changes - New config fields that change existing behaviour must default to preserving the old behaviour. Never change the meaning of an existing field's zero value — that is a breaking change. (MR !6081) ### S3 / cache errors - S3 403 errors often mean missing session token, not bad credentials. Check whether the credentials chain includes a session token and ensure it is forwarded. (MR !6376, !6472) ### Nil guards - `filepath.Walk` can pass a nil `FileInfo` when it encounters a permission error. Always nil-check `FileInfo` before accessing its methods. (MR !6050) ### PowerShell variable name escaping - When generating PowerShell variable references, any name containing characters outside `[a-zA-Z0-9_]` (dashes, dots, spaces, etc.) must use `${name}` syntax. Bare `$name` is invalid for such names — PowerShell parses `$MY-VAR` as `($MY) - (VAR)`, producing a syntax error or wrong value. - Use a regex guard `[^a-zA-Z0-9_]` — not just `strings.Contains(name, "-")`. Dots, spaces, and other special characters trigger the same problem. ### Symlink traversal - `filepath.Walk` does not follow directory symlinks. When walking artifact paths, glob results, or any user-specified path, check whether the walk root is a symlink and resolve it with `filepath.EvalSymlinks` before calling Walk. - **Always include cycle detection**: use a `visited map[string]struct{}` keyed on real (resolved) paths. A circular symlink without detection causes an infinite loop. Failing to detect cycles is a correctness bug, not a performance concern. 
- **Always add a cycle termination test**: name it `Test_CycleDoesNotHang`, construct an actual circular symlink in a temp dir, and assert the function returns within a reasonable deadline. An untested cycle path is a production denial-of-service risk in multi-tenant CI environments. ### Feature flags and git strategy completeness - When a feature flag controls behaviour that runs in a switch over `GetGitStrategy()`, check that every strategy branch (`GitClone`, `GitFetch`, `GitEmpty`, `GitNone`) is handled. A branch that returns early or logs "skipping" without doing the work silently breaks the feature flag for that strategy. ### Error return semantics — nil beats zero-value struct - When a network call fails or returns undecodable data, return `nil`, not a zero-value struct. Callers use nil checks to detect failure; a non-nil struct with all-zero fields (e.g. `ID: 0`) can be mistaken for success and trigger downstream panics in code that expects non-nil only on success. - When adding a guard to a legacy fallback path (e.g. content-type checks before re-issuing a request), ensure the guarded-off path returns `nil`, not the fallback result. ### Runtime identity over compile-time config - When guarding privileged shell operations (e.g. `chown`), prefer a runtime check (`[ "$(id -u)" = "0" ]`) over a compile-time check of Kubernetes security context fields. Security context fields may not reflect reality: pods can run as non-root via Docker `--user`, admission webhooks, or other mechanisms not captured in the runner config. The runtime check is always ground truth. ### Git config file ownership - `git config --global` resolves to `$HOME/.gitconfig`. When jobs run as non-root users, `$HOME` is often `/root` (owned by root), causing "Permission denied". Before any `git config --global` call, export `GIT_CONFIG_GLOBAL` pointing to a writable temp file under the runner's temp directory. Clean it up in the job cleanup script alongside other temp files. 
### Variable expansion in secrets and external paths - CI/CD variable references (`$VAR_NAME`) in fields like Vault secret paths, clone paths, and external URLs must be expanded before the value is used. Search the call site for an existing `ExpandVariables` / `Expand` utility — do not inline string replacement. If the expansion happens inside a shared interface method, move it there so all callers benefit and the contract is enforced centrally. - When a path read from an env file is used as a working directory or as the argument to `os.RemoveAll`, validate it with `filepath.Rel(rootDir, path)` and reject paths that escape the root (i.e. `strings.HasPrefix(rel, "..")`). This guards against path-traversal via a malicious or misconfigured pre-clone script. ### Scope of a typical fix Most merged bug fixes change 1–3 files and under 30 lines net. If your proposed fix is larger than this, re-examine whether you are solving the right problem or inadvertently refactoring. Flag it in the log and stop if scope has grown beyond a targeted fix. 
================================================ FILE: CHANGELOG.md ================================================ ## v18.11.1 (2026-04-20) ### Bug fixes - Merge branch 'security-fix-k8s-uid-gid-root-bypass' into 'main' [!6643](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6643) ## v18.11.0 (2026-04-16) ### New features - Consolidate the HTTP Status Code field [!6492](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6492) - Change the concrete helper image to use shell-form CMD [!6591](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6591) - Cache AssumeRole credentials to reduce STS requests [!6549](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6549) - Implement Concrete CI Function [!6410](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6410) - Add logging field validator CI job [!6580](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6580) - Fix default artifacts upload timeout values [!6584](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6584) - Add k8s nodename to pod phase output [!6311](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6311) (Thorsten Banhart @banhartt) - Add native steps job counter metric [!6369](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6369) - Bundle git and CA certificates for concrete runner [!6504](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6504) - Update builtins to use step-runner BuiltinContext interface [!6616](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6616) - Add seccomp and AppArmor profile support to Kubernetes executor security context [!6512](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6512) (Marc Ullman @MarcUllman) - Kubernetes: add PodDisruptionBudget support for job pods [!6331](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6331) - Pass socket path from step-runner serve to proxy command 
[!6507](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6507) - Resolve "Windows Runners: Document "session 0" restrictions (screen resolution statically set to 1024x768)" [!4994](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4994) - Kubernetes: autoscaler for idle capacity via pause pods [!6334](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6334) ### Bug fixes - Update FF_SCRIPT_SECTIONS documentation to reflect current behavior [!6519](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6519) (Pishel65 @pishel65) - Rate-limit and instrument S3 AssumeRole calls [!6528](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6528) - Cache/s3v2: cache S3 client to reduce IMDS requests [!6530](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6530) - PowerShell/Pwsh environment variables can't process special characters in their names. [!6502](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6502) (Pishel65 @pishel65) - Fix proxy-mask credential store file permissions on Unix [!6510](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6510) - Fix disable_cache disabling all volumes instead of only cache [!6552](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6552) (Aaron Döppner @aarondpn-sp) - Restore fixed runner command path [!6529](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6529) - Use custom endpoint in detectBucketLocation [!6532](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6532) - Log warning when DOCKER_AUTH_CONFIG credentials resolution fails [!6578](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6578) - Properly escape ANSI color codes in shell scripts [!6527](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6527) - Improve step_script to bring it on par with Runner legacy path [!6596](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6596) - Revert "Remove GPG signing color" 
[!6554](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6554) ### Maintenance - No global executors [!6508](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6508) - Patch(cache): ensure cache exists before uploading [!6569](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6569) - Highlight actively developed executors [!6585](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6585) - Bump up runner images version to 0.0.38 [!6541](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6541) - Cache: rename local artifact when FF_HASH_CACHE_KEYS is toggled [!6546](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6546) - Update Windows backward compatibility support [!6523](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6523) (Pishel65 @pishel65) - Add Support for Windows Server 24H2 [!6522](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6522) (Pishel65 @pishel65) - Add Pipeline Security group as code owners for secrets managers [!6474](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6474) - Link to main branch for runner-helper Dockerfiles [!6533](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6533) (Sven Hoexter @hoexter) - Standardize runner capitalization in Docker Machine autoscale docs [!6615](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6615) - Adding test for new alert format [!6550](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6550) - Update CI components [!6517](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6517) - Fix autoscale documentation typos [!6611](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6611) (Bob Singh @bobsingh.dev) - Add boundary test cases for statusClass [!6551](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6551) (Bob Singh @bobsingh.dev) - Pilot runners failover [!6536](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6536) - 
Remove all references to PackageCloud [!6514](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6514) - Add Duo Workflow agent configuration and instructions [!6588](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6588) - Properly support Job Router FF from runner config [!6545](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6545) - AI Translated Documentation Push: GITTECHA-610 [!6577](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6577) - Docs(docker-machine): update docs [!6534](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6534) - Creating documentation about Windows helper images [!6525](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6525) (Pishel65 @pishel65) - Documented fallback correlation ID [!6531](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6531) - Docs maintenance: Fix and update broken URLs [!6526](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6526) - Revert "Merge branch 'malvarez-consolidate-http-status-code-field' into 'main'" [!6524](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6524) - Removing the mention of packagecloud [!6582](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6582) - Fix alert boxes in translated documentation [!6595](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6595) - Changing warning format [!6539](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6539) - Rename Kubernetes Agent Server to GitLab Relay (KAS) [!6583](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6583) - Document interactive desktop requirement for Windows GUI tests [!6571](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6571) - Clarify post_build_script and after_script execution behavior [!6573](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6573) - Update RPM package naming from amd64 to x86_64 
[!6543](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6543) - Updating note format [!6537](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6537) - Tidy up Markdown in documentation [!6520](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6520) ## v18.10.1 (2026-04-05) ### New features - Fix default artifacts upload timeout values [!6584](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6584) ## v18.10.0 (2026-03-16) ### New features - Upgrade step-runner to v0.30.0 [!6441](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6441) - Add volume_keep option to Docker executor [!6490](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6490) - Ensure subprocess termination if GitLab Runner exits on Windows [!6500](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6500) (Ilan Godik @NightRa) - Update policy-related logs to be more generic [!6445](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6445) - Docker+machine: add shutdown drain for idle machines [!6330](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6330) - Support environment variable expansion in runner token and URL [!6068](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6068) - Add artifact upload timeouts [!5900](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5900) - Teach runner how to set pod-level resources for build pods [!5922](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5922) (Stéphane Talbot @stalb) - Add support for interactive web terminal in docker for PowerShell and Pwsh [!6363](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6363) (Pishel65 @pishel65) - Add install instructions for the step-runner [!6420](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6420) - Build gitlab-runner-windows-arm64 executable [!6495](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6495) (Bruno @brunvonlope) ### Bug fixes - Fix 
failing tests for autoscaler due to taskscaler update [!6434](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6434) - [Commander]: Fix process handle leak when using Windows Jobs [!6498](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6498) (Ilan Godik @NightRa) - Bump runner images version [!6429](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6429) - Fix proxy-mask credential store file permissions on Unix [!6510](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6510) - Runner-wrapper: buffer errCh to avoid goroutine leak on shutdown [!6337](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6337) (Emmanuel 326 @Emmanuel326) - Force authenticated calls to Gitaly on public projects [!6444](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6444) - Add helpful error message for S3 403 Forbidden in cache extractor [!6472](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6472) - Fix non-latin-1 string error when uploading artifact [!6487](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6487) - Avoid breaking change when script syntax is invalid when no inputs used [!6417](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6417) - Upgrade gitlab.com/gitlab-org/moa to fix unmatched template expressions [!6513](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6513) ### Maintenance - Go: Update module github.com/Azure/azure-sdk-for-go/sdk/storage/azblob to v1.6.4 [!6405](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6405) - Update alert box style, runner docs 1 [!6451](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6451) - Go: Update module github.com/aws/aws-sdk-go-v2/service/secretsmanager to v1.41.1 [!6403](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6403) - Go: Update module github.com/aws/aws-sdk-go-v2 to v1.41.1 [!6394](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6394) - Go: Update 
module github.com/klauspost/compress to v1.18.4 [!6406](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6406) - AI Translated Documentation Push: GITTECHA-581 [!6501](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6501) - Add Troubleshooting mention for errors caused by azure overprovisioning [!6430](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6430) - AI Translated Documentation Push: GITTECHA-563 [!6470](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6470) - Consolidate build URL helpers into helpers/url with auth flag [!6483](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6483) - Make hosted-runners-bridge job dependent on pulp release [!6467](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6467) - Go: Update module github.com/sirupsen/logrus to v1.9.4 [!6408](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6408) - Go: Update module github.com/aws/aws-sdk-go-v2/service/sts to v1.41.7 [!6404](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6404) - Go: Update module cloud.google.com/go/storage to v1.60.0 [!6416](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6416) - Go: Update module github.com/openbao/openbao/api/v2 to v2.5.1 [!6407](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6407) - Go: Update gitlab.com/gitlab-org/fleeting/fleeting digest to 1389ec0 [!6421](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6421) - Prevent bleeding-edge jobs from running on CC [!6437](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6437) (Touni Atchadé @oratchade) - Go: Update gitlab.com/gitlab-org/fleeting/fleeting/metrics/prometheus digest to 5362476 [!6384](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6384) - AI Translated Documentation Push: GITTECHA-549 [!6435](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6435) - Update module google.golang.org/protobuf/cmd/protoc-gen-go to 
v1.36.11 [!6415](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6415) - Go: Update module github.com/bmatcuk/doublestar/v4 to v4.10.0 [!6425](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6425) - Go: Update module github.com/aws/aws-sdk-go-v2/credentials to v1.19.10 [!6396](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6396) - Go: Update module github.com/aws/aws-sdk-go-v2/service/s3 to v1.96.0 [!6419](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6419) - Go: Update gitlab.com/gitlab-org/fleeting/fleeting/metrics/prometheus digest to 9c980c4 [!6402](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6402) - Remove automaxprocs since it's not necessary with Go 1.25 [!6479](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6479) - Prevent CACHE_FALLBACK_KEY from bypassing protection on Windows [!6440](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6440) - Go: Update github.com/johannesboyne/gofakes3 digest to 4c385a1 [!6380](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6380) - Log collection cleanups [!6373](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6373) - Update github.com/santhosh-tekuri/jsonschema to v6 [!6499](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6499) - Update module github.com/vektra/mockery to v3.6.4 [!6413](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6413) - Update localization team owners in CODEOWNERS [!6355](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6355) - Update dependency danger-review to v2.1.0 [!5936](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5936) - Go: Update gitlab.com/gitlab-org/fleeting/fleeting/metrics/prometheus digest to 1389ec0 [!6422](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6422) - Remove all references to PackageCloud [!6514](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6514) - Refactor extract 
cache key sanitization into dedicated package [!6509](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6509) - Update LabKit Version [!6491](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6491) - Go: Update gitlab.com/gitlab-org/fleeting/taskscaler digest to b5a1223 [!6385](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6385) - Go: Update gitlab.com/gitlab-org/fleeting/taskscaler/metrics/prometheus digest to 3fd95b0 [!6386](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6386) - Exclude image and development files from docs-locale link validation [!6464](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6464) - Chore(metrics): gitlab_runner_jobs_total init value [!6469](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6469) - Update in-toto library [!6481](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6481) - Update dependency ruby to v3.4.8 [!6460](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6460) - Improve macOS runner installation page [!6438](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6438) - Go: Update gitlab.com/gitlab-org/fleeting/taskscaler/metrics/prometheus digest to 891f7bc [!6423](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6423) - Avoid bare URLs in YAML frontmatter in documentation [!6397](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6397) - Refactor cache functionality to remove common package dependency [!6366](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6366) - Update GPG public key [!6468](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6468) - Remove GPG signing color to allow pilot runners to execute package jobs [!6486](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6486) - Go: Update module k8s.io/client-go to v0.35.1 [!6412](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6412) - Go: Update gitlab.com/gitlab-org/fleeting/fleeting 
digest to 7f6dd45 [!6462](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6462) - Align definition of concurrent ID [!6476](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6476) - Fix indentation of list continuation text and link in documentation [!6478](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6478) - Cleanup go.mod and go.sum [!6471](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6471) - Update module google.golang.org/grpc/cmd/protoc-gen-go-grpc to v1.6.1 [!6414](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6414) ## v18.9.0 (2026-02-19) ### New features - Allow passing `env` and `labels` options to `json-file` Docker logging driver [!5638](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5638) (Patrick Decat @pdecat) - Enable Job Inputs feature flag by default [!6275](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6275) - Add CI Jobs to push packages to Pulp [!6073](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6073) - Retry pulp content push commands on specific errors [!6197](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6197) - Instrument input interpolations [!6047](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6047) - Add support for Google Cloud Service universe domain [!6338](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6338) - Upgrade step-runner to version 0.24.0 [!6056](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6056) - Push runner linux packages to Pulp [!6062](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6062) - Add user agent to AWS Secrets Manager integration [!6060](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6060) (derikwang @derik01) - Script function [!6029](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6029) - Filter out obsolete distro releases [!6042](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6042) 
- Pass job timeout in steps RunRequest so server can also enforce job timeouts [!6375](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6375) - Implement user script to step [!6069](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6069) - Helpers/retry: interrupt backoff sleep on context cancellation [!6061](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6061) (Emmanuel 326 @Emmanuel326) - Update the libvirt use doc [!6034](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6034) (Funning @FunningC0217) - Add zos build tags [!5835](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5835) (Joon Lee @jlee_ibm) - Avoid interpolation without defined job inputs [!6374](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6374) - Job Router client - WebSocket support [!6020](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6020) - Upgrade step-runner to version 0.26.0 [!6351](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6351) ### Security fixes - Update RUNNER_IMAGES_VERSION to 0.0.34 [!6066](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6066) ### Bug fixes - Pass S3 session token for access key credentials [!6376](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6376) - Fix FD exhaustion during retry requests [!6041](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6041) - Fix proxy_exec secret masking permissions [!6044](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6044) - Update pkcs7 library [!6016](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6016) - Refactor Connector to allow setup before connection [!6359](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6359) - Fix WebSocket tunnel data race [!6237](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6237) - Avoid breaking change when script syntax is invalid when no inputs used 
[!6417](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6417) - Add service container ID hostname when emulating links functionality [!6043](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6043) - Runner_wrapper: fix backoff retry context cancellation handling [!6064](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6064) (Emmanuel 326 @Emmanuel326) - Update the logic for comparing the urls and tokens [!6296](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6296) (Aayush @Aayush-Saini) - Ensure check_interval takes effect and eliminate race condition between fleet of runners [!6081](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6081) (Pishel65 @pishel65) - Guard against nil FileInfo in filepath.Walk [!6050](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6050) (Bob Singh @bobsingh.dev) ### Maintenance - Adds MR Review instructions focused around Log Field Standardisation [!6353](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6353) - Rename job router RPC package [!6049](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6049) - Update linting configuration from GitLab project [!6352](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6352) - De-duplicate kube warning events [!5926](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5926) - Add command to sync go version in other files [!6378](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6378) - Bump Go to 1.25.7 and RUNNER_IMAGES_VERSION to 0.0.35 [!6370](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6370) - Update crosslink pointing to docs [!6346](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6346) - Correct runner linux package archs [!6038](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6038) - Add status check to launchctl I/O error troubleshooting [!6358](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6358) - Add 
mage to project dependencies [!6348](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6348) - Move JobResponse to spec.Job [!6058](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6058) - Update go packages [!6032](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6032) - Switch to a maintained YAML library go.yaml.in/yaml/v3 [!6065](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6065) - Move versions to variables [!6368](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6368) - Add UniverseDomain configuration for GCS cache [!6362](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6362) - Network: stop retry backoff timer on context cancellation [!6063](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6063) (Emmanuel 326 @Emmanuel326) - Clarify Bash requirement for GitLab Runner shell executor on macOS [!6350](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6350) - Fix miscellaneous Markdown formatting issues [!6347](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6347) - Update redirecting links [!6327](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6327) - Fix supported distros documentation [!6048](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6048) - Kubernetes executor GPU configuration requirements [!6077](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6077) - Use chunk size of 10MB for pulp uploads [!6078](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6078) - Fix headers passed when using CI_JOB_TOKEN [!6075](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6075) - Rename GITLAB_TOKEN to GITLAB_TEST_TOKEN in test utilities [!6045](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6045) (Aayush @Aayush-Saini) - AI Translated Documentation Push: GITTECHA-544 [!6360](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6360) - Include runner_name in all relevant log 
lines [!5883](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5883) - Add pod/container name to build logger fields [!5891](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5891) - Add diagnostics logging for S3 cache AssumeRole operations [!6345](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6345) - Remove EOL spaces in doc files - 2026-01-28 [!6326](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6326) - Incorporate additional change from GitLab project [!6357](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6357) - Document emulated docker links caveats [!6054](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6054) - Restore environment variables to build container [!6333](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6333) ## v18.8.0 (2026-01-15) ### New features - Job Router [!5945](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5945) - Implement mage pulp:supportedOSVersions target [!6024](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6024) - Improve Portability of Git Version Check for z/OS [!6001](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6001) (Kai McGregor @kmcgreg-ibm) - Introduce better job inputs interpolation error [!6014](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6014) - Emulate deprecated Docker links functionality with ExtraHosts [!5980](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5980) - Mage target to create Pulp CLI configuration [!6039](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6039) ### Bug fixes - Ensure buildlogger uses available masks (issue reported by Christian Sousa from Blue Origin Manufacturing, LLC) [!5909](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5909) - Support Git submodules with different hosts via RepoURL insteadOf [!6025](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6025) - Add IPv6 address when 
emulating links functionality [!6027](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6027) - Cleanup dangling virtualbox resources [!5941](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5941) - Add `-protected` suffix to docker cache volumes if any of the cache keys include the `-protected` suffix [!6021](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6021) - Fix connector interface not being exposed for docker+machine and docker-autoscaler executors [!6015](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6015) ### Maintenance - Fix service container log collection wait time [!6019](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6019) - Fix race condition in TestDockerCommandWithRunnerServiceEnvironmentVariables [!6018](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6018) - Fix a typo in the GitLab Runner system requirements page [!6031](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6031) - Fix privileged setting for general Podman usage [!6023](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6023) - Update Golang to 1.25.3 [!5978](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5978) - Make Alpine 3.21 the default base for helper images [!5995](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5995) - docs: Replace `curl | bash` commands with safer steps [!6036](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6036) (Yasssmiine @Yasssmiine-x) - Remove alpine 3.19 [!5993](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5993) - Restructure GitLab Runner installation documentation with card-based navigation [!6030](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6030) - Clarify details about arm helper image, cleanup extra wording, fix link [!6012](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6012) (Ben Bodenmiller @bbodenmiller) - Allow the i18n lint paths job to fail 
[!6017](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6017) ## v18.7.2 (2026-01-08) ### Bug fixes - Support Git submodules with different hosts via RepoURL insteadOf [!6025](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6025) ## v18.7.1 (2025-12-23) ### Bug fixes - Add IPv6 address when emulating links functionality [!6027](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6027) ## v18.7.0 (2025-12-18) ### New features - Add reservation throttling config option [!6010](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6010) - Introduce first iteration of job inputs interpolation behind FF [!5855](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5855) - Emulate deprecated Docker links functionality with ExtraHosts [!5980](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5980) ### Bug fixes - Fix connector interface not being exposed for docker+machine and docker-autoscaler executors [!6015](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6015) - Do not fail install if gitlab-runner service commands not available [!5948](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5948) - Fix shell executor not working with variables that use file variables [!5958](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5958) - Configure submodules to inherit parent repository credentials [!5962](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5962) - Fix "unable to get password from user" errors in shell executor [!5961](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5961) - Fix handling of relative builds dir [!5977](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5977) - Ensure buildlogger uses available masks (issue reported by Christian Sousa from Blue Origin Manufacturing, LLC) [!5909](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5909) - Fix clear-docker-cache script for Docker 29 
[!5969](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5969) - Fix bash shell cleanup to support variable expansion in paths [!5966](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5966) - Ignore user-defined AWS_PROFILE variable in cache uploads [!5986](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5986) - Fix misleading retry message when GET_SOURCES_ATTEMPTS=1 [!5998](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5998) - Support resolving Windows 10.0.26200 helper image [!5984](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5984) ### Maintenance - Add dashboard generation process and usage guidance [!5989](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5989) - Remove CertificateDirectory global [!5956](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5956) - Docs metadata update for group change from Deploy/Environments -> Verify/Runner Core [!5955](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5955) - Bump golang.org/x/crypto [!5991](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5991) - Enable log timestamps by default [!5861](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5861) - AI Translated Documentation Push: GITTECHA-373 [!5934](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5934) - Bump gitlab.com/gitlab-org/fleeting/taskscaler to pull a fix [!5999](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5999) - Fix flaky tests [!5994](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5994) - Regenerate mocks [!5974](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5974) - Bump runner image version [!6007](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6007) - Recursively set up Git submodules credentials [!5997](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5997) - Roll documentation linting tool versions forward 
[!5954](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5954) - Shorten headers to avoid markdownlint exclusion code (Runner) [!5951](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5951) - Use CI_RUNNER_VERSION for arm helper image [!6004](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6004) (Ben Bodenmiller @bbodenmiller) - Bump step-runner to v0.20.0 [!5970](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5970) - AI Translated Documentation Push: GITTECHA-371 [!5932](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5932) - Fix typo of libvirt [!5953](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5953) - Remove EOL spaces in doc files - 2025-11-17 [!5952](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5952) - Fix service container log collection wait time [!6019](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6019) - Document architecture support in `gitlab-runner-helper-images` package [!5976](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5976) - Update GitLab Runner developer docs [!5853](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5853) - Ensure `stable docker images` only runs after all tests pass [!5990](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5990) - Update Kubernetes client-go library to 0.32.10 [!5929](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5929) (Stéphane Talbot @stalb) - Refactor commands, construct a single GitLab client in a single place [!5950](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5950) - Fix race condition in TestDockerServiceHealthcheckOverflow [!5985](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5985) - Tidy go mod [!5973](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5973) - AI Translated Documentation Push: GITTECHA-375 GITTECHA-420 [!5938](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5938) - 
Docs(docker-machine): update docs [!6006](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6006) - Fix a teeny-tiny typo in runner `common/config.go` [!5967](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5967) (Sadra Barikbin @s.barikbin) - Misc refactors [!5949](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5949) - Include go.mod changes when checking modules in pipeline [!5975](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5975) - Fix toml spacing inconsistencies in k8s runner docs [!6003](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6003) (Ben Bodenmiller @bbodenmiller) - Update instructions after UI redesign [!6000](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6000) - Warn users about legacy /ci URL suffix in runner configuration [!5988](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5988) - Update to Go 1.24.11 [!5992](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5992) - Update supported OS distro/version docs [!5959](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5959) - Fix flaky TestCredSetup with -race by removing CI_DEBUG_TRACE [!5987](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5987) - AI Translated Documentation Push: GITTECHA-372 [!5933](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5933) - Allow overriding git credentials in shell integration tests [!5982](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5982) ### Documentation changes - Clarify documentation describing configuration for AWS ASGs with Docker autoscaler [!5996](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5996) (Dan Puttick @dan_oklo) ## v18.6.6 (2025-12-09) ### Bug fixes - Ignore user-defined AWS_PROFILE variable in cache uploads [!5986](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5986) ### Maintenance - Fix flaky tests 
[!5994](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5994) - Update to Go 1.24.11 [!5992](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5992) - Ensure `stable docker images` only runs after all tests pass [!5990](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5990) ## v18.6.4 (2025-12-05) ### Bug fixes - Fix handling of relative builds dir [!5977](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5977) ## v18.6.3 (2025-11-28) ### Bug fixes - Configure submodules to inherit parent repository credentials [!5962](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5962) - Fix bash shell cleanup to support variable expansion in paths [!5966](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5966) ## v18.6.2 (2025-11-25) ### Bug fixes - Fix "unable to get password from user" errors in shell executor [!5961](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5961) ## v18.6.0 (2025-11-17) ### New features - Functions subcommands in runner binary [!5875](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5875) - Add namespace support to GitLab Secrets Manager [!5918](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5918) - Disallow shim execution mode if executor supports native steps execution [!5898](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5898) - Add MachineOptionsWithName configuration option [!5920](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5920) - Add slot-based cgroup support for Docker executor [!5870](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5870) - Add LoongArch (loong64) build support [!5800](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5800) (Mingcong Bai @MingcongBai) - Bootstrap gitlab-helper-binary for Docker [!5892](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5892) - Add correlation_id to "Update job..." 
log line [!5887](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5887) ### Bug fixes - Retry etcd request timeout error in Kubernetes executor [!5877](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5877) - Always pass as a file for custom executor [!5904](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5904) - Handle unexpected panics in trace buffer [!5890](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5890) - Fix removing files recursively for bash on z/OS [!5623](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5623) (Kai McGregor @kmcgreg-ibm) - Externalize git configuration [!5912](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5912) - Fix job logs duplicating as service logs [!5863](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5863) (Markus Kaihola @makeri89) - Expand variables in `image.docker.platform` before pulling images [!5897](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5897) (Bert Wesarg @bertwesarg) ### Maintenance - Update Vale rules from GitLab project [!5884](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5884) - AI Translated Documentation Push: GITTECHA-374 [!5935](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5935) - Add troubleshooting docs for services on windows k8s executor [!5913](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5913) (Erik Petzold @erik.petzold1) - Move build execute prepare/user scripts to their own functions [!5893](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5893) - Docs: Add note for PowerShell versions in the custom executor [!5894](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5894) - Steps execution via Connect() [!5927](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5927) - Clarify docs for supported caching feature [!5910](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5910) - Pull in fixes for CVEs 
[!5895](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5895) - Translation Push - All - For English Anchor Links [!5896](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5896) - Integration Tests for GCP Secrets Manager [!5881](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5881) - Use passed context in NewStepsDocker.Exec() [!5915](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5915) - Remove obsolete code [!5902](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5902) - Build linux/riscv64 platform for registry.gitlab.com/gitlab-org/gitlab-runner image [!5923](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5923) (Ludovic Henry @luhenry) - docs(docker.md): add podman selinux mcs section [!5879](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5879) (vtardiveau @vtardiveau) - Verify all: only use creds for local images [!5914](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5914) - Update cache S3 SSE Key ID docs [!5919](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5919) - Add warning regarding podman and GPUs [!5937](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5937) - AI Translated Documentation Push: GITTECHA-370 [!5917](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5917) - Fix typo [!5924](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5924) - Make structured "job finished" log line with failure_reason and exit_code [!5885](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5885) - [steps] Deflake steps command tests [!5905](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5905) - Add s3:ListBucket to the required permissions for IAM role to access S3 bucket [!5903](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5903) - docs: Update docker-machine version [!5899](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5899) - Add link to UI redesign doc 
[!5925](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5925) - Document error when performing sts:AssumeRoleWithWebIdentity for s3 cache [!5921](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5921) - Minor copy edits in runner docs [!5944](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5944) - Add link checking to i18n docs linting [!5943](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5943) - Document Kubernetes CI [!5786](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5786) - Copy edits to runner docs [!5911](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5911) - Pull in some changes from the security fork [!5906](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5906) - Use RFC3339Nano timestamp format for JSON logs [!5888](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5888) - Remove curly brackets from example [!5942](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5942) - chore: refactor TestAttach using canonical client [!5838](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5838) (Muhammad Daffa Dinaya @mdaffad) ## v18.5.0 (2025-10-13) ### New features - Add Kubernetes context support for executor [!5859](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5859) - Add label support to runner configuration [!5802](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5802) - Implement minimal job confirmation API [!5843](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5843) - Update Usage Log with more job context [!5869](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5869) - Add project name to build logging fields [!5846](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5846) ### Bug fixes - Fix logging of duration_s field [!5874](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5874) - Remove duplicate prefix in docker service containers 
[!5840](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5840) ### Maintenance - Refactor autoscaler provider for readability and update dependencies [!5807](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5807) (Sven Geisler @sge70) - Remove EOL spaces in doc files - 2025-10-07 [!5873](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5873) - Latest Translation Yaml enhancements [!5842](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5842) - Reduce over-linking in GitLab Runner registration documentation [!5834](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5834) - Improve branch selection logic for docs:check Hugo build job [!5866](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5866) - Update Hugo version for Docs test [!5852](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5852) - Update the ubuntu version used as a base image [!5845](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5845) - Docs feedback: Add more context for Parallels executor [!5878](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5878) - Rename Connect() to TerminalConnect() [!5880](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5880) - Improve error logging in docker-machine executor [!5862](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5862) - Update docker device documentation link [!5833](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5833) (Quentin MICHAUD @mh4ckt3mh4ckt1c4s) - Add VMware vSphere to community maintained fleeting plugins [!5818](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5818) (Santhanu V @santhanuv) - Update fleeting plugin and other dependencies [!5830](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5830) - Clean up docs redirects - 2025-09-25 [!5847](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5847) - Fix log field name for docker machine executor 
[!5860](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5860) - Docs feedback: Clarify SSH `StrictHostKeyChecking` default behavior [!5871](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5871) - Fix duplicate test cases [!5857](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5857) - OKR: Reduce over-linking in GitLab Runner manual installation guide [!5854](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5854) - Chore: rename VersionInfo to Info [!5849](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5849) - Upgrades taskscaler for slot info on no capacity [!5872](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5872) - Docs feedback: Make the executor selection workflow diagram readable [!5876](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5876) - Integration Tests for AWS Secrets Manager [!5841](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5841) - Group/stage change: Verify/Runner -> Verify/Runner Core and CI Functions Platform in the GitLab Runner project [!5858](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5858) ## v18.4.0 (2025-09-12) ### New features - Add support for GIT_CLONE_EXTRA_FLAGS for native git clone [!5809](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5809) (Sven Geisler @sge70) - Cache keys can be hashed [!5751](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5751) - Update step-runner version to 0.16.0 [!5825](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5825) ### Bug fixes - Fix arch label for IBM PPC arch [!5827](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5827) - Make docker volumes really unique [!5783](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5783) - Fix cache key sanitation issues, esp. re. 
"cache key files" [!5741](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5741) - Update fleeting plugin dependency [!5784](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5784) - [docker] Separate cache volumes for builds against protected and unprotected [!5773](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5773) - Add new ruleset to cover all docs patterns [!5832](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5832) - Upgrade base images to v0.0.26 [!5829](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5829) - Ensure TOML feature flags are used and take precedence over job env [!5782](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5782) - Remove health check from Vault client call [!5803](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5803) - Add timeouts to all docker-machine command executions [!5789](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5789) - Handle config concurrency deadlock with warnings and documentation [!5759](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5759) ### Maintenance - Update docs links [!5814](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5814) - Document community supported plugins [!5532](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5532) - Remove EOL spaces in doc files - 2025-08-26 [!5804](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5804) - Bump golang to 1.24.6 [!5796](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5796) - Implement Kubernetes allowed users/groups in Runner config [!5724](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5724) - Update fleeting plugin and other dependencies [!5823](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5823) - Update API metric description to bring consistency [!5779](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5779) - Add CI to test Hugo build with 
translated documentation [!5806](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5806) - Add path verifications for localized files - Runner [!5790](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5790) - Refactor registering of commands to be more explicit [!5816](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5816) - Make the `default` helper alpine flavour point to `latest` [!5768](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5768) - Use Hugo 0.148.2 for docs builds [!5815](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5815) - Enable static checks [!5811](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5811) - Fix nanosecond padding of timestamps [!5799](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5799) (Philipp Hahn @pmhahn) - Bump RUNNER_IMAGES_VERSION to 0.0.25 [!5794](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5794) - Revert changes made by Auto Releaser Bot [!5795](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5795) - Migrate golangci lint to version 2 [!5772](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5772) - Cross reference a KB article in the concurrency and limit docs [!5785](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5785) - CI: Skip downloading artifacts of previous jobs [!5808](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5808) (Philipp Hahn @pmhahn) - Use testing linter with t.Context related settings [!5812](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5812) - Add support for signing and notarizing macOS binaries [!5792](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5792) - Change option signature to not return error [!5775](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5775) - Fix flaky unit test with TestDefaultDocker_Exec [!5798](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5798) - Display seconds 
since epoch using a more widely supported method [!5736](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5736) (Kai McGregor @kmcgreg-ibm) - Fix non-semantic linking word [!5801](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5801) - Fix flaky Docker integration tests [!5797](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5797) ## v18.3.1 (2025-09-04) ### Bug fixes - Remove health check from Vault client call [!5803](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5803) ## v18.3.0 (2025-08-21) ### New features - Add native GitLab Secrets Manager support to GitLab Runner [!5733](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5733) - Add method label to status counter metrics [!5739](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5739) - Add status_class and method label to request duration metrics [!5752](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5752) - Update step-runner version to 0.15.0 [!5757](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5757) - Record request retries. 
[!5758](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5758) ### Bug fixes - Update fastzip to v0.2.0 [!5778](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5778) - Fix identity for aws_secrets_manager_resolver [!5747](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5747) - Consume docker auth info in order [!5686](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5686) - [docker] Separate cache volumes for builds against protected and unprotected [!5773](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5773) - Add correlation id header to outgoing requests [!5743](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5743) - Add support for 503 http code when the GitLab instance is in maintenance mode [!5685](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5685) - Enable image executor opts in the kubernetes executor [!5745](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5745) - Fix job duration reporting [!5711](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5711) - Update fleeting plugin dependency [!5776](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5776) - Parse the last line of stdout for UID/GID [!5765](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5765) - Fix proxy-exec store temporary directory [!5780](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5780) - Fix cache key sanitation issues, esp. re. 
"cache key files" [!5741](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5741) - Tighten cache key sanitation [!5719](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5719) ### Maintenance - Add operator pod_spec and deployment_spec docs [!5766](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5766) - Remove unused lock from client struct [!5770](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5770) - Mention that systempaths security_opt is not supported [!5769](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5769) (Andrés Delfino @andresdelfino) - Change link to GA issue for the overwrite pod spec feature [!5732](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5732) - Update a few region/zone IDs in examples [!5720](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5720) - Add a max age of 24h for Kubernetes integration RBAC resources [!5760](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5760) - Chore: Use stable alpine for RISC-V [!5714](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5714) (Aaron Dewes @AaronDewes) - Allow customization of taskscaler and fleeting parameters in config.toml [!5777](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5777) (Sven Geisler @sge70) - Move backoff retry logic to retry requester [!5754](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5754) - Refactor gitlab client unregister runner to table tests [!5670](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5670) - Fix localization codeowners [!5712](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5712) - Lbhardwaj/refactor/unregister command methods [!5742](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5742) - Refactor move retry 429 status code logic to one place [!5727](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5727) - Add test for abstract shell guardGetSourcesScriptHooks 
method [!5702](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5702) - Sync vale rules from main project - Runner [!5753](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5753) - Refactor verify runner tests to table tests and better assertions [!5763](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5763) - Bridge releases with Hosted Runners [!5746](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5746) - Cleanup dead code related to disabled Akeyless secrets integration feature [!5762](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5762) - Drop Alpine Version 3.18 [!5744](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5744) - Refactor kubernetes feature checker tests [!5774](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5774) - Remove EOL spaces in docs [!5749](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5749) - Remove line length rule for markdownlint for i18n files [!5723](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5723) - Fix minor typos with executor interface docs [!5717](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5717) - Correct erroneous compatibility chart features for docker-autoscaler [!5755](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5755) - Docker machine AMI update [!5718](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5718) - Add errorlint linter to golangci-lint settings [!5750](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5750) - Introduce unnecessary-traversal Markdownlint rule to Runner docs [!5735](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5735) - Upgrade prebuilt runner images back to Alpine 3.21 [!5730](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5730) (Aaron Dewes @AaronDewes) - Upstream batch push 2025-07-21 [!5734](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5734) - Refactor errors to wrap errors 
[!5731](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5731) - Minor grammar updates in GitLab Runner README [!5756](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5756) (Anshi Mehta @anshikmehtaa) - Unregister command unit tests [!5738](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5738) - A bit of general copy edit cleanup [!5740](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5740) - Update index file for getting started [!5722](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5722) - Update hosted runners bridge wiki entry [!5767](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5767) - Minor improvements to runner fleet scaling best practices doc [!5737](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5737) - Add a note about the experiment status of GRIT [!5729](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5729) ## v18.2.0 (2025-07-12) ### New features - Add reference to z/OS on the main runner install page [!5647](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5647) - Thread job request correlation ID to git operations [!5653](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5653) - Add functionality to retrieve secrets from AWS SecretsManager [!5587](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5587) (Markus Siebert @m-s-db) - Update docs on how to use env variables for S3 cache access [!5648](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5648) - Improve runner_name metric label coverage [!5609](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5609) (Josh Smith @jsmith25) - Log usage of default image for Docker and K8S executors [!5688](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5688) ### Bug fixes - Tighten cache key sanitation [!5719](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5719) - Add troubleshooting guide for GCS workload identity 
[!5651](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5651) - Skip pre and post checkout hooks for empty [!5677](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5677) - Docs: Fix protocol_port default for SSH [!5701](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5701) - Revert MRs 5531 and 5676 [!5715](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5715) - Reimplement ShortenToken [!5681](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5681) - Put the fips binary in the fips runner image [!5669](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5669) - Set `helper_image_flavor` to `ubi-fips` when fips mode is enabled [!5698](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5698) - Ensure BuildErrors have FailureReason [!5676](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5676) - Fix kubernetes executor helper image override log [!5655](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5655) (Ricard Bejarano @ricardbejarano) - Add `-depth` option to `find -exec rm` invocations [!5692](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5692) ### Maintenance - Add known SELinux issue regarding tmp and pip to the Podman docs [!5661](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5661) - Switch jobs to Kubernetes [!5631](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5631) - Updated the documents [!5596](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5596) (Jithin Vijayan @jithin.vijayan) - Update docker machine versions [!5672](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5672) - Add autoscaler IP address logging options [!5519](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5519) (Brayden White @bwhite117) - Docs: Improve prometheus scraping metrics docs including Operator [!5657](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5657) - Update default ruby 
version in examples [!5693](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5693) - Add aardvark-dns bug notice to podman guide [!5689](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5689) (Felix @f.preuschoff) - Update region from us-central-1 to eu-central-1 [!5713](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5713) - Run fewer vale lint rules on i18n (translation) docs MRs [!5699](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5699) - Add additional info about SHA-pinned images [!5700](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5700) - Bump golang to 1.24.4 [!5668](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5668) - chore: refactor TestAttachPodNotRunning [!5650](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5650) (Muhammad Daffa Dinaya @mdaffad) - Update access a private registry from kubernetes executor [!5622](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5622) - Add configure runner on OCI to the index page [!5649](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5649) - Clean up runner docs [!5697](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5697) - Fix OS version package support docs [!5703](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5703) - Add .markdownlint-cli2.yaml for doc-locale [!5690](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5690) - Fix this test [!5682](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5682) - Update mockery to latest version 3.3.4 and generate mocks [!5646](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5646) - Remove outdated information [!5691](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5691) - Update file _index.md [!5665](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5665) - Update SLSA build type documentation [!5639](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5639) 
- Document tarzstd as an argument for CACHE_COMPRESSION_FORMAT [!5673](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5673) - Clarify documentation on reading S3 credentials from the environment [!5671](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5671) - Add Kubernetes executors docs for helper container memory sizing [!5659](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5659) - Runner doc restructuring: Revamp the admin section index page [!5678](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5678) - Add formatting target for easy fixes with golangci-lint [!5658](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5658) - Add overprovisioning note, fix typos [!5656](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5656) - Refactor gitlab client request job tests to table tests [!5666](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5666) - Fix indent in config.toml example. [!5667](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5667) (Mathieu Gouin @mathieugouin) - Update PowerShell UTF8 integration test [!5493](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5493) (Guillaume Chauvel @guillaume.chauvel) - Update step-runner version to 0.13.0 [!5705](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5705) - Fix docs pipelines for forks [!5664](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5664) - Small typo fixes [!5652](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5652) ## v18.1.0 (2025-06-19) ### New features - Added safety checks for nil sessions and empty endpoints [!5515](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5515) (Zubeen @syedzubeen) - Log a different message for policy jobs with highest precedence [!5628](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5628) - Add adaptive request concurrency 
[!5546](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5546) - Allow to install/manipulate the gitlab-runner service as a user service (systemd) [!5534](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5534) (Tiago Teixeira @tiago.teixeira.erx) - Bump base images to allow native clone to work [!5561](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5561) - Support user as integer for Docker/Kubernetes executor_opts [!5552](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5552) - Thread job request correlation ID to git operations [!5653](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5653) - make preemptive mode configurable [!5565](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5565) (Pascal Sochacki @pascal.sochacki) - Add queue_size and queue_depth metrics [!5592](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5592) - Log policy job information [!5591](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5591) - Add more request_concurrency related metrics [!5558](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5558) - Enable powershell native clone [!5577](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5577) (Guillaume Chauvel @guillaume.chauvel) - Add support for Overlay Volume Mounts when Podman is used with Docker Executor [!5522](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5522) (Napuu @napuu) ### Bug fixes - Stop following symlinks when archiving documents [!5543](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5543) - Docker+autoscaler: Properly clean up when a job times out or is cancelled [!5593](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5593) - Fix AWS GovCloud with AWS S3 cache [!5613](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5613) - Fix final job duration handling [!5583](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5583) - Document DinD DNS 
behavior with network-per-build feature [!5611](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5611) - Run git config cleanup before creating the template dir [!5598](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5598) - Fix authentication towards HTTP docker registries [!5329](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5329) (François HORTA @fhorta1) - Switch the default for FF_GIT_URLS_WITHOUT_TOKENS back to false [!5572](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5572) - Handle the new glrtr- prefix [!5580](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5580) ### Maintenance - Update golang.org/x/net to fix CVE-2025-22872 [!5594](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5594) - Added missing commas [!5579](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5579) - Docker executor image clarification and macOS virtualization info [!5571](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5571) (Charles Uneze @network-charles) - Fix rules for the unit test job [!5618](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5618) - Tidy runner_wrapper/api [!5604](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5604) - Limit Unit test job to only MR pipelines for forks [!5608](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5608) - Making timeout to acquire a new instance configurable within gitlab-runner [!5563](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5563) (Moritz Scheve @schevmo) - Remove outdated information [!5620](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5620) - Add correlation_id to request logs [!5615](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5615) - Bump base-images to 0.0.18 [!5633](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5633) - Fix pipelines for forks [!5607](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5607) - 
Update redirected links [!5605](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5605) - Improve pipelines for community, fork, docs MRs [!5576](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5576) - Fix protoc binary download for macos [!5570](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5570) - Document how to install GitLab Runner on z/OS manually [!5641](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5641) - Upgrade Go to v1.24.3 [!5562](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5562) - Clean up stray whitespace [!5585](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5585) - Remove kaniko references in GitLab Runner docs [!5560](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5560) - Update step-runner dependency version to 0.11.0 [!5645](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5645) - Update dates in examples [!5621](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5621) - Clean up tables in misc runner docs [!5589](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5589) - Docs: more bold cleanup [!5586](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5586) - Document how to set environment variables in GitLab Runner Helm chart [!5559](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5559) - Restrict danger-review to canonical GitLab forks [!5640](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5640) - Push GitLab Documentation Translations Upstream [2025-06-09] [!5630](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5630) - docs: Add custom executor "shell" property [!5578](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5578) (Guillaume Chauvel @guillaume.chauvel) - Remove randomness of TestProcessRunner_BuildLimit failures [!5588](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5588) - Run Hugo build test on the correct Docs branch 
[!5545](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5545) - Install local dev tools and dependency binaries in one go [!5632](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5632) - chore: install tool binaries in tmp bin [!5629](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5629) - docs: Remove a line that is not accurate to the current usages of GRIT [!5601](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5601) - Config options refactor [!5373](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5373) - Move internal docs into development directory [!5595](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5595) - Update CHANGELOG to take into account 17.10.x to 18.0.x releases [!5643](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5643) - Update docker machine in docs [!5603](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5603) - Remove outdated mention [!5582](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5582) - Added Experimental Status [!5602](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5602) - Remove outdated content [!5597](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5597) - Use mockery constructors in tests [!5581](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5581) - Auto-format all remaining runner tables [!5584](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5584) - Update the cntlm link to the new fork [!5556](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5556) (Dan Fredell @DFredell) - Update docker-machine version in docs [!5617](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5617) ## v18.0.3 (2025-06-11) ### Bug fixes - Fix AWS GovCloud with AWS S3 cache [!5613](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5613) - Run Git config cleanup before creating the template dir 
[!5598](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5598) ### Maintenance - Remove randomness of TestProcessRunner_BuildLimit failures [!5588](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5588) ## v17.11.3 (2025-06-11) ### Bug fixes - Fix AWS GovCloud with AWS S3 cache [!5613](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5613) ## v17.10.2 (2025-06-11) ### Bug fixes - Fix AWS GovCloud with AWS S3 cache [!5613](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5613) ## v17.11.2 (2025-05-22) ### Bug fixes - Handle the new glrtr- prefix [!5580](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5580) - Fix final job duration handling [!5583](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5583) ## v18.0.2 (2025-05-21) ### Bug fixes - Handle the new glrtr- prefix [!5580](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5580) - Fix final job duration handling [!5583](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5583) ## v18.0.1 (2025-05-16) ### Bug fixes - Switch the default for FF_GIT_URLS_WITHOUT_TOKENS back to false [!5572](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5572) ## v18.0.0 (2025-05-15) ### New features - Add exponential backoff to execute stage retries [!4517](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4517) - Add support for uid:gid format for Kubernetes executor options [!5540](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5540) - Add adaptive request concurrency [!5546](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5546) - Add more request_concurrency related metrics [!5558](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5558) - Suppress unnecessary warnings when Kubernetes user values are empty [!5551](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5551) - Shells: Implement the use of Git-clone(1) again 
[!5010](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5010) - Adding How To Configure PVC Cache [!5536](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5536) - Improve runner build failure reasons [!5531](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5531) ### Bug fixes - Add support for submodules in the exec command [!75](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/75) (Lucas @fresskoma) - Reimplement pull-policy validation [!5514](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5514) - Update fleeting dependency [!5535](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5535) - Add Cloud provider error message details for cache upload failures to cloud storage targets [!5527](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5527) ### Maintenance - FF_GIT_URLS_WITHOUT_TOKENS defaults to true [!5525](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5525) - Wait for MR image before starting runner incept [!5528](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5528) - Remove outdated mentions [!5510](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5510) - Update markdownlint for JP Docs & Push Translations [!5547](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5547) - Remove github.com/docker/machine library dependency [!5554](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5554) - Promote FF_RETRIEVE_POD_WARNING_EVENTS to a config print_pod_warning_events [!5377](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5377) - Deprecate ServiceAccountName [!5523](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5523) - Create Japanese documentation directory [!5513](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5513) - Bump base images to address CVE-2024-8176 [!5518](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5518) - Retry packagecloud 504 errors 
[!5520](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5520) - Remove outdated registration in test script [!5511](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5511) - Use Hugo 0.145.0 for docs builds [!5521](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5521) - Sync vale rules from main repo [!5549](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5549) - Remove section referring to unapplied breaking change [!5529](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5529) - Add recommendation to read Readme for plugin before installing [!5530](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5530) - Add troubleshooting section for AZRebalance issue [!5494](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5494) - Upgrade taskscaler dependency with updated heartbeat functionality [!5553](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5553) - Update GPG key expiry date [!5539](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5539) - Add instructions for installing prebuilt images while using binaries [!5508](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5508) - Add note regarding support for only Azure VMSS Uniform Orchestration mode [!5526](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5526) - Remove "Autoscaler algorithm and parameters" from the GitLab Runner instance group autoscaler page [!5517](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5517) - Add argo_translation.yml for continuous translation process [!5541](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5541) - Clean up tables in runner docs [!5548](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5548) - Make dependant Docker images optional for runner incept [!5538](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5538) ## v17.11.1 (2025-05-05) ### Bug fixes - Update fleeting dependency 
[!5535](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5535) ## v17.11.0 (2025-04-14) ### New features - Add ubuntu arm64 pwsh runner helper image [!5512](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5512) - kubernetes/docker executor: add job timeout as annotations/labels [!5463](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5463) (Gordon Bleux @UiP9AV6Y) - docs: add information about GRIT support and min_support to docs [!5460](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5460) - GLR | winrm+https and Protocol Port [!5301](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5301) (Brayden White @brayden-lm) - docs: add section to docs about who is using GRIT [!5462](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5462) - Fix cache's Last-Modified header by ensuring it is set to UTC [!5249](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5249) (clyfish @clyfish) - Specify which "user" shall run the job from the gitlab-ci.yaml for k8s executor [!5469](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5469) - Allow overriding FILTER_FLAG in clear-docker-cache script [!5417](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5417) - docs: Add documentation about Advanced Configuration for GRIT [!5500](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5500) - Expose started_at and finished_at values in Usage Log job details [!5484](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5484) - Ensure automatic `git gc` operations run in the foreground [!5458](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5458) (Ben Brown @benjamb) - Enable FF_USE_NATIVE_STEPS by default [!5490](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5490) - docs: add contributing section for GRIT docs [!5461](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5461) ### Bug fixes - RmFilesRecursive should not attempt to 
delete directories [!5454](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5454) - Sign Windows runner binary executables [!5466](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5466) - Clean git config [!5438](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5438) - Add note mentioning PathTooLongException regression on Windows [!5485](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5485) - Update docs re. ECS Fargate image override [!5476](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5476) - Fix powershell stdin data race [!5507](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5507) (Guillaume Chauvel @guillaume.chauvel) - Change directories and files permissions for bash shell when FF_DISABLE_UMASK_FOR_KUBERNETES_EXECUTOR is enabled [!5415](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5415) - Fix usage log timestamp generation [!5453](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5453) - Fix cache extractor redownloading up-to-date caches for Go Cloud URLs [!5394](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5394) - Fix CI_JOB_TOKEN storage and removal of credentials [!5430](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5430) (Guillaume Chauvel @guillaume.chauvel) - Authenticate runner requests with JOB-TOKEN instead of PRIVATE-TOKEN [!5470](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5470) ### Maintenance - SNPowerShell is only for Windows, remove OS check [!5498](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5498) (Guillaume Chauvel @guillaume.chauvel) - ServiceAccountName deprecation [!5501](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5501) - Consistent CI yaml formatting [!5465](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5465) - Mask sensitive config fields for debug logs [!5116](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5116) 
(ZhengYuan Loo @loozhengyuan) - Make sure that inline config can't override the 'default: false' setting [!5436](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5436) - chore: consolidate regexes into a single regex [!5390](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5390) - remove outdated mentions [!5499](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5499) - Update target milestone from 18.0 to 20.0 for runner registration token [!5487](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5487) - docs: Clarify how autoscaler idle_time is calculated [!5474](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5474) - Update GitLab Runner Ubuntu support matrix to pin to end of standard support [!5424](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5424) - Update link to tech writing course in `gitlab-runner` repo [!5433](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5433) - Fix broken test due to sort order [!5479](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5479) - Run unit tests in the Kubernetes cluster [!5420](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5420) - feat: describe how to use FARGATE_TASK_DEFINITION [!5439](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5439) (Jonathan @KJLJon) - Remove v0.2 of SLSA as no longer supported [!5475](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5475) - Backfill missing changelog entries for v17 releases [!5450](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5450) - Make Alpine 3.19 the default base for helper images [!5435](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5435) - Update linting tools in project [!5503](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5503) - Docs: Update link to documentation labels in runner repo [!5472](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5472) - Trigger downstream 
pipeline to test OS packages [!5416](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5416) - docs: Add warning against sharing autoscaling resources [!5445](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5445) - Add job to rebase branches on main [!5497](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5497) - Eliminate dependencies needed in `yaml:lint` CI job [!5467](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5467) - Update docker-machine version to v0.16.2-gitlab.34 [!5451](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5451) - Deploy to Kubernetes cluster with KUBERNETES_DEPLOY_BRANCH condition [!5489](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5489) - Update step-runner module to v0.8.0 [!5488](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5488) - Migrate to mockery's packages configuration [!5480](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5480) - Add Support Warning to Fargate custom tutorial [!4911](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4911) - Bump base images for CVE [!5483](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5483) - Docs: Hugo migration - Updating gitlab-runner doc links [!5448](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5448) - Separate GitLab Runner autoscaler content [!5468](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5468) - Add a prerequisite to Windows Runner documentation [!5473](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5473) - Update example [!5509](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5509) - Add "file name too long" troubleshooting error due to job token breaking change [!5496](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5496) - Update docker machine version [!5482](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5482) - Revert "Don't make this pipeline depend on the 
downstream pipeline" [!5449](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5449) - Remove mention of GitLab 18.0 removal [!5437](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5437) - Simplify git credential get [!5447](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5447) (Guillaume Chauvel @guillaume.chauvel) - Back up unsigned binaries [!5478](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5478) - Improve concurrent-related messages and docs [!5143](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5143) - Add permissions docs for Operator containers [!5444](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5444) - Fix k8s integration tests resource groups [!5502](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5502) - GitLab Runner instance group autoscaler doc improvements [!5492](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5492) - Add note to not install runner in AMI and standardize capitalization, note no multi-zone instance group support [!5495](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5495) - Fix incept tests [!5434](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5434) - Add how to exclude image [!5335](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5335) - Handle vulnerability against CVE-2025-30204 [!5481](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5481) - Fix package tests pipeline trigger [!5452](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5452) ## v17.10.1 (2025-03-26) ### Bug fixes - RmFilesRecursive should not attempt to delete directories [!5454](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5454) - Fix usage log timestamp generation [!5453](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5453) ## v17.10.0 (2025-03-19) ### New features - Add support for fleeting heartbeats/connectivity check before instance acquisition 
[!5340](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5340) - Add GPUs support for services [!5380](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5380) (Sigurd Spieckermann @sisp) - Add add-mask functionality to proxy-exec [!5401](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5401) - [docker] Expand variables in volume destinations [!5396](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5396) - Update runner process wrapper [!5349](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5349) - Add devices support on services [!5343](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5343) (Sigurd Spieckermann @sisp) - Add proxy shell execution [!5361](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5361) ### Security fixes - Merge branch 'sh-cache-upload-env-file' into 'main' [!5408](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5408) ### Bug fixes - Allow OS overwrite via ShellScriptInfo [!5384](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5384) - Downgrade prebuilt runner helper images to Alpine 3.19 [!5426](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5426) - Fix HTTP retries not working properly [!5409](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5409) - Make submodule `--remote` more resilient [!5389](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5389) - Fix runner_wrapper gRPC API client [!5400](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5400) - Fix inconsistent arguments when creating a service in tests [!5355](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5355) (Sigurd Spieckermann @sisp) - Exclude helpers/runner_wrapper/api/v* tags from version evaluation [!5427](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5427) - Clean git config [!5442](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5442) - Support non-ASCII characters in gzip 
artifact headers [!5186](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5186) - Only add step-runner volume mount when native steps is enabled [!5398](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5398) - Fix json schema validation warnings [!5374](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5374) - Detect bucket location when not provided [!5381](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5381) - Clean git config [!5438](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5438) - Fix table rendering [!5393](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5393) ### Maintenance - Update vale rules for runner docs [!5388](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5388) - Fix Vale issues in Runner docs: Part 17 [!5405](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5405) - Avoid using deprecated class for review apps [!5382](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5382) - Fix Vale issues in Runner docs: Part 21 [!5419](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5419) - Update the example versions [!5413](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5413) - Merge 17.9.1 CHANGELOG into main [!5410](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5410) - Fix CVE-2024-45338 by updating golang.org/x/net [!5404](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5404) - Fix autoscaler policy table format [!5387](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5387) - Use pipeline helper-binary for custom, instance and ssh integration tests [!5386](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5386) - Update docker-machine version in docs [!5366](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5366) - Update route map for runner review apps [!5365](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5365) - Update docs content to 
use Hugo shortcodes [!5362](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5362) - Update zstandard version to 1.5.7.20250308 [!5411](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5411) - Fix CVE-2025-27144 by upgrading github.com/go-jose/go-jose/v3 [!5403](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5403) - Use correct values for log_format [!5376](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5376) - Upgrade Ubuntu image to 24.04 [!5428](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5428) - Bump runner base images version to 0.0.10 [!5423](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5423) - Only use docs-gitlab-com project for review apps [!5364](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5364) - Add new supported runner package distros [!5425](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5425) - Bump base image version to 0.0.9 [!5407](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5407) - Bump Go to version 1.23.6 [!5326](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5326) - Fix Vale issues in Runner docs: Part 14 [!5383](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5383) - Fix rules for trigger deploy kube job [!5369](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5369) - Fix small typo [!5422](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5422) - Make UBI image wait for downstream pipeline success [!5360](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5360) - Add clarification on the support policy for the docker machine executor to autoscale.md [!5359](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5359) - Use TW Team Docker image for site build test [!5391](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5391) - Consistently use local helper image in CI 
[!5371](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5371) - Add clarification on the support policy for the docker machine executor to dockermachine.md [!5358](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5358) - Update feature flag docs template for Hugo site launch [!5258](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5258) - Fix Vale issues in Runner docs: Part 20 [!5418](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5418) - Fix Vale issues in Runner docs: Part 19 [!5412](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5412) - Fix Vale issues in Runner docs: Part 18 [!5406](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5406) - Added executor supported OS and selection criteria - part 1 [!5345](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5345) - Remove duplicate hugo code to fix broken master [!5368](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5368) - Add comment regarding scale in protection for an AWS auto scaling group [!5348](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5348) - Update links to docs from runner docs [!5363](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5363) - Update links for jobs and tags [!5375](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5375) - Update documentation to point to Rake task to deduplicate tags [!5356](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5356) - Pin zstandard version and specify checksum [!5395](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5395) - Move trigger deploy to kubernetes to a deploy stage [!5372](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5372) - Fixed Vale issues [!5378](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5378) ## v17.9.2 (2025-03-20) ### Bug fixes - [17.9] Downgrade prebuilt runner helper images to Alpine 3.18 
[!5431](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5431) - Clean git config [!5441](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5441) - Clean git config [!5438](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5438) - [17.9] Fix HTTP retries not working properly [!5432](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5432) ## v17.9.1 (2025-03-07) ### Security fixes - Merge branch 'sh-cache-upload-env-file' into 'main' [!5408](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5408) ## v17.9.0 (2025-02-20) ### New features - Add support for fleeting heartbeats/connectivity check before instance acquisition [!5340](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5340) - Remove lock files left over in .git/refs [!5260](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5260) (Ben Brown @benjamb) - Autogenerate documentation for supported linux distros/versions [!5276](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5276) - use '-f' to allow for race condition (issue #38447) [!5324](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5324) (Christian Moore @moorehfl) - Allow custom naming of service container for the k8s executor [!4469](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4469) - Mask by default all known token prefixes [!4853](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4853) - Introduce new custom executor build exit code [!5028](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5028) (Paul Bryant @paulbry) - Add GRIT documentation [!5263](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5263) - Expand default labels on build pods [!5212](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5212) (Zalan Meggyesi @zmeggyesi) - Add finished job usage data logging [!5202](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5202) - Add 
gitlab_runner_job_prepare_stage_duration_seconds histogram [!5334](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5334) - Inject the step-runner binary into the build container [docker executor] [!5322](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5322) - Run rpm_verify_fips against FIPS images [!5317](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5317) - Support ImageLoad for prebuilt images [!5187](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5187) - Update step-runner docker executor integration docs [!5347](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5347) - Add labeling to Usage Logger [!5283](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5283) ### Security fixes - Bump base images version to 0.0.6 [!5346](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5346) ### Bug fixes - Upgrade RUNNER_IMAGES_VERSION to v0.0.4 [!5305](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5305) - Fix Role ARN support with S3 Express buckets [!5291](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5291) - Fix Windows image gitlab-runner-helper path [!5302](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5302) - Image pusher fixes [!5294](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5294) - Fix step-runner inject container run [!5354](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5354) - Improve job final update mechanism [!5275](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5275) - Revert "Merge branch 'sh-fix-role-arn-s3-express' into 'main'" [!5308](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5308) - Deflake pod watcher tests [!5310](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5310) - Fix runner image missing tag [!5289](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5289) - Do not create containers with duplicate env vars 
[!5325](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5325) - Upgrade RUNNER_IMAGES_VERSION to v0.0.3 [!5300](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5300) - Fix race in pod watcher test [!5296](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5296) - Fix runner release bugs [!5286](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5286) - Document how to configure S3 Express buckets [!5321](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5321) - Make custom_build_dir-enabled optional [!5333](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5333) - Push the helper image packages to S3 [!5288](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5288) - Create copy of aliased helper images, not symlinks [!5287](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5287) - Disable interactive git credentials [!5080](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5080) - Add clear-docker-cache script to runner image [!5357](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5357) - Gracefully handle missing informer permissions [!5290](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5290) - Catch external pod disruptions / terminations [!5068](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5068) - Fix a Vault kv_ v2 error [!5341](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5341) - Document apt limitation and required workaround [!5319](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5319) - CI: add release on riscv64 [!5131](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5131) (Meng Zhuo @mengzhuo1203) - Fix missing default alpine images [!5318](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5318) ### Maintenance - Add clarification on the support policy for the docker machine executor to dockermachine.md 
[!5358](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5358) - Update docs content to use Hugo shortcodes [!5362](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5362) - Update self-managed naming in all Runner docs [!5309](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5309) - Run ubi images with BUILD_COMMIT_SHA and PARENT_PIPELINE_ID [!5244](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5244) - Fix formatting and add link to GRIT docs [!5273](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5273) - Replace deprecated field name with the new name [!5298](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5298) - Bump base image version [!5282](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5282) - Docs: Fix broken external links in runner docs [!5344](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5344) - Deploy each commit from main to kubernetes cluster [!5314](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5314) - Fix flaky logrotate write test [!5292](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5292) - Update step-runner library version to 0.3.0 [!5272](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5272) - Make sure deploy to kubernetes works only on main [!5352](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5352) - Add global operator config options docs [!5351](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5351) - Update offering badges to standard name [!5303](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5303) - Update feature flag docs template for Hugo site launch [!5258](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5258) - Docs update - Update Architecture naming for GRIT [!5274](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5274) - Properly handle shortening for tokens with prefix glcbt- 
[!5270](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5270) - Document userns_mode by providing links to Docker docs [!5194](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5194) - Document select executors information as an unordered list [!5268](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5268) - Update links to docs from runner docs [!5363](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5363) - Docs: Render RPM distro table correctly [!5338](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5338) - Fix helper-bin-host target [!5252](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5252) - Reduce busy work in main job loop [!5350](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5350) - Add riscv64 binary download links [!5304](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5304) (Meng Zhuo @mengzhuo1203) - Remove hosted runner section from under Administer [!5299](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5299) - Update docker-machine version [!5339](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5339) - More debug logging for artifact uploads & troubleshoot docs [!5285](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5285) - Update taskscaler to get ConnectInfo fix for state storage instances [!5281](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5281) (Matthias Baur @m.baur) - Use embedded VCS information rather than add manually [!5330](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5330) - Add clarification on the support policy for the docker machine executor to autoscale.md [!5359](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5359) - Fix windows image zstd compressing [!5323](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5323) - Clean up unused GetUploadEnv() in cache code [!5265](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5265) 
- Document proxy and self-signed certificate error [!5280](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5280) - Add service_account parameter in [runners.kubernetes] section [!5297](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5297) - Docs: add the mount_propagation parameter to the k8s executors documentation [!5353](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5353) (Georgi N. Georgiev @ggeorgiev_gitlab) - Roll docs linting tooling forward [!5284](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5284) - Rename index and move titles to frontmatter [!5327](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5327) - Direct-use of the `rpm` command adversely impacts the `yum`/`dnf` database... [!5311](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5311) (Thomas H Jones II @ferricoxide) - Disable Windows Defender properly [!5279](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5279) - Add support for building docker images for local dev [!5271](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5271) (Anthony Juckel @ajuckel) - Add a CI job to test the docs website build [!5306](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5306) - Add a template for kubernetes feature toggle [!5315](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5315) - Remove obsolete note regarding Alpine DNS issues [!5320](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5320) (Craig Andrews @candrews) ## v17.8.4 (2025-03-20) ### Security fixes - Use a dotenv file to store cache environment variables [!5414](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5414) ### Bug fixes - Clean git config [!5440](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5440) - Clean git config [!5438](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5438) ## v17.8.3 (2025-01-23) ### Bug fixes - Upgrade RUNNER_IMAGES_VERSION to v0.0.4 
[!5305](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5305) - Fix Role ARN support with S3 Express buckets [!5291](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5291) ### Maintenance - Run ubi images with BUILD_COMMIT_SHA and PARENT_PIPELINE_ID [!5244](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5244) ## v17.8.2 (2025-01-22) ### Bug fixes - Upgrade RUNNER_IMAGES_VERSION to v0.0.4 [!5305](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5305) - Fix Role ARN support with S3 Express buckets [!5291](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5291) - Fix Windows image gitlab-runner-helper path [!5302](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5302) - Upgrade RUNNER_IMAGES_VERSION to v0.0.3 [!5300](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5300) - Image pusher fixes [!5294](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5294) ### Maintenance - Run ubi images with BUILD_COMMIT_SHA and PARENT_PIPELINE_ID [!5244](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5244) ## v17.8.1 (2025-01-17) ### Bug fixes - Fix runner release bugs [!5286](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5286) - Fix Windows image gitlab-runner-helper path [!5302](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5302) - Image pusher fixes [!5294](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5294) - Push the helper image packages to S3 [!5288](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5288) - Upgrade RUNNER_IMAGES_VERSION to v0.0.3 [!5300](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5300) - Fix runner image missing tag [!5289](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5289) - Create copy of aliased helper images, not symlinks [!5287](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5287) - Upgrade RUNNER_IMAGES_VERSION to v0.0.4 
[!5305](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5305) - Fix Role ARN support with S3 Express buckets [!5291](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5291) ### Maintenance - Bump base image version [!5282](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5282) - Run ubi images with BUILD_COMMIT_SHA and PARENT_PIPELINE_ID [!5244](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5244) ## v17.8.0 (2025-01-13) ### New features - Add mount propagation mode for hostpath mounts on kubernetes [!5157](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5157) (Brinn Joyce @brinn.joyce) - Add RoleARN to handle both upload and download S3 transfers [!5246](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5246) - Expand variables for the docker platform with unit tests [!5146](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5146) (John Sallay @jasallay) - Document RoleARN configuration parameter [!5264](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5264) - Add support for Windows 24H2 [!5170](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5170) (Martin Blecker @AdrianDeWinter) ### Bug fixes - Fix docker network config for Windows [!5182](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5182) - Limit UploadARN session duration to 1 hour [!5230](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5230) - Incompatible pull policies should not be a retryable error [!5256](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5256) - Fix issue #29381: Missing labels from Docker config when starting service containers [!4913](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4913) (Andrew Rifken @arifken) - Fix runner deb package upgrade [!5251](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5251) ### Maintenance - Fix incorrect references to packagecloud.io 
[!5242](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5242) - Fixed Vale issues in Runner docs: Part 9 [!5239](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5239) - Upgrade Akeyless SDK to v4 [!5234](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5234) (Amir Maor @amir.m2) - Update documentation for manual installation of the new packages [!5247](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5247) (Victor De Jong @victordejong) - Note Reuse previous clone if it exists support for k8s [!5248](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5248) (Ben Bodenmiller @bbodenmiller) - Add note about being unable to change some settings via config template due to known issue [!5240](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5240) - A couple of minor tweaks to the gitlab-runner-helper-images package [!5262](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5262) - Correct spelling in comment [!5181](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5181) (MarlinMr @MarlinMr) - Clarify docker container support policy [!5232](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5232) - Fixed Vale issues in the Configure GitLab Runner on OpenShift doc [!5208](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5208) - Remove misleading information about the initiation of the session server [!5238](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5238) (Nicolas @nicoklaus) - Use runner base images [!5148](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5148) - Update 17-7 changelogs [!5259](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5259) - Let Docker site redirect to latest version [!5222](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5222) - Improve documentation for Azure workload identities [!5221](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5221) - Change anyuid service 
account to gitlab-runner-app-sa [!5237](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5237) - Put `RPM` in backticks [!5255](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5255) - Fix a 404 error in the Runner repo [!5254](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5254) - Clarify ServiceAccount of the runner manager [!5250](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5250) - Add Troubleshooting for docker autoscaler executor [!5220](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5220) - Runner cache s3 table cleanup [!5267](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5267) - Fix fork pipelines by ensuring windows tag refs exist [!5241](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5241) - Update steps version to 0.2.0 [!5219](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5219) - Update step-runner library version to 0.3.0 [!5272](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5272) - Update golang.org/x/crypto to v0.31.0 [!5253](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5253) ## v17.7.1 (2025-01-17) ### Bug fixes - Fix runner deb package upgrade [!5251](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5251) ### Maintenance - Update step-runner library version to 0.3.0 [!5272](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5272) - A couple of minor tweaks to the gitlab-runner-helper-images package [!5262](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5262) ## v17.7.0 (2024-12-19) ### New features - Move exported helper images into separate package [!5190](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5190) ### Bug fixes - Update gitlab.com/gitlab-org/fleeting/fleeting version and other deps [!5207](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5207) - Fix flaky step-integration test 
[!5199](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5199) - [k8s] Do not wait poll timeout when container has terminated [!5112](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5112) - Fix docker network config for Windows [!5182](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5182) - Use GoCloud URLs for Azure downloads [!5188](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5188) - Merge Outstanding Security MRs [!5171](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5171) ### Maintenance - Add 'Example' column to Docker runner advanced configuration docs [!5177](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5177) - Bump UBI base images to the newest 9.5.x versions [!5185](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5185) - Revert "Merge branch 'avonbertoldi/git-lfs-is-bad' into 'main'" [!5169](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5169) - Make build environment cache friendly [!5179](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5179) - Doc/runner spelling exceptions [!5162](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5162) - Moved some contextual info to runner registration section [!5178](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5178) - Note in logs when runner manager is being unregistered [!5166](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5166) - Custom.md: Fix typo [!5163](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5163) (Per Lundberg @perlun) - Fix podman troubleshooting doc [!5211](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5211) (Thorsten Banhart @banhartt) - Bump Go compiler version to 1.23.2 [!5153](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5153) - Skip homedir fix test on Windows to not block the release [!5164](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5164) - Update supported runner OS 
versions [!5217](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5217) - Add docker connection error to troubleshooting [!5165](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5165) - Skip testKubernetesBuildCancelFeatureFlag as it's flaky [!5228](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5228) - Update file configuring_runner_operator.md [!5198](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5198) - Add Windows Server 2025 to the Windows version support policy [!5183](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5183) - Actually update step-runner version to 0.2.0 [!5227](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5227) - Migrate Kubernetes integration tests to use the Runner Kubernetes Cluster [!5175](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5175) - Remove redundant prepare runner-incept variables job [!5197](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5197) - Fix apt package install version string and change to a newer version in the docs [!5180](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5180) (Thorsten Banhart @banhartt) - Kubernetes API reference analyzer based on types instead of field names [!5158](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5158) - Update steps version to 0.2.0 [!5219](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5219) - Fix `limitations` in `gitlab-runner` repo [!5201](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5201) - Fix Vale issues in Kubernetes executor doc [!5196](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5196) - Add feature flag issue templates [!5156](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5156) - Fix TestStackDumping test freezing [!5210](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5210) - Separate kubernetes integration tests resource groups 
[!5223](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5223) - Make fuzz variable mask job required to pass [!5209](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5209) - Remove semgrep-sast CI rules [!5184](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5184) - Speed up windows test runs [!5174](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5174) - Added information about Docker Autoscaler and Instance executors in the executor selection table [!5161](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5161) - Bump docker-machine to v0.16.2-gitlab.30 [!5218](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5218) - Fix fuzz variable mask test [!5135](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5135) - Remove the term worker from the Plan and operate a fleet of instance or group runners doc [!5189](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5189) - Remove links to interactive web terminals [!5176](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5176) - Make homedir easier to test [!5168](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5168) - Add node tolerations to kubernetes integration tests [!5229](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5229) - Document how to use Azure workload identities for the cache [!5204](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5204) - Simplify kubernetes integration test names [!5024](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5024) - Clarify docker container support policy [!5232](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5232) - Update alpine versions [!5214](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5214) - Add ability to create review apps by using the GitLab Docs Hugo project [!5205](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5205) - Update dependency danger-review to v2 
[!5206](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5206) - Fix Vale issues in Runner docs: Part 5 [!5191](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5191) - Update experiment-beta page path [!5193](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5193) ## v17.6.1 (2024-12-19) ### Bug fixes - Fix docker network config for Windows [!5182](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5182) ## v17.6.0 (2024-11-20) ### New features - Invoke step-runner from $PATH instead of / [!5140](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5140) - Native Step Runner Integration for Docker Executor [!5069](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5069) - Really publish sles/opensuse runner packages [!5101](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5101) - Terminate job and display error when services are oom killed on the kubernetes executor [!4915](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4915) (Zach Hammer @zhammer) - Add taskscaler state storage options [!5061](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5061) - Create a GitLab Runner process wrapper [!5083](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5083) - Warn user if no fleeting plugins to install [!5115](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5115) (ZhengYuan Loo @loozhengyuan) ### Bug fixes - [k8s] Terminate PowerShell Script children processes when cancelling the job through UI [!5081](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5081) - Fix path-style requests with Upload ARN functionality [!5150](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5150) - Remove trailing "/" from cache fallback keys [!5076](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5076) - Omit canonical ports for S3 endpoints [!5139](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5139) - Fix 
UploadRoleARN URL when other regions are used [!5113](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5113) - Fix home directory detection [!5087](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5087) - Upgrade github.com/mvdan/sh to v3.9.0 [!5085](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5085) - Disable FF_GIT_URLS_WITHOUT_TOKENS by default [!5088](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5088) - Fix S3 cache access for buckets located outside US [!5111](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5111) - Fix auth issues with FF_GIT_URLS_WITHOUT_TOKENS: 1 [!5103](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5103) - Wait for k8s pod to become attachable as part of poll period in exec mode [!5079](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5079) - Default to us-east-1 region for AWS SDK v2 [!5093](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5093) - Hide Pod wait to be attachable behind a FF [!5098](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5098) - Fix fleeting plugin installation architecture selection [!5090](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5090) ### Maintenance - Remove fault tolerance section [!5154](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5154) - Update CONTRIBUTING.md and LICENSE [!5133](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5133) - Fix Vale issues in Runner docs: Part 4 [!5160](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5160) - Added docker autoscaler and instance executors info [!5128](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5128) - Replace the term shared runner with instance runner in Runner docs [!5104](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5104) - Add AWS hosted MacOS instance troubleshooting note [!5082](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5082) - 
Upgrade github.com/BurntSushi/toml, dario.cat/mergo [!5086](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5086) - Add comment to help future users [!5070](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5070) - Documented Podman with Runner K8s executor [!5056](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5056) - Update CHANGELOG for v17.5.3 [!5136](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5136) - Fix mage k8s:generatePermissionsDocs intermittent test failures [!5107](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5107) - Update CI release task to upload with AWS CLI [!5106](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5106) - Remove broken link [!5118](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5118) (Zubeen @syedzubeen) - Set gitlab-advanced-sast job to run on code changes [!5097](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5097) - Fix Vale issues in Runner docs: Part 1 [!5149](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5149) - Make docker and helper image jobs optional [!5141](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5141) - Install git-lfs in ubi image from upstream RPM repo [!5122](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5122) - Follow up MR to add changes to MR 5120 [!5123](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5123) - Restore 2nd method of restarting after config [!5077](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5077) - Improve helpers/cli/FixHOME [!5089](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5089) - Add integration tests to cover service container behaviour [!5144](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5144) - Skip TestBuildContainerOOMKilled integration test [!5151](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5151) - Update GitLab Runner CHANGELOG after v17.5.2, 
v17.4.2, v17.3.3, v17.2.3 patches [!5120](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5120) - Stop testing UseWindowsLegacyProcessStrategy for KillerTest [!5102](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5102) - Skip homedir fix test on Windows to not block the release [!5164](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5164) - Update GitLab Runner CHANGELOG after v17.5.1 patch [!5121](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5121) - Fix a technical error in the Podman doc [!5138](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5138) - Remove github.com/tevino/abool in favor of atomic.Bool [!5072](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5072) - Removed fault tolerance section [!5159](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5159) - Doc/executor intro feedback [!5155](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5155) - Note in logs when runner manager is being unregistered [!5166](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5166) - Delete Runner topics marked for removal [!5124](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5124) - Minor documentation corrections [!5110](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5110) - Explain relationship between limit and burst in runner autoscaler configs [!5100](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5100) - Transfer MR short commit SHA to Runner Incept [!5130](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5130) - Update Configuration of the metrics HTTP server for runners [!5142](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5142) - Replace config_exec_args with config_args [!5109](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5109) (Davide Benini @davidebenini) - Add tests for service name empty [!5065](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5065) 
- Fixed Vale issues Runner docs: Part 2 [!5152](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5152) ## v17.5.5 (2024-12-19) ### Bug fixes - Fix docker network config for Windows [!5182](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5182) ## v17.5.4 (2024-11-19) ### Maintenance - Fix mage k8s:generatePermissionsDocs intermittent test failures [!5107](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5107) ## v17.5.3 (2024-10-31) ### Bug fixes - Fix UploadRoleARN URL when other regions are used [!5113](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5113) - Fix S3 cache access for buckets located outside US [!5111](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5111) ## v17.5.2 (2024-10-22) ### New features - Publish SLES and openSUSE runner packages [!5101](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5101) ### Bug fixes - Fix fleeting plugin installation architecture selection [!5090](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5090) - Default to us-east-1 region for AWS SDK v2 [!5093](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5093) - Hide Pod wait to be attachable behind a feature flag [!5098](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5098) ### Maintenance - Stop testing `UseWindowsLegacyProcessStrategy` for `KillerTest` [!5102](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5102) ## v17.4.2 (2024-10-22) ### Bug fixes - Remove trailing "/" from cache fallback keys [!5076](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5076) ## v17.3.3 (2024-10-22) ### Bug fixes - Remove trailing "/" from cache fallback keys [!5076](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5076) ## v17.2.3 (2024-10-22) ### Bug fixes - Remove trailing "/" from cache fallback keys [!5076](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5076) ## v17.5.1 (2024-10-18) ### Bug fixes - Disable 
`FF_GIT_URLS_WITHOUT_TOKENS` by default [!5088](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5088) - Fix home directory detection [!5087](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5087) ## v17.5.0 (2024-10-17) ### New features - Document fault tolerance feature [!5058](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5058) - Add namespace support for DOCKER_AUTH_CONFIG [!4727](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4727) (Tobias Rautenkranz @tobiasrautenkranz) - Support AWS S3 multipart uploads via scoped temporary credentials [!5027](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5027) - Limit token exposure [!5031](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5031) - Add support for Azure Managed Identities in cache [!5007](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5007) - Publish runner and helper packages for SLES and OpenSUSE [!4993](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4993) (Aazam Thakur @Alcadeus0) ### Bug fixes - Cancel stage script upon job cancellation in attach mode [!4813](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4813) - Make invalid service image name a build error [!5063](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5063) - Allow pull_policy to be unset when defining allowed_pull_policies [!4943](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4943) (Mike Mayo @magicmayo) - Resolve "get "panic: EOF" when register runners run in a container" [!5012](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5012) - Adjust autoscaler policy on config reloading [!5064](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5064) - Require only build container to start in Kubernetes [!5039](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5039) - Track kubernetes pull policies based off of the container name 
[!5036](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5036) - Downgrade go-fips base image to ubi8 [!5040](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5040) - Fix graceful termination of jobs on Windows [!4808](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4808) (Nils Gladitz @nilsgladitz) - Switch to AWS SDK for S3 cache access [!4987](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4987) - Remove quotes around IP address in ssh invocation in example [!4899](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4899) (Yassine Ibr @yassineibr1) - Wait for k8s pod to become attachable as part of poll period [!3556](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/3556) (Jimmy Berry @jimmy-outschool) - Prevent script dump on job cancellation through UI [!4980](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4980) - Fix `pod_annotations_overwrite_allowed` parsing error [!5032](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5032) - Fix bug in scripts/logs dir for k8s executor [!4893](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4893) (Thomas John Randowski @WojoInc) - Address CVE-2024-41110/GHSA-v23v-6jw2-98fq by upgrading github.com/docker/docker and github.com/docker/cli [!4925](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4925) ### Maintenance - Update CHANGELOG after patches release [!5073](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5073) - Helm chart install page: start structural revisions [!5038](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5038) - Refactor container entrypoint forwarder [!5018](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5018) - docs: set admin access for rancher desktop [!5062](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5062) - Update gocloud.dev for AWS client-side rate limiting fix 
[!5066](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5066) - Use latest markdownlint-cli2 and linter configuration [!5055](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5055) - Add use case to docs for system_id and reusing configurations [!5051](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5051) - Bump docker-machine to v0.16.2-gitlab.29 [!5047](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5047) - Docs: Link to Docker certificate docs [!5023](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5023) - Update GitLab release version for DualStack and Accelerate config [!5042](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5042) - Fix capitalization [!5015](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5015) (maximilian @maximiliankolb) - Use Windows test code coverage reports [!5041](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5041) - Update Docker image for docs review apps [!5020](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5020) - Remove trailing whitespace from GitLab Runner docs [!5074](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5074) - Updating intro sentence again [!5025](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5025) - Bump golang to 1.22.7 [!5035](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5035) - Fix community Merge Request pipeline parse errors [!4973](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4973) (Thorsten Banhart @banhartt) - Docker install: start with line-level cleanups [!5033](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5033) - Improve flaky waitForRunningContainer test [!5016](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5016) - Enable timestamps for CI/CD jobs [!5048](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5048) - Refactor the linux repository page to follow CTRT 
[!5019](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5019) - [k8s] more explicit docs on OS, Arch, KernelVersion selection [!5009](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5009) - Squelch jsonschema warning about DualStack config [!5022](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5022) - Clean up stray whitespace in gitlab-runner project [!5052](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5052) - Update interactive runner registration documentation [!5008](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5008) - Break apart Helm chart optional config into new page [!5054](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5054) - [docs] Fix concurrent_id being used when it is concurrent_project_id [!5026](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5026) (Nils @NilsIRL) - K8s install page: move troubleshooting info, tackle 2 subheads [!5034](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5034) - Add deprecation note to the Docker Machine autoscale configuration docs page [!5060](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5060) - Implement distroless UBI pattern [!4971](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4971) - Add deprecation note to the Docker Machine executor docs page [!5059](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5059) - Docker install page: clean up installation steps [!5037](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5037) - Fix code coverage visualization not working in merge requests [!5029](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5029) - Skip TestDockerCommand_MacAddressConfig on Windows OS [!4999](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4999) - Resolve "Proxy configuration docs missing NO_PROXY instructions" [!5017](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5017) - Remove license scanning 
template [!4735](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4735) - Add markdownlint-cli2 as asdf dependency [!5053](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5053) - Remove note that the Azure Fleeting plug-in is in beta from docs [!5046](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5046) - Fix example trust relationship in UploadRoleARN config [!5043](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5043) - Address line-level findings in Kubernetes install page [!5030](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5030) ## v17.4.1 (2024-10-10) ### Bug fixes - Require only build container to start in Kubernetes [!5039](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5039) - Downgrade go-fips base image to ubi8 [!5040](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5040) ## v17.3.2 (2024-10-10) ### Bug fixes - Downgrade go-fips base image to ubi8 [!5040](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5040) - Prevent script dump on job cancellation through UI [!4980](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4980) ## v17.2.2 (2024-10-10) ### Bug fixes - Prevent script dump on job cancellation through UI [!4980](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4980) ## v17.1.2 (2024-10-10) ### Bug fixes - Downgrade go-fips base image to ubi8 [!5040](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5040) - Prevent script dump on job cancellation through UI [!4980](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4980) ## v17.0.3 (2024-10-10) ### Bug fixes - Prevent script dump on job cancellation through UI [!4980](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4980) ## v16.11.4 (2024-10-10) ### Bug fixes - Prevent script dump on job cancellation through UI [!4980](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4980) ## v16.10.1 (2024-10-10) ### Maintenance - Remove 
license scanning template [!4735](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4735) ## v17.4.0 (2024-09-19) ### New features - Forward entrypoint logs [!4883](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4883) - Akeyless support [!4975](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4975) ### Bug fixes - Custom executor script shouldn't end with '.' when no file extension [!4898](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4898) - Fix Docker+machine download URL [!5014](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5014) - Resolve "`gitlab-runner start` throws "FATAL: Failed to start GitLab-runner: exit status 134" when started prior to being logged in" [!4995](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4995) - Fix segfault in unregisterRunner when called with no arguments [!4932](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4932) - Prevent script dump on job cancellation through UI [!4980](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4980) - Make image generation fail upon failure to download dumb-init ubi-fips [!4955](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4955) ### Maintenance - Remove the GitLab Hosted Runners as an example of security risks with DIND [!5011](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5011) - CTRT: Refactor the intro for Install GitLab Runner [!4974](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4974) - Add link to debug trace docs page in the Runner Advanced Configuration doc [!4938](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4938) - Add reference to troubleshooting to install step 3c of the Install GitLab Runner on macOS doc [!4991](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4991) - Copy edit GitLab Runner system services doc [!4981](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4981) - Update the version of 
Ruby referenced in the Setup macOS runners docs. [!4977](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4977) - Add links to new autoscaling executors to the [[runners]] section in the Runner Advanced Configuration doc [!4930](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4930) - Update the default container registry of the helper images [!4935](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4935) - Fix fleeting plugin version constraint format documentation [!4985](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4985) (joola @jooola) - Add GitLab Advanced SAST to CI/CD config [!4965](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4965) - Change `Docker` to container in Kubernetes section of the Runner Advanced Configuration doc [!4957](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4957) - Update tooling for local development, fix FIPS requirements [!4937](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4937) - Add `Instance` and `Docker Autoscaler` executors to the default build dir section in the Runner Advanced Configuration doc [!4964](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4964) - Change `docker` to `container` in the image_pull_secrets parameter in the Runner Advanced Configuration doc [!4959](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4959) - Docker integration test for MacAddress setting [!4967](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4967) - Add `docker autoscaler` and `instance` executors to the runners custom build section in the runner Advanced Configuration doc [!4963](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4963) - Change `executor` description in the [[runners]] section in the Runner Advanced Configuration doc [!4931](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4931) - Runner instance generally available 
[!4998](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4998) - Update install GitLab Runner documentation for Amazon Linux [!4934](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4934) - Remove `Beta` from the [[runners.autoscaler]] section in the Runner Advanced Configuration doc [!4952](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4952) - Update the intro to the Shells table in Runner Advanced Configuration [!4941](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4941) - Add link to the `services` docs page in the Runner Advanced Configuration doc [!4948](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4948) - Change URL for nesting to docs page entry in the Runner Advanced Config doc [!4953](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4953) - Update 3.18 and 3.19 alpine info [!4944](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4944) - Fix broken links [!4936](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4936) - Use latest docs Docker image and linting configuration docs [!5001](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5001) - Revise install step 2 in the Install GitLab Runner on macOS doc [!4989](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4989) - Change the intro section in the Install GitLab Runner on macOS doc [!4988](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4988) - Don't depend on k8s.io/Kubernetes [!4984](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4984) - Remove `Alpine 3.16` from the runner images section in the Advanced Configuration doc [!4960](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4960) - Skip TestDockerCommand_MacAddressConfig on Windows OS [!4999](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4999) - CTRT: Refactor install GitLab Runner [!4983](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4983) - Add token 
newline troubleshooting item [!4966](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4966) - Add an example config for check interval [!4928](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4928) - Delete `experiment` label from `idleScaleFactor` [!4950](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4950) - Change designation of Fleeting plugin for Azure from BETA to generally available [!5013](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5013) - Updated Ruby version from 3.3 to 3.3.x [!4979](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4979) - Update the config.TOML example in Runner Advanced Config [!4927](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4927) - Add intro to the [[runners.machine.autoscaling]] section [!4951](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4951) - Add link to the Git LFS page in the Runner Advanced Configuration doc [!4939](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4939) - Fleeting.md: fix bin path [!4914](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4914) (Naftoli Gugenheim @nafg) - Bump UBI base image from `9.4-1194` to `9.4-1227` [!4997](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4997) - Add rules to semgrep-sast job [!4923](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4923) - Update the Global Section in the Runner Advanced Config doc [!4926](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4926) - Docs(Kubernetes): mention AWS ASG Zone rebalancing [!5002](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5002) (Ummet Civi @ummetcivi) - Clarify allowed_pull_policies default [!4969](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4969) - Re-order sentences in the Helper image registry section of the Runner Advanced Config doc [!4961](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4961) - Change `images` to 
`container images` in the Kubernetes section of the Runner Advanced Configuration doc [!4958](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4958) - Add reference to Docker executor in the [runners.Docker] section [!4942](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4942) - Change to terminate process in the runner Advanced Configuration doc [!4947](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4947) - Revise install Step 3a in the Install GitLab Runner on macOS doc [!4990](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4990) - Backfill test for waitForRunningContainer [!4996](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4996) - Address GHSA-xr7q-jx4m-x55m by updating Google.golang.org/grpc to 1.64.1 [!4946](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4946) - Change text on the use of runner in offline environments in Runner Advanced Configuration doc [!4962](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4962) - Add `instance` and `docker-autoscaler` executors to the executors table [!4940](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4940) - Improve flaky waitForRunningContainer test [!5016](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5016) - Update `dumb-init` version on GitLab Runner images [!4956](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4956) - Bump Docker-machine to 0.16.2-GitLab.28 [!4924](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4924) - Change intro in the [runner.Kubernetes] section in the Runner Advanced Config doc [!4954](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4954) - Add troubleshooting step to resolve install error on Apple M1 to the Install GitLab Runner on macOS [!4992](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4992) - Update from EOL Ruby 2.7 to Ruby 3.3 in examples used in the Runner Advanced Configuration Doc 
[!4978](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4978) - Expand session server configuration example [!4929](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4929) ### Documentation changes - Add planning issue template [!4986](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4986) ## v17.3.1 (2024-08-21) ### Bug fixes - Make image generation fail upon failure to download dumb-init ubi-fips [!4955](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4955) ## v17.3.0 (2024-08-09) ### New features - Add debug log message for resolving Docker credentials [!4902](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4902) - Add Git_STRATEGY of "empty" to support clean build directories [!4889](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4889) (Nathan Cain @nathan.cain) ### Security fixes - Update azidentity dependency [!4903](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4903) ### Bug fixes - Gracefully stop long running processes when using the shell executor - Take 2 [!4896](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4896) - Fix default log format to FormatRunner [!4910](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4910) - Use pull policy configuration for services [!4854](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4854) - Upgrade fleeting: fix tcpchan deadline for tunnelled connections [!4917](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4917) - Cancel stage script upon job cancellation in attach mode [!4813](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4813) - Reset log level and format to default values when not set [!4897](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4897) - Prevent additional newline in job in attach mode [!4901](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4901) ### Maintenance - Fix formatting in runner registration doc 
[!4921](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4921) - Remove funlen [!4912](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4912) - Add a Runner Docker image tag for the build's revision [!4862](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4862) - Apply split Vale rules to project [!4918](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4918) - Skip `TestRunCommand_configReloading` unit test on windows platform [!4916](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4916) - Re-enable Windows Docker Git-lfs tests [!4900](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4900) - Remove Git 1.8.3.1 test [!4856](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4856) - Merge back 16.11, 17.0, 17.1 and 17.2 patches in main branch [!4905](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4905) - Work around a syntax highlighting problem [!4920](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4920) - Remove Beta plugin warning for AWS [!4919](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4919) - Clarify where to install the fleeting plugin [!4894](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4894) - Docs maintenance: Add internal pages to ignore list [!4895](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4895) - [k8s] Fix `testKubernetesWithNonRootSecurityContext` integration tests [!4892](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4892) - Remove unneeded notes in tabs [!4922](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4922) - Bump UBI base image from `9.4-1134` to `9.4-1194` [!4909](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4909) - Add runner path requirement [!4904](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4904) - Update index.md [!4908](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4908) - Add missing requirement 
for network_mtu [!4890](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4890) - Add note about using Workload Identity Federation for GKE [!4884](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4884) (Nabil ZOUABI @nabil_zouabi) - [k8s] Fix `CaptureServiceLogs` integration tests [!4891](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4891) ## v17.2.1 (2024-07-25) ### Bug fixes - Cancel stage script upon job cancellation in attach mode !4813 ## v17.1.1 (2024-07-25) ### Bug fixes - Cancel stage script upon job cancellation in attach mode !4813 ### Maintenance - Remove Git 1.8.3.1 test !4856 ## v17.0.2 (2024-07-25) ### Bug fixes - Cancel stage script upon job cancellation in attach mode !4813 ## v16.11.3 (2024-07-25) ### Bug fixes - Cancel stage script upon job cancellation in attach mode !4813 ## v17.2.0 (2024-07-18) ### New features - Add mount propagation volume option for Kubernetes executor !4784 (Subhashis Suara @subhashissuara) - Always attempt to retrieve pod warning events !4852 - Ability to turn on/off running umask 0000 command for Kubernetes executor !4842 - Make directories for scripts and logs configurable for k8s Executor !4805 (Thomas John Randowski @WojoInc) ### Bug fixes - Ensure that dialing fleeting instance can be canceled !4874 - Upgrade fleeting/taskscaler: fixes reservation/unavailability instance churn !4865 - Shorten network names created by FF_NETWORK_PER_BUILD !4830 (Ricardo Katz @rikatz) - Fix Kubernetes executor runner API permission !4800 (EuVen @euven) - Upgrade fleeting and taskscaler to fix instance churn/runaway !4843 - Fix default runner alpine flavour version !4850 - Upgrade fleeting and taskscaler to fix instance churn/runaway !4844 - Update clear-Docker-cache script to keep cache volumes !4847 - Set file permissions before creating runner rpm/deb packages !4835 - Fix parsing of --Kubernetes-host_aliases command-line flag !4837 ### Maintenance - Change product lifecycle status of the Fleeting 
Plugin for AWS from Beta to generally available !4887 - Manually update linting configuration and update Vale version !4881 - Update docs for modifying Git LFS endpoints !4886 - Add note about Runner install requirement !4885 - Update dependency danger-review to v1.4.1 !4860 - Docs maintenance: Fix 404 links !4882 - Fix periods example in [[runners.autoscaler.policy]] documentation. !4863 (Jan Jörke @janjoerke) - Add note that AWS and GCP Fleeting plugins is beta !4879 - Bump golang to 1.22.5 !4878 - Fix more k8s integration test !4869 - Manually update Vale and markdownlint rules and fix docs !4873 - Add note to runner autoscaling for runner manager on fault tolerant design !4870 - Add note to runner autoscaling for runner manager on preemptive mode !4871 - Add note on the runner `tls_verify` setting to the docs !4872 - Add note to runner autoscaling on use of credentials file !4868 - Add note to runner autoscaling for runner manager on containers !4867 - Fix testKubernetesPodEvents integration test !4866 - Fix link that is broken when published !4864 - Bump Docker+machine version to v0.16.2-GitLab.27 !4859 - Use the danger-review component !4828 - Address CVE-2024-6104 in GitHub.com/hashicorp/go-retryablehttp !4858 - Bump UBI base image from `9.4-949` to `9.4-1134` !4857 - Remove Git 1.8.3.1 test !4856 - Add note about `FF_ENABLE_JOB_CLEANUP` feature flag !4849 - Suppress golangci-lint config deprecations and warnings !4845 - Fix some external links in the project !4851 - Document k8s executors for read-only root filesystem !4848 - Replace sysvinit-adjacent commands with systemd equivalents !4841 - Bump to Go 1.22.4 !4838 - Specify in which scenarios missing index has been seen !4839 - Create issue template for bumping golang !4840 - Use upstream spdystream again !4836 ## v17.0.1 (2024-07-05) ### Bug fixes - Upgrade fleeting and taskscaler to fix instance churn/runaway !4844 ### Maintenance - Remove Git 1.8.3.1 test !4856 ## v16.11.2 (2024-07-05) ### Bug fixes - 
Upgrade fleeting and taskscaler to fix instance churn/runaway !4843 ### Maintenance - Remove Git 1.8.3.1 test !4856 ## v17.1.0 (2024-06-20) ### New features - Add AFTER_SCRIPT_IGNORE_ERRORS variable allow not ignoring after_script errors !4758 (Tim @timmmm) - Add Kubernetes configurable retry backoff ceiling !4790 (Nabil ZOUABI @nabil_zouabi) - Remove Beta from runner autoscaler supported public cloud docs section !4823 - Update Fleeting docs to reflect GCP plugin transition to GA !4820 - Change status of Docker Autoscaler and Instance to GA !4821 - Log ETag of extracted cache archive if available !4769 - Allow reading run-single configuration from a config file !4789 (Tobias Ribizel @upsj) - Add steps shim !4803 ### Bug fixes - Fix panic when err is nil on retry for k8s executor !4834 - Fix linter violation !4827 - Support handling Docker images with @digest !4557 - Fix Docker client intermittently failing to connect to unix socket !4801 - Override helper images entrypoint on Docker import !4793 - Fix jsonschema validation warning for Docker services_limit !4782 (Malte Morgenstern @malmor) - Propagate exit codes through nested pwsh calls !4715 (Andy Durant @AJDurant) - Fix jsonschema validation warning for Kubernetes retry_limits !4786 (Malte Morgenstern @malmor) ### Maintenance - Add exact commands to fix signed by unknown authority !4833 - Document troubleshooting steps for 500 error creating tags !4831 - Remove BETA from the autoscaler docs !4832 - Upgrade GitHub.com/MinIO/MinIO-go to v7.0.70 !4819 (Mathieu Quesnel @xmath279) - Remove GitHub.com/samber/lo dependency from main application !4811 - Update Docker Autoscaler executor docs !4822 - Enable tarzstd archive format for caches !4807 - Bump Docker+machine version to v0.16.2-GitLab.26 !4816 - Upgrade ubi fips base image from ubi8 to ubi9 !4814 - Check links in more files !4815 - Upgrade helper image Git-lfs to 3.5.1 !4812 - Update runner registration documentation !4809 - Update docs linting Docker images 
!4806 - Add note about KMS Alias syntax - Documentation !4792 - Fix external links in docs !4802 - Remove trailing whitespace !4799 - Bump to Go 1.22.3 !4795 - Move docs-related CI jobs to the same file !4787 - Docs: match example to text !4794 (Anton Dollmaier @a.dollmaier) - Bump to Go 1.22.2 !4759 - Use lowercase for beta and experiment !4788 - Made beta and experiment lowercase !4785 ## v17.0.0 (2024-05-16) ### New features - Add fleeting docs describing new plugin installation method !4749 - Support Google Cloud secrets from projects other than the one containing the WIF pool !4718 (Rich Wareham @rjw57) - Interpret failed pods as system failures rather than script failures for Kubernetes executor !4698 (Daniel Barnes @dbarnes3) - Implement new GCS Cache adapter that uses Google Cloud SDKs auth discovery defaults !4706 - Add cpuset_mems option to Docker executor !4725 (Karthik Natarajan @karthiknatarajan) - Add docs for Runner Operator in disconnected network environments !4716 - Add support for taskscaler scale throttle !4722 - Add the ability to disable the automatic token rotation !4721 ### Security fixes - Stop installing tar in ubi fips base image !4703 ### Bug fixes - Revert "Merge remote-tracking branch 'origin/16-11-stable'" !4761 - Upgrade fleeting and taskscaler for fixes !4745 - Upgrade fleeting and taskscaler for fixes !4745 - Remove runner from config when unregister with token !4750 (Karthik Natarajan @karthiknatarajan) - Correctly set CI_JOB_STATUS to timedout/canceled when appropriate !4717 - Fix fleeting install subcommand for Windows !4753 - Fix fleeting install subcommand for Windows !4753 - Correctly set CI_JOB_STATUS to timedout/canceled when appropriate !4717 - Upgrade fleeting-artifact to fix plugin installation !4748 - Fix buildlogger write race !4734 - Upgrade fleeting-artifact to fix plugin installation !4748 - Fix buildlogger write race !4734 ### Maintenance - Require ci prep before incept !4762 - Upgrade runner-linters image and 
golangci-lint for Go 1.22 !4776 - Improve upgrade docs !4780 - Clean up docs redirects, runner - 2024-05-02 !4756 - Added missing apostrophe !4781 - State clearly one job = one VM = one container !4774 - Add information regarding Beta feature !4757 - Updating docs tier badge rules !4779 - Fix broken link and typo !4775 - Add badge info to autoscaler page !4772 - Provide examples and clarify how MachineOptions work for the MachineDriver !4768 - Update GitLab Runner Version !4773 - Fix stuck Windows 1809 jobs !4771 - Remove unsupported GitLab versions from Verify Runner group docs !4764 - Remove support for old pre_clone_script and post_clone_script configuration settings !4767 - Clarify "circular" docs links !4738 - Remove slsa_v02 from artifact_metadata !4760 - Remove cmd shell !4754 - Remove shared runner naming from GitLab Runner docs !4744 - Switch to Lychee for link checking !4737 - Convert custom Kubernetes error to retryError !4662 (Nabil ZOUABI @nabil_zouabi) - Remove license scanning template !4735 - Fix jsonschema validation warning for monitoring struct !4724 (Malte Morgenstern @malmor) - Updated examples to avoid Helm error !4752 - April: fixes trailing whitespace in GitLab Runner project !4751 - Remove legacy shell quoting and FF_USE_NEW_SHELL_ESCAPE feature flag !4742 - Remove `gitlab-runner exec` command !4740 - Add SSH: handshake failed to troubleshooting !4743 - Update autoscaler config option documentation !4730 - Fix dead documentation anchor links in README !4733 (Markus Heidelberg @marcows) - Remove terminationGracePeriodSeconds !4739 - Remove license scanning template !4735 - Add prerequisite info to runner registration token section !4714 - Document the Beta status of the Google Cloud plugin for fleeting !4726 - Add security risk on runner debug for Shell executors !4586 ## v16.11.1 (2024-05-03) ### Bug fixes - Upgrade fleeting and taskscaler for fixes !4745 - Correctly set CI_JOB_STATUS to timedout/canceled when appropriate !4717 - Fix 
fleeting install subcommand for Windows !4753 - Upgrade fleeting-artifact to fix plugin installation !4748 - Fix buildlogger write race !4734 ### Maintenance - Remove license scanning template !4735 ## v16.11.0 (2024-04-18) ### New features - Taskscaler and fleeting now use logrus logger !4719 - Add autoscaler instance_ready_command option !4709 - Implement timestamped logs !4591 - Add Jfrog Artifactory Secrets engine as External Secrets option in Hashicorp Vault !4486 (Ivo Ivanov @urbanwax) - Add fleeting subcommand to manage fleeting plugins !4690 - Extend GitInfo with RepoObjectFormat to store object format !4645 ### Bug fixes - Fix log timestamps fixed format !4712 - Avoid errors when creating files in pwsh 2.0 !4661 (Robin Lambertz @roblabla) - Give up on the trace finalUpdate if it keeps on failing !4692 - Fix test coverage report job !4701 - Fix Makefile.build.mk for armv7l (#36976) !4682 (Alexander Puck Neuwirth @APN-Pucky) - Rename SSH.Config to common.SshConfig to avoid misleading config.TOML validation error !4694 - Update GO_FIPS_VERSION to match GO_VERSION !4687 - Revert "Merge branch 'avonbertoldi/27443/graceful-shutdown-shell' into 'main'" !4686 ### Maintenance - Bump Go version to 1.21.9 !4711 - Re-enable SAST scanning !4683 - Update a few dependencies !4700 - docs: Remove period in the middle of a sentence !4708 (Markus Heidelberg @marcows) - Runner: Updates docs-lint image for new SVG use !4697 - Remove extra square brackets in podman section !4705 - Check Docker version before using deprecated arg !4699 (Anthony Juckel @ajuckel) - Change beta to pre in version string !4681 - Upgrade GitHub.com/BurntSushi/TOML to v1.3.2 !4695 - Docs Update - Missing Hosted Runner Renaming !4693 - Use fork of moby/spdystream to fix race condition !4685 - Fix typo in note !4691 ## v16.10.0 (2024-03-21) ### New features - [Experimental] Define monitoring threshold for job queue duration !4480 - Enable feature cancel_gracefully !4655 - Add support for cgroup_parent 
setting on Docker executor !4652 (Stefano Tenuta @stenuta) - Add runner token to header !4643 - Add support for isolating jobs to individual namespaces !4519 (Markus Kostrzewski @MKostrzewski) ### Security fixes - FedRAMP/CVE: Don't install wget in ubi images !4660 ### Bug fixes - Revert "Merge branch 'avonbertoldi/27443/graceful-shutdown-shell' into 'main'" !4686 - Build is now canceled if autoscaled instance disappears !4669 - Add jobs to compile all tests !4651 - Set UseWindowsLegacyProcessStrategy to false by default !4659 - Really silence error when `exec`ing on container that does not exist !4665 - Gracefully stop long running processes when using the shell executor !4601 - Call Value() instead of Get() when validating variables !4647 - Call Value() instead of Get() when validating variables !4647 - Fix get IP on parallels executor on macOS intel !4642 (Carlos Lapao @cjlapao) - Fix fallback_key for local cache !4349 (Andreas Bachlechner @andrbach) - Revert default runner script timeout !4621 ### Maintenance - Update windows support section !4641 - Upgrade fleeting library !4679 - Document connection_max_age parameter !4678 - Remove broken link to Kubernetes docs' emptyDir !4656 (Victor M. 
@victoremepunto) - Bump Docker+machine version to v0.16.2-GitLab.25 !4676 - Document how to troubleshoot Docker Machine issues !4677 - Update plugin status, link timeline/epic !4674 - Runner: updates last two redirecting links !4675 - Fix typo (Telsa -> Tesla) !4673 (Per Lundberg @perlun) - Enabling Vale for badges !4671 - Service_linux.go: Remove non-existent syslog.target from service file !4667 (Martin @C0rn3j) - Refactor network.newClient to use Go functional option pattern !4648 - Finishes link fixes in Runner docs !4670 - Fix Experimental -> Beta references !4668 (Per Lundberg @perlun) - Updating SaaS to be .com !4666 - Update runner sizes !4664 - Fix reference to project issue !4663 - Skip TestBuildGitFetchStrategyFallback on Windows cmd !4653 - Update no-trailing-spaces configuration for consistency !4658 - Remove unneeded trailing spaces !4644 - Skip TestBuildGitFetchStrategyFallback on Windows cmd !4653 - Restructure Kubernetes executor page part 2 !4650 - Restructure Kubernetes executor page !4649 - Add-vale-plugin-to-runner-dev-env-setup !4639 - Update usage of GCP to Google Cloud !4623 - Git_LFS_VERSION is no longer required to be set !4636 (Matthew Bradburn @mbradburn-ext) ## v16.9.1 (2024-02-28) ### Bug fixes - Call Value() instead of Get() when validating variables !4647 ### Maintenance - Skip TestBuildGitFetchStrategyFallback on Windows cmd !4653 ## v16.9.0 (2024-03-01) ### New features - Add Kubernetes configurable retry limits !4618 - Support cancelation of job script with resumed execution of later stages !4578 - Add support for s3 accelerate in runner cache !4313 (ArtyMaury @ArtyMaury) - Kubernetes: add automount_service_account_token option !4543 (Thorsten Banhart @banhartt) - Register runner using Docker exec !4334 (Zhiliang @ZhiliangWu) - Clear worktree on Git fetch failures !4216 (Tim @timmmm) - Kubernetes: add option to skip explicit imagePullSecrets configuration !3517 (Miao Wang @shankerwangmiao) - Limit number, memory and cpu of 
services container for Docker runners !3804 (Kevin Goslar @kev.go) - Provide early build setting validation !4611 - Allow FreeBSD to be used with Docker executor (unofficially supported) !4551 (Ben Cooksley @bcooksley) - Add support for service health checks for the Kubernetes executor !4545 - Limit the max age of a TLS keepalive connection !4537 - Retry on tls: internal error message for k8s executor !4608 - Retry on connection refused k8s error message !4605 - Increment package build number !4595 - Make Kubernetes API retries configurable !4523 (Michał Skibicki @m.skibicki) - Add support for Node Tolerations overwrite !4566 (Marc Ostrow @marc.ostrow) - Rewrite ci package script to mage !4593 ### Security fixes - Address CVE-2023-48795 - upgrade golang.org/x/crypto !4573 ### Bug fixes - Call Value() instead of Get() when validating variables !4647 - Correctly use volume driver for all volumes !4579 (Mitar @mitar) - Revert default runner script timeout !4621 - Avoid recursing into submodules on checkout and fetch !3463 (Ciprian Daniel Petrisor @dciprian.petrisor) - Fix edge case where Git submodule sync is not being called !4619 - Fix file variable quoting issue with cmd shell !4528 (Robin Lambertz @roblabla) - Allow zero value for cleanup_grace_period_seconds !4617 - Use Windows job to improve process termination !4525 - Helper image container should always use native platform !4581 - Helper image container should always use native platform !4581 ### Maintenance - Skip TestBuildGitFetchStrategyFallback on Windows cmd !4653 - Fix warning event integration test !4633 - Retry package cloud push on HTTP error 520 !4635 - Allow explicit cpu/memory service resources overwrites for the Kubernetes executor !4626 (Tarun Khandelwal @tkhandelwal1) - Fix rpmsign invocation by quoting all arguments !4632 - Fix verify stable resources job !4630 - Fix rpmsign invocation again !4631 - Fixing badge format !4629 - Explain side-effect of using pre_build_script commands !4627 - 
Create separate Kubernetes troubleshooting page !4622 - Vale and Markdown rule refresh for project !4620 - Update Go version to v1.21.7 !4458 - Check directives script ignores .tmp dir !4615 - Fix rpmsign command invocation !4614 - Retry bad gateway errors for package cloud jobs !4606 - Restore argument "NoProfile" for PowerShell in encoded command !4427 (Alexandr Timoshenko @saw.friendship) - Add Apple Silicon support to Parallels Executor !4580 (Carlos Lapao @cjlapao) - Update alpine Docker tag !4603 - Fully implement markdownlint-cli2 in project !4610 - Update Docker+machine version to v0.16.2-GitLab.24 !4609 - Add ~"Category:Runner Core" to bug issue template !4612 - Housekeeping docs update !4604 - Resolve merge conflicts for Updated documentation S3 endpoints to support IPv6 !4602 - Remove removed feature from docs !4594 - Replace old handbook URLs !4554 - Change file name usage in docs per word list !4596 - Remove timeout and retry of package-deb and package-rpm jobs !4597 - Update version notes to new style - Runner !4590 - Update Harbor self-signed certificate x509: unknown Certificate Authority gotcha !4321 - Add specific steps on secret creation !4589 - Clean up docs redirects, runner - 2024-01-22 !4588 - Update persona links to handbook subdomain !4587 - Update `k8s dumb-init` FF doc to convey its support in both Kubernetes modes !4582 - Update `k8s dumb-init` FF doc to convey its support in both Kubernetes modes !4582 - Fix the architecture of pwsh x86-64 helper images !4559 ## v16.8.0 (2024-02-27) ### New features - Set default runner script timeout to allow after_script !4491 - Move PodSpec feature to beta !4568 - Allow IAM Session Tokens for S3 cache client credentials !4526 (Mike Heyns @mike.heyns) - Add allowed_users config for Docker executor !4550 - Add GCP Secret Manager secrets integration !4512 ### Bug fixes - Revert default runner script timeout !4621 - Helper image container should always use native platform !4581 - Delete cache dirs after 
failed extraction !4565 - Truncate runner token so it won't get logged !4521 (Matthew Bradburn @mbradburn-ext) - Allow empty string on emptyDir volume size !4564 - Support default paths on Windows for custom clone path !2122 (Ben Boeckel @ben.boeckel) - Hide Docker executor init behind a feature flag !4488 - Revert "Add custom entrypoint for the build container for Kubernetes executor" changes !4535 ### Maintenance - Build images with `bleeding` Postfix rather than `main` !4583 - Use version instead of sha commit to reference helper images !4558 - Update glossary !4574 - Remove alpine 315 !4575 - Add alpine 3.19 !4561 - Fix FPM building RPM packages !4560 - Update `k8s dumb-init` FF doc to convey its support in both Kubernetes modes !4582 - Rebuild CI image !4576 - Change update to upgrade for 'Update GitLab Runner' !4572 - Add omitempty to allowed_users runner config spec !4571 - Helm documentation for ImagePullSecrets less confusing !4536 (Baptiste Lalanne @BaptisteLalanne) - Document hostname length issue when using Docker-machine !4518 (Andrés Delfino @andredelfino) - Removing docs Vale rule !4567 - Fix the architecture of pwsh x86-64 helper images !4559 - Create subtests for each allowed image !4540 (Zubeen @syedzubeen) - Changing title to active verb !4563 - Updating title to be verb !4562 - Adding metadata descriptions !4556 - Document runner managers and system_ID !4549 - Add section for unhealthy configuration !4552 - Add `grep` as a dependency when overriding an image's ENTRYPOINT !4553 - Clarify / revise GitLab-runner SIGQUIT config !4548 - Update to go 1.21.5 !4541 - Add missing Docker configuration for Docker-autoscaler !4534 (Nabil ZOUABI @nabil_zouabi) ## v16.7.0 (2023-12-21) ### Bug fixes - Helper image container should always use native platform !4581 ### Maintenance - Update `k8s dumb-init` FF doc to convey its support in both Kubernetes modes !4582 ## v16.6.2 (2023-12-21) ### Bug fixes - Revert "Add custom entrypoint for the build container for 
Kubernetes executor" changes !4535 - Improve the collapsible element logic !4487 - Avoid SIGTERM propagation to processes on Windows OS !4524 - Fix PowerShell native command error output with Kubernetes executor !4474 (Matthew Bradburn @mbradburn-ext) - Use -File to improve pwsh exit status !4468 (Matthew Bradburn @mbradburn-ext) - Add a better handling of signal on both Helper and Build container for k8s executor in exec mode !4485 - Fix broken main !4499 - Hide Docker executor init behind a feature flag !4488 - Hide Docker executor init behind a feature flag !4488 - Make TestDockerBuildContainerGracefulShutdown less flaky !4479 ### Maintenance - Update alpine Docker tag !4167 - Fix orphaned links for Autoscaling GitLab Runner on AWS EC2 docs page !3575 - Fix flaky resolver_URL_test.go due to lack of cleanup !4542 - Fix broken link !4539 - Troubleshoot more "No URL provided" cases !4502 - Move section in Kubernetes executor page !4538 - Update alpha to experiment in k8s executor page !4532 - Add support for Windows 11 23H2 !4504 (Matthew Bradburn @mbradburn-ext) - Add troubleshooting for Docker connect failed !4516 (Matthew Bradburn @mbradburn-ext) - Document limitation in gcs-fuse-csi-driver for mounting volumes in init container !4527 - Exclude empty slices during the validation of the config.TOML !4520 - Improve Docker executor platform option integration test !4489 - Add 204 error troubleshooting steps to the k8s executor docs !4508 - Upgrade fleeting and taskscaler !4510 - Add clarification about feature flags usage !4503 - Clarify ability to set other non-root user ids for k8s runner !4513 - Update "filename" to "file name" !4515 - Rewrite Image building to Mage to export them for verification !4295 - Update links to TW team handbook page !4511 - Generate k8s API permissions docs !4442 - Separate trace/job log streams !3983 - Delete docs marked for removal !4507 - Change RBAC option from "enable" to "create" !4506 (Chen Wu @wuchen) - Clarify user membership 
for Docker !4498 - Change "Experiment" to Beta in supported public cloud instances table !4492 - Revert "Merge branch 'less-verbose-logging' into 'main'" !4496 - Make autoscaler integration tests pass !4497 - Make autoscaler integration tests pass !4497 - Cross-link Docker in Docker TLS configuration docs !4495 - Bump some test timeouts !4490 - Doc | Add new error to the troubleshooting section of instance executor !4475 - Improve formatting !4484 (Ben Bodenmiller @bbodenmiller) - Clarify process tree in Kubernetes build container !4482 - Recommend a mountpoint other than /Users/Shared !4478 (Matthew Bradburn @mbradburn-ext) - Retry package-deb and package-rpm when job times out !4481 - Bump some test timeouts !4471 ## v16.6.1 (2023-11-24) ### Bug fixes - Hide Docker executor init behind a feature flag !4488 ### Maintenance - Make autoscaler integration tests pass !4497 ## v16.6.0 (2023-11-16) ### New features - feat: allow specifying image platform to pull images !3916 (Muhammed Ali @ar-mali) - Docker executor: Add configuration to include Docker's `--group-add` !4459 (Ben Brown @benjamb) - Add custom entrypoint for the build container for Kubernetes executor !4394 (Baptiste Lalanne @BaptisteLalanne) - Prevent logging every connection to the instance when using an autoscaler !4332 (Mattias Michaux @mollux) - Add SizeLimit option to emptyDir volumes for Kubernetes executor !4410 - Enable Git transfer.bundleURI by default !4418 ### Security fixes - Update various images to use latest Docker-machine version !4454 - Update some dependencies to resolve vulnerabilities !4453 ### Bug fixes - Implement graceful build container shutdown for Docker executor !4446 - Add a better handling of signal on both Helper and Build container for k8s executor in attach mode !4443 - Add a mutex to sync access to sentryLogHook !4450 (Matthew Bradburn @mbradburn-ext) - Use lchmod for zip extract !4437 (Matthew Bradburn @mbradburn-ext) - Don't use Docker links for user-defined networks 
!4092 - Fix compilation of Kubernetes integration tests !4455 - Sanitize image entrypoint to remove empty string !4452 - Manually refresh JobVariables prior to ConfigExec !4379 (Paul Bryant @paulbry) - Fix file secrets in debug terminal !4423 (Matthew Bradburn @mbradburn-ext) - Fix labeling of the GitLab_runner_failed_jobs_total metric !4433 - Fix Azure key vault JWT convert bug !4396 (Zehua Zhang @zhzhang93) ### Maintenance - Doc | Fix typo: rename key_pathname to key_path !4476 - Add a link to runner tutorial !4467 - docs: Use "prerequisites," plural (Runner) !4473 - Clarify PowerShell defaults !4470 (Matthew Bradburn @mbradburn-ext) - Change Docker and instance executor from experiment to beta !4463 - Skip instance executor tests for cmd !4462 - Removed deprecated link !4461 - Use latest Technical Writing images !4449 - Misc test fixes !4460 - Add link to forceful shutdown definition !4445 - Add basic Azure instance/Docker autoscaler examples !4451 - Update versions in documentation !4457 - Update runner_autoscale_aws documentation with required iam:PassRole !4286 (Sjoerd Smink @sjoerdsmink) - Add Docker Autoscaler and Instance executor integration tests !4402 - Refactor the retry interface to be generic !4422 - Update CI_IMAGE to include Debian image flavor !4447 - Fix sync_Docker_images test not building !4448 - Change instance, Docker autoscaler and AWS plugin to BETA !4432 - Update gocloud.dev to v0.34.0 !4430 - Doc | Add sample command for creating Docker machines for troubleshooting !4444 - Update imagePullSecrets documentation !4440 - Add upgrade troubleshooting info to Runner docs !3968 - Update information regarding new runner creation workflow !4436 - Merge "Example" page into register runners page !4413 - Add tip about No URL provided !4435 (Matthew Bradburn @mbradburn-ext) - Set test build timeout to the DefaultTimeout value !4439 - Add a support policy page for GitLab Runner support policies !4434 - Reduce timeout for package-deb/rpm jobs to 30 
minutes !4431 - Fix usage of 'build' !4429 - Fix formatting in Docker Autoscaler executor page !4428 - Clarify how FF_USE_POD_ACTIVE_DEADLINE_SECONDS works !4424 (Ben Bodenmiller @bbodenmiller) - Update runner version reference !4426 ## v16.5.0 (2023-10-20) ### New features - Print Kubernetes Pod events !4420 - Support of multi-line command output in job terminal output view for bash shell when FF_SCRIPT_SECTIONS is enabled !3486 ### Security fixes - Install Git and Git-lfs via package manager in ubi.fips.base image !4405 - Run `apk upgrade` in runner alpine images !4378 ### Bug fixes - Docker-machine: Ensure runner stays under limit when IdleCount is 0 !4314 - When single-quoting, don't also quote with backtick with PowerShell/pwsh !4387 (Matthew Bradburn @mbradburn) - Add config to autoset Helper Image ARCH and OS !4386 - Add missing findutils package to ubi-base image !4414 - Set `FF_USE_POD_ACTIVE_DEADLINE_SECONDS` default value to `true` !4361 - Retrieve script exit command after execution !4397 - Add missing runtime packages to ubi base image !4359 - Fix the repository cloning error on Windows with `cmd` shell executor !4341 - Fix PowerShell SourceEnv permission failure !4369 - Fix PowerShell SourceEnv permission failure !4369 ### Maintenance - Display the stage command exit code when debug log is enabled !4421 - Fix docs typo !4419 (Alex @AJIOB) - Downgrade CI image to use Debian bullseye instead of bookworm !4417 - Enhance debug secrets warning in documentation !4415 (Matthew Bradburn @mbradburn-ext) - Add missing rbac when debugging services !4412 (Ismael Posada Trobo @iposadat) - Docs: point users to Docker-machine fork that successfully handles EC2 fleet spot instance requests !4403 - Remove note on no-support for Windows system certificate store !4409 (Taisuke 'Jeff' Inoue @jeffi7) - Remove spaces from FF_NETWORK_PER_BUILD environment variable example !4416 - Use latest linter image in relevant pipelines !4411 - Part 3: CTRT edits for registering 
runners !4392 - Upgrade Go to version 1.20.10 !4348 - Remove WithBrokenGitSSLCAInfo tests as they no longer function as expected !4408 - Update file Kubernetes.md !4393 (Thomas Spear @tspearconquest) - Detail how to output stdout for WSL on windows !4370 - Add docs about Kubernetes overrides by CI variables !4222 - Lock `gitlab-dangerfiles` to 4.1.0 !4401 - Add link to Azure plugin releases to the instance executor documentation !4363 - Add link to Azure plugin releases to the Docker autoscaler executor !4364 - CTRT register runners prt2 v2 !4395 - Adding dial tcp timeout !4389 - Update documentation to reflect use of runner-token !4390 - Update PACKAGE_CLOUD variable default value !4342 - Improve documentation regarding runner unregistration !4338 - CTRT Part 1: Registering runners page !4371 - Add documentation issue template to project !4382 - Run apk upgrade in image used to build images !4381 - Style and language improvements for Advanced configuration docs !4377 - Improve error messages that are emitted by tasks !4344 (Taliesin Millhouse @strongishllama) - Add links to Trusting TLS certificates paragraphs !4376 - Enable configuration of MTU in Docker executor !3576 (Jasmin @nachtjasmin) - fix: Sets some http headers to use constants !4355 - Update default Git_LFS_VERSION !4372 (Matthew Bradburn @mbradburn) - Git_LFS_VERSION must be specified when running make at the command line. 
!4360 (Matthew Bradburn @mbradburn) - Fixed nvidia-smi typo !4367 (Alexander Hallard @zanda8893) ## v16.4.0 (2023-09-25) ### New features - Add script/after script timeout configuration via variables !4335 - Distinguish job failure in worker processing failures metric !4304 - Expose queueing duration histogram metric !3499 ### Security fixes - Clean up manual installation of Git and Git-lfs in ubi base image and bump Git-lfs version to 3.4.0 !4289 - Runner-helper fips image cleanups !4308 - Bump Git-lfs version to 3.4.0 !4296 - Clean up manual installation of Git and Git-lfs in ubi base image and bump Git-lfs version to 3.4.0 !4289 - Runner-helper fips image cleanups !4308 - Bump Git-lfs version to 3.4.0 !4296 ### Bug fixes - Fix PowerShell SourceEnv permission failure !4369 - Fixed: init-permissions takes too long for windows volumes !4324 (OK_MF @OK_MF) - Switch deletion propagation to background for Pod's dependents !4339 - Do not propagate Build context to k8s executor cleanup method !4328 - Fix error when unmarshaling string with windows path for PowerShell and pwsh !4315 - Automatically set GOMEMLIMIT based on memory cgroup quotas !4312 - Do not propagate Build context to k8s executor cleanup method !4328 - Fix error when unmarshaling string with windows path for PowerShell and pwsh !4315 - Update fleeting and taskscaler to newest versions !4303 - Forward URL rewrite in lfs pull !4234 (François Leurent @131) ### Maintenance - Set FF_RESOLVE_FULL_TLS_CHAIN to false by default !4292 - Generate packagecloud packages with Mage !4323 - Fix pass env cmd test !4365 - Refactor content for Docker autoscaler executor page !4354 - Update runner registration token deprecation link !4357 - Enable pushing to ECR and DockerHub !4353 - Improve documentation about pod_annotations !4336 - Use ADD to download Windows dockerfile dependencies !4346 - Use ADD to download Windows dockerfile dependencies !4346 - Fix link in documentation to avoid redirects !4347 - Remove trailing 
whitespace from documentation !4343 - Discourage `gitlab-runner restart` within containers !4331 (Benedikt Franke @spawnia) - Add info about config.TOML file !4333 - Update binary version !4330 - Remove configmap section !4329 - Fix FF_USE_PowerShell_PATH_RESOLVER env var value !4327 - Remove disclaimer from putting runner tokens in secrets !4319 - Update nav steps !4310 - Add note about empty runner-registration-token !4276 - Simplify issue templates and add labels !4275 - Fix links that are redirecting in docs !4311 - Add Openshift4.3.8 and later anyuid SCC !4306 - Add FIPS-compliant helper images and binaries to S3 sync job !4302 - Refresh Vale rules and link checking Docker image !4299 ## v16.3.1 (2023-09-14) ### Security fixes - Clean up manual installation of Git and Git-lfs in ubi base image and bump Git-lfs version to 3.4.0 !4289 - Runner-helper fips image cleanups !4308 - Bump Git-lfs version to 3.4.0 !4296 ### Bug fixes - Do not propagate Build context to k8s executor cleanup method !4328 - Fix error when unmarshaling string with windows path for PowerShell and pwsh !4315 ### Maintenance - Use ADD to download Windows dockerfile dependencies !4346 ## v16.3.0 (2023-08-20) ### New features - Enable variable injection to Persistent Volume Claim name !4256 (OK_MF @OK_MF) - Add `http2: client connection lost` for k8s API retry !4285 - Add debug message to diagnose fetching issuer certificate bug !4274 - Add RISC-V support !4226 (Aaron Dewes @AaronDewes) - Add link to documentation when using forbidden arguments in register !4266 - Add `connect: connection timed out` for k8s API retry !4257 - Put warning event retrieval feature behind a Feature Flag !4246 ### Bug fixes - Fix cmd escaping/quoting of parentheses !4301 - Revert "Prune tags when fetching" !4300 - Use Git --unshallow when Git_DEPTH is zero !4288 - Fix Docker Cleanup() panic when nothing has been configured !4287 - Mark project working directory as safe for Git !3538 - Only decode certificates if HTTP 
GET is successful !4281 - Panic during build now prints stack trace !4283 - Retry sync and update submodules on failure !4278 - Fix Docker ulimit validation warning !4248 (Dennis Voss @DennisVoss) - Fix script typo that caused packages not to be pushed to Packagecloud !4253 ### Maintenance - Adding All to tier badges !4297 - Add RBAC as required config !4293 - Fix whitespace in docs !4291 - Fix typos !4284 (Sven Strickroth @mrtux) - Include first multiline commit in MR description for default template !4282 - Update Docker dependencies version to fix invalid Host header !4249 (Sword @RryLee) - Update fleeting and taskscaler to newer versions !4280 - Propagate Kubernetes executor context !4125 - Prune tags when fetching !4218 (Guilhem Bonnefille @gbonnefille) - Rename runner token to runner authentication token !4264 - Fix documentation to work with Runner Helm Chart v0.53.0 onwards !4269 (Konstantin Köhring @konstantin.koehring) - Provide guidance on minimal permissions needed for EC2 autoscaling !4175 - Doc | Add troubleshooting steps for private registry ssl errors !4267 - Update link to EKS !4268 - Add space before backtick !4265 - Add Vale to .tool-versions file !4252 - Add K8s and Docker logging location to troubleshooting !4262 - Add warnings about shell executor !4261 - Include steps to enable metrics for Runners using Helm Chart !4260 - Update installation type references for docs !4258 - Fix potential race condition in Docker provider test !4244 - Add missing release binaries/images to GitLab release page !4254 - Fix table item !4250 - Restructure executor page !4245 - Ensure Windows helper images builds fail upon error !4243 ## v16.2.0 (2023-07-21) ### New features - Update Runner package repository with OS availability !4215 - Add warning events on failure with k8s executor !4211 - Check forbidden arguments in register command before calling the server !4158 - Ignore forbidden arguments to register if using --registration-token !4157 - Retry all k8s API 
calls in the runner Kubernetes executor !4143 - Print number of jobs being processed for each new job !4113 - Added zip+zstd and tar+zstd archivers !4107 - Add Azure key vault support !3809 (Zehua Zhang @zhzhang93) ### Security fixes - Do not install Python in ubi-fips-base image !4213 - Build Git-lfs from source in runner ubi-fips image !4206 - Update GitHub.com/Docker/distribution dependency !4205 - Upgrade Go version to 1.20.5 !4179 - Update `ubi8-minimal` image to version `8.8-860` !4171 ### Bug fixes - Downgrade Git from v2.41.0 to v2.40.1 !4236 - Fix misleading error when cache isn't configured !4212 - Fix common build dir implementation in instance executor !4209 - Add documentation to describe runner issue 30769 and its workarounds !4181 - Fix panic for instance executor when instance config option is nil !4173 - Kubernetes executor: prevent background processes from hanging the entire job !4162 (Snaipe @Snaipe) - Fix Docker-autoscaler proxy tunnel for Windows !4161 ### Maintenance - Fix old metadata in docs !4240 - Refactor instance executor docs content !4238 - Fix Git LFS not building !4237 - Fix typo in Docker executor !4235 (Raphaël Joie @raphaeljoie) - Mark integration_k8s as optional temporarily !4233 - Update documentation links !4232 - Update runner reg instructions in macOS setup !4230 - Add links to executor pages !4229 - Remove homebrew from docs to set up runner on MacOS !4227 - Build Git-lfs in the base UBI fips image as multiarch !4219 - Add Troubleshooting Case !4208 - Fix TestStackDumping flaky test and incorrect log output !4207 - Update vale rules and exceptions !4204 - Update text in runner registration page !4203 - Add note for limited config template setting support !4202 - Add documentation for SETFCAP configuration !4183 - Fix flaky k8s TestProcessLogs !4177 - Update to include Runner system requirements !4176 - Upgrade GitHub.com/MinIO/MinIO-go to v7.0.59 !4174 - Fixed outdated URL and type of variable !4168 - Add Crowdstrike 
troubleshooting guidance !4160 - Emphasize use of runnerToken in Helm chart !4150 - Mark ConfigExecTimeout as optional !4145 (Nikolay Edigaryev @edigaryev) - Propagate build context !4128 - Add troubleshooting section about "permission denied" errors due to helper image user mismatch in k8s executor !3990 ### GitLab Runner distribution - Fix ECR and DockerHub sync !4180 - Fix windows servercore pwsh version and checksums !4178 ## v16.1.0 (2023-06-21) ### New features - Enable variable expansion in fallback cache keys !4152 (René Hernández Remedios @renehernandez) - Automatically set GOMAXPROCS based on CPU Quotas !4142 - Allow Instance executor to use a common build directory !4136 - Pass clean command args to sub modules !4135 (Markus Ferrell @markus.ferrell) - Add dedicated failure reason for image pulling failures !4098 - Support allowed images for privileged jobs and services !4089 (Stéphane Talbot @stalb) - Variable expansion implementation in cache policy field !4085 (René Hernández Remedios @renehernandez) - Use executor's context to enforce timeouts on VirtualBox commands !4026 (Patrick Pirringer @patrick-pirringer) ### Bug fixes - Fix Windows IsRoot() path utility !4153 - Warn if runner with same token being registered multiple times !4122 - Upgrade taskscaler to latest version !4114 - Ensure lock for builds when listing jobs via debug API !4111 - Ensure instance connection is closed when vm isolation is enabled !4108 - Fix community Merge Request pipeline parse errors !4077 (Anthony Juckel @ajuckel) - Fix cache keys processing by improving the handling of the fallback keys !4069 (René Hernández Remedios @renehernandez) ### Maintenance - Docs maintenance: Delete trailing whitespace !4166 - Bump version of markdownlint-cli2 in project !4164 - Correct the filename of configmap !4163 - In UBI-fips helper images remove installation of extra packages since they are... 
!4159 - Fix k8s integration tests !4156 - Update code example with proper nesting !4155 - Expand Runner Helm chart troubleshooting section !4149 - Update documentation to mention that --registration-token is deprecated !4148 - Improve readability of table !4144 (Bastien ANTOINE @bastantoine) - Upgrade fastzip to v0.1.11 !4141 - Update Runner docs for consistent SaaS runner naming !4138 - Docs maintenance: Update redirects !4134 - Refresh Vale and markdownlint rules !4133 - Add GitLab-runner section to values example !4132 - Removing podman references !4131 - Change heading used to describe reusing an authentication token !4129 - Refactor instance executor page !4124 - Correct example AWS zone used in an example !4123 (Nabil ZOUABI @nabil_zouabi) - Improve formatting !4121 (Ben Bodenmiller @bbodenmiller) - Mention use of runner tokens in Kubernetes runnerRegistrationToken !4120 - Follow up edits instance executor !4119 - Remove trailing spaces !4115 - Update project to use Ruby 3.2.2-based Docker images !4112 - Build Git from source for UBI images !4110 - Make GitLab network client respect Retry-After header !4102 - Documentation versions update !4096 - Improve cacheFile() errors !4078 (Nikolay Edigaryev @edigaryev) - Update alpine and pwsh versions !4072 - Add info about grouped runners to docs !4056 ### GitLab Runner distribution - Sync ci images to dockerhub and ecr !4139 ### Documentation changes - Update nav step !4154 ## v16.0.2 (2023-06-02) ### Bug fixes - Upgrade taskscaler to latest version !4114 ## v15.11.1 (2023-05-25) ### Bug fixes - Fix cache keys processing by improving the handling of the fallback keys !4069 (René Hernández Remedios @renehernandez) ## v16.0.1 (2023-05-24) ### Maintenance - Build Git from source for UBI images !4110 ## v16.0.0 (2023-05-22) ### New features - Add docs how to create an ephemeral PVC !4100 - Update autoscaler image handling !4097 - Send system_ID when unregistering runner !4053 - Consider node selector overwrites for the 
helper image !4048 (Mike Hobbs @mike554) - Improve autoscaling executor providers shutdown !4035 ### Security fixes - Upgrade GitHub.com/kardianos/service to v1.2.2 !4105 ### Bug fixes - Close connection to instance on nesting client connect fail !4104 - Support health checking multiple service ports for Docker !4079 - Fix helper images being published with the wrong architecture !4073 (Anthony Juckel @ajuckel) - Fix cache keys processing by improving the handling of the fallback keys !4069 (René Hernández Remedios @renehernandez) - Unresolved secrets now return error by default !4064 - Expand container related options before they are used !4002 ### Maintenance - Update Git LFS checksums in release_Docker_images !4106 - Upgrade Git-lfs to 3.3.0 !4101 - Remove note on tested K8s's environments from Runner k8s docs !4087 - Upgrade GitHub.com/Docker/Docker to 23.0.6 !4086 - Remove section: Use the configuration template to set additional options !4084 - Upgrade GitHub.com/emicklei/go-restful/v3 to 3.10.2 !4082 - Update Windows version support policy for Runner !4074 - Fixed link that was breaking UI !4071 - GCP fleeting docs for Instance and Docker Autoscaler executors !4068 - Update alpha to experiment in executor page !4067 - Remove deprecated gosec-sast job !4065 - Update Docker-machine version !4061 - Remove reference to Docker-SSH and Docker-SSH+machine !4060 - Update GPG verification details !4059 - Upgrade GitLab-terminal dependency !4057 - Update grammar, remove extra word !4054 (Rasheed Babatunde @rasheed) - Remove trailing whitespaces !4052 - Make clearer that force send interval is related to logs !4043 - Update redhat/ubi8-minimal Docker tag to v8.7-1107 !4025 - Update version of Docker-machine bundled in runner images !4024 - Add tests for internal autoscaler acquisition !4005 - Use Splitic test runner !3967 - Update golang Docker tag to v1.19.9 !3962 - Update alpine Docker tag !3918 - Remove lll linter !2837 ### Documentation changes - Add idle_time 
entry to docs !4093 ## v15.11.0 (2023-04-21) ### New features - Add Config Validation section to runner Docs !4017 - Add fine grained configuration of autoscaler histograms !4014 - Update runner internal metrics !4001 - Update taskscaler/fleeting metric collectors configuration !3984 - Reorganize index sections for runner use cases !3980 - Add high-level docs for Instance and Docker Autoscaler executors !3953 - Add Docker-autoscaler. !3885 - Implement fallback cache keys !3875 (René Hernández Remedios @renehernandez) - Support remote Windows Docker environments from Linux hosts !3345 - Add support for absolute submodule URLs !3198 (Nejc Habjan @nejc) - Support for custom Kubernetes PodSpec !3114 ### Bug fixes - Add hostname and find commands to UBI FIPS image !4040 - Remove stray omitempty in long form for --Docker-devices !4029 (Robin Voetter @Snektron) - Interactive terminal: Wait for terminal to be set !4027 - Initialize nesting client lazily !4020 - Handle build's parent context cancelation correctly !4018 - Reduce config validation message noise !4016 - Try all specified TCP ports when doing a service container health-check !4010 - Fix Docker-machine detail races !3999 - Do not ask for registration token if runner token is specified !3995 - Explicitly start Docker service in windows tests !3994 - Resolve "Runner FIPS RPM packages conflicts itself" !3974 - Gracefully terminate Windows processes with Ctrl-C event !3920 (Chris Wright @inkychris) ### Maintenance - Update Docker Autoscaler introduction milestone !4050 - Add missing code block end in docs/install/windows.md !4049 (Celeste Fox @celestefox) - Add container support for Windows 2022 21H2 !4047 - Add reference to CI_CONCURRENT_PROJECT_ID variable !4046 - Remove Windows 21H1 !4045 - Add merge release config to bump the VERSION file after the stable branches are merged into main !4041 - Upgrade golang.org/x/net to v0.7.0 !4039 - Add troubleshooting of the error "unsupported Windows Version" for k8s on 
Windows !4038 - Experiment: Add reviewer roulette !4037 - Use Docker_HOST if set in the build time !4036 - Docker daemon change data-root directory !4034 - Post-merge edits for Executor pages !4033 - Make runner manager lowercase !4032 - Add GitLab Runner autoscaling page !4031 - Use a fixed time in register command integration tests !4023 - Update version in docs !4022 - Add note about runner registration permission !4021 - Fix flaky racy tests !4019 - Update index.md to remove typo in the second paragraph !4013 (vsvincent @vsvincent) - Fix flaky TestDockerPrivilegedServiceAccessingBuildsFolder !4012 - Fix flaky interactive terminal test - ensure terminal connected !4011 - Temporarily skip Git-lfs for TestDockerCommandMultistepBuild !4009 - Remove comments metadata !4008 - Fix Test_Executor_captureContainerLogs race !4007 - Add note about Arm64 helper image for runner on arm64 Kubernetes clusters (docs) !4006 - Fix Docker-machine Windows tests !4003 - Re-use helper container for Docker executor's predefined stages !4000 - Improve troubleshooting documentation for the Job failed: prepare environment error with the Shell executor (docs) !3998 - Start prebuild stage earlier !3997 - Add a Runner glossary to the documentation (docs) !3996 - Remove note about selecting runner by name !3993 - Fix TestBuildOnCustomDirectory for PowerShell/pwsh !3992 - Only quote cmd batch strings where necessary !3991 - Use Ruby 3.2.1-based docs Docker images !3988 - Restructure registering runners page !3985 - Refactor executor setup/executor name function !3982 - CTRT edits Kubernetes part 4 !3963 - Drop extraneous "to" in feature flag docs !3946 - Update pipeline to depend on runner-incept passing !3940 - Improve layout with tabs !3894 - Update instructions to suggest go install rather than go build for building plugins !3819 - Building runner helper images with Windows nanoserver !3460 (Hoff_IO @82phil) ## v15.10.0 (2023-03-17) ### New features - Change runner type "specific" to 
"project" !3979 - Configure external address usage for autoscaler provider readiness check !3973 - Use UBI Minimal for GitLab Runner UBI-FIPS image !3966 - Make the `gitlab-runner register` command happen in a single operation !3957 - Do not send system_ID in UpdateJob call !3925 - Best-effort config validation !3924 - Implement ability to parse JSON payload from /runners/verify !3923 - Add -y to apt-get install Git-lfs to prevent stalling the installation. !3921 (Antoon Huiskens @antoonhu) - Handle registration for runners created in GitLab UI !3910 - Add support for activeDeadlineSeconds on CI Job Pod with k8s executor !3897 - Documentation for private fargate setup !3803 - Allow custom executor to specify the shell used !3789 (Robin Lambertz @roblabla) - Allow configuration of environment variables for runner services !3784 - Docker executor: add services_security_opt config option !3760 (Glenn Dirkx @juravenator) - Add API requests latency metric !3316 - Support for custom Kubernetes PodSpec !3114 ### Security fixes - Address vulnerability reports against runner-helper alpine images !3958 - Fix CVE-2022-1996 by upgrading k8s.io/client-go !3951 ### Bug fixes - Fix inconsiderate test !3971 - Fix non-amd64 alpine runner-helper images !3965 - Return BuildError from instance executor's client Run !3964 - Fix 'clear-Docker-cache' script for Docker 23.0 !3960 - Remove .runner_system_ID from Docker images !3950 - Remove re-writing config.TOML file on configuration reload !3934 - Add Windows Build Number to version mapping for Windows 2022 !3917 - Handle empty artifact paths !3912 - Execute the script from the right container !3900 - Shells/bash.go: set permissions before dir/file deletion !3726 (Karl Wette @karl-wette) ### Maintenance - Fix TestBuildOnCustomDirectory for PowerShell/pwsh !3992 - Fix merge request link with missing '-' scope !3987 (Niklas @Taucher2003) - Indicate that Command Line and Config.TOML are separate for debug !3986 - Fix missing parenthesis in 
the runners.Docker section !3981 (Tugdual Saunier @tucksaun) - Fix Windows PowerShell encoding test !3977 - Fix flaky interactive terminal test !3975 - Slightly change message shown when .runner_system_ID cannot be written !3969 - Update SSL troubleshooting link !3961 - Remove link to Docker Machine on GitHub - docs !3956 - Fix failing fuzzing test !3955 - Use Labkit for FIPS check !3954 - Kubernetes executor CTRT edits part 3 !3949 - Corrected minor typo !3948 - Bump Ubuntu version, ease quickstart with Runner !3947 - CTRT edits Kubernetes executor part 2 !3944 - Use latest docs Docker images !3941 - Fix deprecation notice legal disclaimer !3936 - Update Docker engine client to v23.0.1 !3935 - Remove reference to GitLab Runner 10 [docs] !3933 - Add container images support lifecycle [docs] !3931 - CTRT refactor for Kubernetes executor page part 1 !3928 - Fix typo in the post_clone_script deprecated warning message !3927 (Tamás Dévai @dorionka) - Remove overview heading from shell docs !3926 - Avoid running 1809 integration tests in CC !3922 - Language edits for "Automate keeping up to date with packagecloud release" !3914 - Add troubleshooting item for background processes and hung job !3913 - Update golangci-lint version to 1.51.2 !3911 - Update the URL for the Docker-machine version from .11 to .19 !3909 - Update taskscaler version in GitLab-runner !3903 - Fix Warning log during prepare stage for the Kubernetes executor !3902 - Add type::feature as a new feature section for changelog !3898 - Expand and consolidate Git LFS docs for non-Docker executors !3892 (Nejc Habjan @nejc) - Upgrade Go version to 1.19.6 !3889 - Update documentation links for pod security context !3823 - Add step to enable linger to GitLab-runner !3688 (Peter Harsfalvi @peterh.six) ## v15.9.1 (2023-02-20) ### Bug fixes - Remove re-writing config.TOML file on configuration reload !3934 ## v15.9.0 (2023-02-19) ### New features - Ignore glrt prefix for runner short token !3888 - Log artifact 
download request host !3872 - Use taskscaler and nesting slots !3818 ### Bug fixes - Handle empty artifact paths !3912 - Execute the script from the right container !3900 - Update removal milestone in warning log message for step_script !3893 - Generate random system_ID for run-single mode !3881 (Helio Machado @0x2b3bfa0) - Clarify checking out message to reduce confusion !3880 - Allow runner to start when config directory is not writeable !3879 - Fix bug with project dir not resolving as in the project !3877 - Use JobVariable.Value() for internal values !3870 - Prevent masking panic by ignoring zero length secrets !3869 - Sending debug_trace param on PATCH job_trace requests !3857 ### Maintenance - Fix misspelling in documentation !3896 (Shafiullah Khan @gitshafi) - Add additional test coverage around path matching for artifacts (doublestar) !3890 - Add documentation for shutdown_timeout config.TOML setting !3887 - Update Docker Machine installed in runner container image !3886 - Upgrade GitHub.com/BurntSushi/TOML !3883 - Clarify the use of --version when installing the Helm chart !3882 - Fixed wording for help command in docs !3878 (E Jo Zim @designerzim) - Use new Ruby 3.0.5-based Docker images !3876 - Drop Windows exemption for warning about system cert pool !3871 - Improve Docker Machine executor finish message !3868 - Add link to all metrics available !3867 - Update documentation about helper image being pushed on dockerhub !3866 - Update documentation to highlight access to CI Variables from container entrypoint with k8s executor !3865 - Add backticks to fix kramdown warning !3864 - Reduce log level to reduce noise in logging !3863 - Clean up docs redirects, runner - 2023-01-23 !3861 - Add metrics for counting configuration file access !3859 - Handle the content of the new pre_get_sources_script and post_get_sources_script job payloads in Runner !3858 - Use latest docs linting images for project !3856 - Update always policy to match the Docker wording !3851 
- Log type of shell when using Shell executor !3850 (Anatoli Babenia @abitrolly) - Add default annotations to Kubernetes build pods !3845 (Adrian Rasokat @adrian.rasokat.tui) - Update removal milestone in deprecation warning !3844 - Document requirement for Docker executor image ENTRYPOINT to support sh/bash COMMAND !3839 (Pierre Beucher @pbeucher) - Update golang Docker tag to v1.18.10 !3828 - Docker executor CTRT part 4 !3826 - Automate keeping up to date with packagecloud release !3821 - Automatically set Alpine and Ubuntu version defaults in make !3816 - Warn about exceeding the global concurrency limit when setting up a new runner !3797 - CTRT Docker executor part 2 !3788 - Make external address usage configurable !3783 - Update redhat/ubi8 Docker tag to v8.7-1054 !3764 - Add support for setting procMount of build container !3546 (Alex Wied @alex-cm) ### Documentation changes - Change removal date to 17.0 for GitLab-runner exec (docs only) !3884 ## v15.8.0 (2023-01-19) ### New features - Add system_ID to Prometheus metrics !3825 - Send system_ID in jobs requests !3817 - Prepare register command to fail if runner server-side configuration options are passed together with a new glrt- token !3805 - Add nesting client to support VM-isolated build environments !3654 - #27863 Add mac address with isolation !3454 (Artem Makhno @artem.makhno.softsmile) - Display system_ID on build log !3852 ### Bug fixes - Fix doublestar implementation to use paths relative to working directory !3849 - Fix windows integration tests failure check !3846 - Re-merge "Artifact/cache helpers now use POSIX shell syntax for expansion" !3833 - PowerShell: fix unwanted progress streams leaking to output !3831 - Fix skipped windows integration tests !3830 - Fix relative URL path handling with clone_URL !3815 - Prevent new autoscaler thrashing instances !3813 - Add a check for any artifact paths that do not fall within the project directory or its subpaths !3757 - Use exec mode to create the 
scripts in attach mode !3751 - PowerShell: Fix stdin handling with scripts !3843 ### Maintenance - Revert "Fix go.mod to downgrade doublestar to v1.3.0 to be same as main" !3842 - Add pwsh to supported shells for Docker-windows executor !3829 - `--url` is GitLab instance URL, and not the address of the runner !3807 (Anatoli Babenia @abitrolly) - Revert "Merge branch 'avonbertoldi/29451/pkgcloud-auto-versions' into 'main'" !3794 - Bump the k8s integration test timeout to 15m !3787 - Make runner support multiple service aliases !3550 (Alessandro Chitolina @alekitto) ### GitLab Runner distribution - Start pushing Helper images to DockerHub again !3847 ### Documentation changes - Include reference to build pod configuration documentation !3848 - Add PowerShell to proper names list & minor formatting fixes !3837 (Ben Bodenmiller @bbodenmiller) - Fix Git for Windows casing !3836 (Ben Bodenmiller @bbodenmiller) - Improve wording !3835 (Ben Bodenmiller @bbodenmiller) - Clarify that GitLab-runner is required for both download and upload !3834 (Dillon Amburgey @dillon4) - Clarify variable type !3824 - Docs surround Kubernetes_ values with quotes !3820 - Documented how to protect environment variable in Kubernetes executor !3812 - Add clarifications for k8s pull policies !3811 - Fix kramdown warning issue !3808 - Update GitOps workflow warning !3806 - CTRT edits for Docker executor part3 !3802 - Adding namespace to anyuid command !3798 - Update fargate troubleshooting !3772 - Update using security context example !3723 ## v15.7.3 (2023-01-19) ### Bug fixes - PowerShell: Fix stdin handling with scripts !3843 ## v15.7.2 (2023-01-13) ### Bug fixes - Fix relative URL path handling with clone_URL !3815 - PowerShell: fix unwanted progress streams leaking to output !3831 - Re-merge "Artifact/cache helpers now use POSIX shell syntax for expansion" !3833 ## v15.7.1 (2022-12-19) ### Bug fixes - Revert automate for which supported distro releases we create packages. 
!3794 ## v15.7.0 (2022-12-17) ### New features - Add PrivilegedServices option for allowing/disallowing Docker services to be privileged !2652 - Add support for Windows Server 21H2 !3746 - Generate global system ID !3758 - Add start_type to virtualbox configuration !2558 - Update secret resolver to return raw & masked variables !3750 - Allow Executors to clone via SSH !3518 - Add Docker support for `IpcMode` for IPC namespace sharing !3781 - Expose the build timeout as an environment variable !3778 - Improve Runner's API health checking and handling !3658 ## v15.6.3 (2023-01-19) ### Bug fixes - PowerShell: Fix stdin handling with scripts !3843 ## v15.6.2 (2023-01-13) ### Bug fixes - PowerShell: fix unwanted progress streams leaking to output !3831 ## v15.6.1 (2022-11-24) ### Bug fixes - Fix cache config needing to be provided !3747 - Add GitLab-runner user during ubi-fips image building !3725 - Fix Kubernetes pod labels overwrite !3582 - Correctly handle expansion of job file variables, and variables that reference file variables !3613 - Artifact/cache helpers now use POSIX shell syntax for expansion !3752 ### Maintenance - Upgrade GitHub.com/urfave/cli to 1.22.10 !3744 - Unit test to catch urfave bug !3749 - Makefile.build.mk: allow building for arm64 without overriding ARCH !3498 - Renovate Go version !3768 - Add warning about using SIGTERM/SIGINT over SIGQUIT !3769 - Update golang Docker tag to v1.18.9 !3776 - Automate for which supported distro releases we create packages. 
!3756 - Fix silent Docker images build failure and retry buildx !3786 - Rename Docker's PrivilegedServices to ServicesPrivileged !3791 ### Documentation changes - Making things a little more obvious for those of us who may skip ahead !3697 - Clean up docs redirects, runner - 2022-11-23 - Document behavior for local addresses in [session_server] configuration !3676 - Docs: Nested guidelines for clarity !3729 - Fix some wording in docs and add links in convenient areas !3684 - Updated serviceaccount setting to match the code !3387 - Update agent for Kubernetes installation docs !3748 - Change deprecation documentation for register command !3742 - Make pod_labels more specific !3645 - Added doc to inform about saving cost when using private subnets and AWS S3 cache !3453 - Add more descriptive headings on executor pages !3763 - Add security warning to Runner install docs !3762 - Add troubleshooting details !3755 - Add note for self-managed customers !3761 - Update docs/executors/virtualbox native OpenSSH PowerShell !3775 - Fix Kubernetes Executor docs !3770 - Add note for AWS IAM instance profile !3774 - Add a requirement to create a namespace before overwriting !3696 - CTRT edits for The Docker executor part 1 !3753 - Expanded on downloading helper images and updated a link to use a more modern file. 
!3562 - Add `deprecated` to `gitlab-runner exec` !3773 ## v15.6.0 (2022-11-21) ### New features - Add support for Node Selector Overwrite !3221 - Handle job execution interruption for the new autoscaler executor provider !3672 - Add maximum size to uploaded cache !3552 - Allow multiple paths in Git_SUBMODULE_PATHS !3675 - Capture helper service logs into job/tasks main trace log !3680 - Add a feature flag to disable resolving of TLS chain !3699 - Adds proper handling of ExecutorProviders initialization and shutdown !3657 ### Bug fixes - Detect Windows build 10.0.19042 as 20H2 !3694 - Force PowerShell/pwsh input/output encoding to UTF-8 !3707 - Skip non-regular files for artifact metadata generator inclusion !3709 - Filter Kubernetes trace to remove newline added for long logs in attach mode !3691 - Enable PowerShell via stdin by default !3728 - Kubernetes executor: redial backend on internal server errors !3732 ### Maintenance - Update redhat/ubi8 Docker tag to v8.7-929 !3738 - Add OS versions supported by packagecloud 3.0.6 release !3734 - Add tests for Kubernetes scheduler name config !3643 - Update Go distribution to version 1.18.8 !3720 - Update logging levels from Debug to Info !3710 - Move autoscaler Acquire() to the ExecutorProvider !3660 - Document internal Executor Interface !3291 - Update Git to 2.38.1 and Git-lfs to 3.2.0 to address CVE-2022-29187 !3674 - Switch to markdownlint-cli2 !3683 - Ensure `go-fips` container is rebuilt when the version of Go is updated !3685 - Add logging in UpdateJob to include checksum and bytesize !3693 - Update taskscaler to newer version !3706 - Skip Docker Test_CaptureServiceLogs integration tests on windows !3703 - Update GoCloud to v0.27.0 and update Azure cache to use new SDK !3701 ### Documentation changes - Explain ANSI-relevance of log_format options !3739 - Fix broken links in runner docs !3737 - Add podman-plugins package dependency for service container network aliases !3733 - Add Taskscaler and Fleeting plugin 
instructions to Runner development !3730 - Document macOS workaround for TLS issues !3724 - Remove misleading statement regarding Bash in Windows planned feature support !3722 - Deprecate register command !3702 - Mark runnerRegistrationToken as deprecated !3704 - Add Helm repo update command to Kubernetes install docs !3736 - Add additional documentation around the use of submodules !3670 - Add Kubernetes certificate guide !3608 - Troubleshooting for pods always assigned worker node's IAM role !3678 - Change $shell to $SHELL in "Set up macOS runners" docs !3681 - Fix docs review app script and domain !3682 - Update redirected links in the runner docs !3690 - Improve development setup docs !3661 - Update Runner Helm chart docs to include list of deprecated fields !3686 - Add details to Documentation MR template !3698 - Adding Ubuntu 22 to the supported OS list !3712 - Adds deprecation notes for Docker-SSH and Docker-SSH+machine executors !3714 - Updated template to match other repo !3715 ## v15.5.1 (2022-11-11) ### New features - Add a feature flag to disable resolving of TLS chain !3699 ## v15.5.0 (2022-10-21) ### New features - Add shell+autoscaler executor !3617 - Add Docker volume driver ops !3620 - Kubernetes executor: support podspec.schedulerName !2740 - Add IPv6 support to Docker networks !3583 - Add Prometheus metrics to executor autoscaler !3635 - Add Git_SUBMODULE_DEPTH variable !3651 - Add support for PAT masking in trace !3639 ### Bug fixes - Set all existing variables into the build container !3607 - Add pgrep to ubi-fips image !3625 - Standardize Attestation Artifact Names and Permissions !3650 - Do not expand some CMD variables ### Maintenance - Upgrade Go to version 1.18.6 !3589 - Add TMPDIR to test's env allowlist !3603 - Go 1.18 mod tidy !3619 - Drop runtime.GC() after every check !3595 - Upgrade Go FIPS image version to 1.18 !3624 - Add internal autoscaler executor provider unit tests !3633 - Only generate mocks that are actually used in tests 
!3630 - Fix incorrect spelling of acquisition !3621 - Add User config setting for Docker executor !2913 - Upgrade Go FIPS image version to 1.18.7 !3640 - Upgrade Go distribution to version 1.18.7 !3656 ### Documentation changes - Added GitLab Runner to title !3618 - Clarify k8s executor overrides per CI/CD job !3626 - Add note about Docker-in-Docker !3628 - Fix indentation for [runners.cache] in Kubernetes docs !3634 - Clean up docs redirects !3632 - Document hidden retry for failed Docker pull !3638 - Refactor autoscaler terminology !3641 - Update redirecting external links for Runner !3631 - Explain metric …request_concurrency_exceeded_total !3558 - Update contribution details when it requires changes to both GitLab and Runner !3649 - Disk root size parameter !3652 - Remove Grafana dashboard link !3653 - Move Content from best_practices page !3665 - Remove content that didn't add value !3667 - Updated path for group runners !3664 - Fix ordered list display abnormal error !3663 - Set variable to new domain for docs review apps (Runner) !3671 ## v15.4.2 (2022-11-11) ### New features - Add a feature flag to disable resolving of TLS chain !3699 ## v15.4.1 (2022-10-21) ### Security fixes - Do not expand variables in Command ## v15.4.0 (2022-09-21) ### New features - Add renovate support !3592 ### Bug fixes - Reset token in config template when set !3593 - Remove reliance on text/transform for trace masking !3482 ### Maintenance - Update instructions with new menu title !3599 - Update project for latest Vale and markdownlint tooling and rules !3598 - Docs: Small edit to language !3596 - Updated title to match left nav !3588 - Delete tmp/GitLab-test directory. 
!3585 - Updated title to match our standards !3584 - Allow setting of Docker volume label mode independent of read/write mode !3580 - Improve clarity of runner metrics examples !3578 - Remove 'respectively' and 'please note' !3574 - Add io error to troubleshooting section !3573 - Docs: Adding details about GitOps configuration for agent !3572 - Fix runners location in docs !3555 - Add path implementation to support Windows Docker from unix !3344 - Update redhat/ubi8 Docker tag to v8.6-943 !3605 - Update alpine Docker tags !3604 ### Security fixes - Upgrade Prometheus/client-golang from v1.1.0 to v1.11.1 ## v15.3.3 (2022-11-11) ### New features - Add a feature flag to disable resolving of TLS chain !3699 ## v15.3.2 (2022-09-21) ### Security fixes - Do not expand variables in Command ## v15.3.1 (2022-09-21) ### Security fixes - Upgrade Prometheus/client-golang from v1.1.0 to v1.11.1 ## v15.3.0 (2022-08-19) ### New features - Improve documentation about installing and using Podman as a Docker executor replacement !3570 - Add support SELinux type label setting in Kubernetes executor !3451 (Omar Aloraini @ooraini) - Add a check whether boringssl is being used by using the Enabled method !3390 - Add support for server side encryption for S3 Cache !3295 (Johan Lanzrein @lanzrein) - Remove CentOS 6 packaging !2871 (Bene @bene64) ### Bug fixes - Generate artifacts metadata only for zip !3565 - Build s390x images alongside the other images !3561 - Ensure that runner always uses the customized User-Agent !3543 - Revert GitHub.com/urfave/cli back to v1.20.0 !3539 - Improve error message when there's a conflict between `pull_policy` and `allowed_pull_policies` settings !3526 - Sanitize user-provided custom build directory before passing it forward !3360 ### Maintenance - Docs: Remove old install page !3563 - Update default label for documentation MR template !3559 - Promote GitLab.MultiLineLinks to error !3554 (Niklas @Taucher2003) - Fix links split across multiple lines in 
Runner repo !3553 - Add note on GitLab instance pre-requisite for using Runners - docs !3549 - Update markdownlint and Vale configuration !3548 - Fix "broken" links (redirect) !3542 (Lee Tickett @leetickett) - Add `hostname` to the UBI-fips helper image !3540 - Docs: Fix a typo in `pull_policy` which is should be underscore !3537 - Update linter version to 1.46.2 !3536 - Update Helm chart troubleshooting for missing secrets !3534 - Protect commands/config with a mutex !3507 - Fix dead link & other runner docs cleanup !3491 (Ben Bodenmiller @bbodenmiller) ### Documentation changes - Remove premium tier from agent install docs !3535 - Add new functionality related to runner token expiration !3209 (Kyle Edwards @KyleFromKitware) ## v15.2.0 (2022-07-20) ### Bug fixes - Update GitHub.com/containerd/containerd dependency !3525 - Rename DEBUG env var to RUNNER_DEBUG !3497 ### Maintenance - Push image on registry during release stage only when enabled !3528 - Fix version history formatting !3523 - Upgrade Go to 1.17.9 in project !3515 - Disable push to ECR in all cases !3514 - Make resource checking disabled by default !3513 - Fix DEB_PLATFORMS definition in the Makefile !3510 - Monitor Docker-machine provision failed state !3355 (StoneMan @Wenyuyang) - Run incept tests only for canonical namespaces !3341 ### Documentation changes - Update command usage and GitLab Runner version !3531 - Restore previous step for freebsd install procedure !3527 - Fix link to cluster agent !3521 - Add explanation on how to select runner manager node with nodeSelector !3520 - Update sysrc command for Freebsd installation procedure !3519 (Roller Angel @rollerangel) - Add security context for init permissions container !3516 - Add note about configurability of Fargate host properties !3509 - Remove columns to correct rendering config.TOML, CLI options and ENV variable for the register table !3508 - Add the pull-policy from jobs support to Kubernetes !3504 - Remove trailing spaces from docs 
!3502 - Add note for pre existing runner use condition !3501 - Improve the output of registration command !3500 - Fix description of 'Coordinator' in FAQ !3496 - Add some clarifications to how job_env in Custom Executor works !2810 ## v15.1.0 (2022-06-20) ### New features - Generate artifacts metadata !3489 - Add image pull-policy support to services !3488 ### Bug fixes - Init submodules prior to sync to ensure submodules remote URL configuration is properly synchronized !3265 (David Alger @davidalger) - Honor entrypoint for build and helper images with exec passthrough !3212 (bdwyertech @bdwyertech) ### Maintenance - Ignore TestPowershell_GetConfiguration for all windows versions !3494 - Add TestPowershell_GetConfiguration/pwsh_on_shell_with_custom_user_(windows)... !3492 - Update Docker images for linting docs !3490 - Add note about GitLab-runner-fips !3487 - Update MinIO-go dependency to fix FIPS endpoints !3484 - The context of the language would suggest the plural form of this noun. !3483 - Fixed a broken link for FIPS RHEL runner !3481 (Brock R @fearthebadger) - Clarify on Docker engine version requirement !3479 - Expand variables for Pod volume subPath and mountpath config !3478 - Update documentation on interactive web terminal support for Helm chart !3477 - Add upgrade code sample for arm64 !3475 - Fix error in oc create configmap command - docs !3471 - Remove windows server 2004/20H2/21H1 related tests from community MR pipeline !3467 - Do not retry artifact download on 401 response !3461 - Modify doc mentions of RedHat to Red Hat !3459 (lousyd @lousyd) - Update project to use latest linting images from GitLab-docs !3452 - Use `T.TempDir` to create temporary test directory !3410 (Eng Zer Jun @Juneezee) - Use 'go install' instead of 'go get' to install tools !3402 (M. 
Ángel @jimen0) - DeviceCgroupRules for Docker Executors !3309 (Alexander Sinn @embeddedcoder) - Workaround to slow artifacts upload to GCS !3194 - Add extra information when "no matching files" !3079 (Adrian Mârza @adrian.marza.mambu) - Override ci image and registry for all windows helper pushing jobs !3485 - health-check port discovery should be consistent with WAIT_FOR_SERVICE_TCP_PORT !3033 (Anton Neznaienko @neanton) ### GitLab Runner distribution - Trigger UBI images for all releases and main branch !3466 - Fix not pushing main Runner images to Docker hub !3465 ### Documentation changes - Add Podman configuration steps !3480 - Implement allowed_pull_policies in config.TOML !3422 - Implement supporting pull_policy from jobs !3412 - Allow to overwrite Pod labels in the Kubernetes executor !3352 (Mathieu Parent @sathieu) - Add a flag to `gitlab-runner exec` to specify the CI/CD config file !3246 (Alexis Jeandeau @jeandeaual) - Use GCP metadata server and sign blob API for GCS cache URL !3231 (Jasper Maes @jlemaes) - Complete the example configuration for gcp cache !2956 (Edward Smit @edwardsmit) - Support Priority Class Name for Kubernetes executor !2685 (ayoub mrini @ayoubmrini424) ## v15.0.0 (2022-05-19) ### Security fixes - Improve sensitive URL parameter masking !3404 ### Bug fixes - Allow S3 cache's AuthenticationType to be provided case-insensitively !3446 ### Maintenance - Update Git-lfs to 2.13.3 !3458 - Add TestMachineIdleLimits in the windows 21h1 test failure !3457 - Repair redirected links !3456 - Add history to docs for Kubernetes pull policy !3455 (Raimund Hook @stingrayza) - Run bleeding edge windows builds for security pipelines as well !3449 - Fix minor grammatical error. 
!3448 (Crafton Williams @crafton) - Fix windows 21H1 pushing helper images and integration tests !3447 - Delete trailing whitespace !3443 - Fix alpine-latest pipelines for pwsh and prevent this happening on main in the future !3442 - Moved content to executor pages !3440 - Add instructions for how to specify what user a job is run as via Docker executor !3438 - Update alpine versions to latest !3436 - Parallelize Kubernetes TestRunIntegrationTestsWithFeatureFlag tests !3435 - Update FIPS base UBI image to 8.6-754 !3434 - Add alpine-latest helper image flavor and switch default alias to 3.15 !3433 - List source of Default templates !3431 (Ben Bodenmiller @bbodenmiller) - Switch from cobertura to coverage_report keyword !3429 - Stop publishing helper images to Docker Hub !3425 - Add a note to troubleshooting section regarding security release !3424 - Set max_line_length attribute in .editorconfig !3423 - Fix 21h1 hcsshim::CreateComputeSystem error !3421 - Fix indentation for Docker run runner example !3419 - Register runner with renamed paused argument !3414 - Enable CGO_ENABLED by default in golang-fips compiler !3413 - Change amazonec2-security-group to XXXX in example !3411 - Check serviceaccount and imagepullsecret availability before creating pod !3399 - Make clear-Docker-cache script to work for Docker versions below 17.06.1 !3394 (Roland Hügli @rhuegli) - Servername in openssl command !3374 - Update index.md !3356 (Don Low @Don.Low) - Docs: Small edit to change 'how' to 'what' !3325 - Update docs/monitoring/index.md !3216 - Expose fastzip configuration options !3130 - Docs: Update autoscale_aws_fargate to include ca certificate location !2625 - Print out service timeout seconds in Docker executor !279 (Elan Ruusamäe @glensc) ### GitLab Runner distribution - Add packages added by package cloud 3.0.5 release !3437 - Use SHA256 instead of MD5 for digest !3415 ### Documentation changes - Add step for AppSec in the security release template !3432 - Make explicit 
disabling of strict-host-key-checking mandatory by default !3418 - Add support for Windows server 2022 !3218 - Add sh to --shell --help following documentation !2988 (David Hannasch @dHannasch) ## v14.10.1 (2022-05-02) ### Security fixes - Disallow reserved CACHE_FALLBACK_KEY values !49 ## v14.10.0 (2022-04-19) ### Bug fixes - add tip for windows Docker permissions !3397 - Add newline between the command and the output when collapsible section is enabled !3389 (Thomas Chandelle @tchandelle) - Increase token short length if it includes prefix !3373 ### Maintenance - Update lint-Markdown image for docs !3408 - Remove explicit mention of t4g.nano !3405 - Log object storage forbidden errors during artifact downloads !3400 - Change release milestone for k8s operator - docs !3395 - Link macOS install docs to config docs !3392 - Add runnerImage property to OpenShift Operator docs !3385 (Em Karisch @QuingKhaos) - Artifacts download argument validation !3384 - Added how to fix TLS handshake timeout error in a proxy environment !3383 - Fix a typo in the cache uploading messaging !3382 (Lee Tickett @leetickett) - Add new troubleshooting step to the Kubernetes docs !3380 - Change the docs review apps IP !3379 - Debian 9 won't build / qemu now requires -F !3369 (Donny Davis @donnydavis) - Add support for Docker client version negotiation !3322 - docs: update region specific s3 endpoint urls !2975 (Casey Vockrodt @casey.vockrodt) - Add archiver staging directory option to runner helper !3403 ### GitLab Runner distribution - Add amazon/2 RPM distribution to the release list !3378 ### Documentation changes - Add Kubernetes operator installation and uninstallation docs and updated OpenShift docs !3388 - Add runner registration related properties to OpenShift Operator !3386 (Em Karisch @QuingKhaos) - Support Docker container custom labels !3304 (aylinsenadogan @aylinsenadogan) - Update release process link in readme !3319 (Théo DELCEY @theodelcey) ## v14.9.0 (2022-03-21) ### New 
features - Add posix shell quoting implementation !3367 ### Bug fixes - Use token from job payload when composing repository URL based on clone_URL !3366 - Upgrade MinIO to v7.0.24 pre-release, for IAM timeout fix !3354 - Upgrade fastzip to v0.1.9, fixes invalid timestamps !3353 - Update network responses to support 64-bit Job IDs !3346 - Upgrade fastzip to v0.1.8 !3333 - Allow changing shell executor with pwsh user !3298 - Remove bashisms from Bash shell implementation !3014 (Neil Roza @realtime-neil) ### Maintenance - Update stringData for Custom TLS cert !3372 - Add default issue & MR templates !3368 - Docs: Added fleet management link !3364 - Add link to AWS driver docs in GitLab Docker machine - docs !3363 - Change fleet scaling to best practices for runner shared services - docs !3362 - Docs: Kubernetes volumes are mounted on services !3361 (Quentin Barbe @forty1) - Add warning about enabling debug logging !3359 - Add links to clarify AWS and Docker credentials requirements and clarification on image: tag !3358 - Add link to Docker machine fork - docs !3357 - Edited for style !3351 - Run trigger-UBI-images-build job also for patch release tags !3350 - Update runner registration failure log message !3349 - Add runner registration message section - docs !3348 - Move Path interface to Docker volume consumer !3343 - Neaten helpers/path unix path impl !3342 - Fix misleading error during cache restoration !3340 - Clean up docs redirects - 2022-02-22 !3339 - Make SSH command/executor shell agnostic !3337 - Remove redundant shell config environment property !3336 - Updated agent for Kubernetes !3334 - Update CI toolchain versions !3330 - Upgrade Docker to 20.10.12 !3328 - Support Vault EE namespaces !3320 (Aleksander Zak @aleksanderzak) - Add Debian bullseye to supported versions !3318 - Add post_clone_script hook !3211 (Dan Rice @dnrce) - Docs: Update Kubernetes key file format !3097 (Brandon Hee @brandonhee) - fix grammatical error !2896 (James Dube @jamesdube) ### 
GitLab Runner distribution - Fixes version definition in VERSION file !3371 - Align Debian releases for stable and Bleeding Edge versions !3335 ### Documentation changes - Add support for Kubernetes runtime class !2326 - Add docs about security risks for using cache and the Git_strategy=fetch !3365 ## v14.8.0 (2022-02-20) ### New features - Allow specifying maintenance-note on runner registration !3268 - Support Apple Silicon (darwin/arm64) !2274 - Add variable support for services (Stefano Tenuta @ST-Apps1) !3158 ### Bug fixes - Fix artifacts upload redirection support !3308 - Handle redirects on artifact uploads !3303 - Introduce non-reusable Docker cache volumes !3269 - Merge the config template before asking the user for configuration !2561 (Matthias Baur @m.baur) - Make use of build requests/limits for build permission init container !3321 ### Maintenance - Add details to docs on CI_SERVER_TLS_CA_FILE !3332 (Ben Bodenmiller @bbodenmiller) - Ensure shell writers terminate with newline flush !3329 - Upgrade Go to 1.17.7 !3327 - Install supported Go version for Windows prior to testing !3324 - Upgrade MinIO to v7.0.21 !3323 - Fix milestone ship date error for the idlescalefactor feature - docs !3317 - Remove vendor/ directory !3314 - Divide packages buildling jobs in the pipeline even more !3313 - Use latest docs linting image for Markdown !3312 - Docs: Update shell descriptions to use full names !3310 (Neil Roza @realtime-neil) - Bump version of Go for project to 1.17.6 !3305 - Fix Azure caching example config !3300 (Stefan Asseg @stefanasseg) - Encourage use of K8s secrets !3299 (Christian Mäder @nxt.cma) - Update interactive example that was incorrectly set to non-interactive !3297 (Arran Walker @ajwalker) - Update support for session_server using Helm chart !3296 - Cleanup cache proxy pattern !3294 - Adds details about how to limit the number of VMs when autoscaling !3289 - Update linting configuration from GitLab project !3288 - Replace Ruby:2.6 in examples 
and test cases with Ruby:2.7 !3287 - Update runner security docs !3279 - Update Page with more common -machine-machine-options for use with Docker and amazon ec2 instances. !3259 - Add information on how to connect to S3 from Runners on Amazon EKS with IAM Role for ServiceAccount !3251 - Add version number to windows helper image tags !3217 (Florian Greinacher @fgreinacher) - Update docs/executors/shell.md !3208 - To disable wait_for_services_timeout use -1 not 0 !3207 - Add support for extra submodule update flags !3192 (Nejc Habjan @nejc) - Clarify that listed limitations are specific to Windows !3155 - Ensure proper assumptions !3038 (Deniz Adrian @zined) - Update the security caveats about the usage of privileged mode !2482 - Add Debian/bullseye to packagecloud DEB_PLATFORMS !2940 (James Addison @jayaddison-collabora) ### Documentation changes - Add details on concurrent parameter for Docker executor - docs !3286 - Add alpine 3.15 as new runner/helper-image flavor !3281 (Fabio Huser @fh1ch) ## v14.7.0 (2022-01-19) ### New features - Add RHEL/UBI amd64 FIPS support !3255 ### Bug fixes - Exclude stderr content from parsing UID/GID information within Docker executor !2768 ### Maintenance - Fix fips rpm package name to sign !3285 - Mark "prepare go fips" job as optional !3284 - Updating documentation linting images for project !3283 - Fix external links from project and remove old redirects !3282 - Restore Git 1.8.3.1 tests !3278 - Fix tests using gitlab-grack submodule !3272 - Clarify how to configure network mode with Docker executor !3264 - Update golangci-lint !3261 - Pass UPSTREAM_CI_COMMIT_REF to incept tests !3257 - Update Sentry library from raven-go to sentry-go !3199 (Markus Legner @mlegner) - Bump used Go version to 1.17 !3112 - Show error details for failed artifact uploads !3240 ### GitLab Runner distribution - Fix the 'stable GitLab release' job !3252 ### Documentation changes - Point to GitLab maintained fork of Docker Machine !3276 (Thameez Bodhanya 
@thameezbo) - Release of a FIPS Compliant runner !3274 - Adds note about 5 GB S3 cache limit !3266 - Added troubleshooting steps !3273 - Fix broken external links !3270 (Niklas @Taucher2003) - Update to mention CentOS stream 8 instead of CentOS linux 8 !3267 (Ondřej Budai @ondrejbudai) - Document need for entrypoint to open shell !3256 - Updated language for Kubernetes executor !3253 - Update link to K8s pull policy !3254 - Improve the cache documentation for k8s executor !3237 - Update docs for GitLab Runner Helm Chart using ACS (retired) to AKS !3219 - Remove trailing spaces for Jan 2022 TW monthly chores !3275 ## v14.6.0 (2021-12-17) ### Bug fixes - Implement improved JSON termination mode for Kubernetes !3225 ### Maintenance - Add Vale rule updates from the GitLab project to this project !3249 - Minor capitalization and style fix !3248 - Trigger UBI images build also from security fork !3245 - Add note about running Docker runner with Docker-machine functionality !3236 (Ihor Martyniuk @enoot) - Remove coverage reports from S3 release !3235 - Add curl in alpine image !3233 - Fix flaky garbage collection test !3230 - Move the "static QA" job to the postrelease stage !3227 - Automatically retry integration_k8s jobs !3226 - Docs: Clarifying that it's "a" macOS machine, rather than "yours" !3223 - Remove unneeded quotes from markdownlint config !3215 - Run incept tests in the postrelease stage so that all binaries and images are available !3214 - Update markdownlint and Vale rules from GitLab project !3213 - Add additional docs and integration tests for cache.s3.AuthenticationType !3210 - Docs: Changed "clean up" from noun to verb !3206 - Docs: Clarify what Runner Cloud is !3205 - Drop gorilla/mux in favour of http.ServeMux !3203 - Add idle GitLab_runner_jobs metric per runner !3202 - Fix links to shared runners documentation !3201 - Add openssl command to download the cert !3200 - Improve Runner container image size for Ubuntu and alpine !3185 (Furkan Türkal 
@Dentrax) - Autoscale VMs based on a percentage of in-use VMs !3179 - Use native go errors and drop pkg/errors !3104 (feistel @feistel) - Fix the 'stable GitLab release' job !3252 ### GitLab Runner distribution - Push stable images built on security fork to canonical repository !3242 - Update the GitLab Release job !3228 ### Documentation changes - Update lint-html image for docs !3239 - Docs: Added OpenSSL SSL_connect: SSL_ERROR_SYSCALL troubleshooting topic !3229 - Docs: Add pod cleanup info in the Kubernetes doc !3224 - Update docs for installing runner from binary !3222 (Wojciech Pater @wpater) - Changed symbol in docs table !3220 - Add Native Windows OpenSSH Server and PowerShell support for Virtualbox and Parallels executors !3176 (Guillaume Chauvel @guillaume.chauvel) ## v14.5.2 (2021-12-10) ### Security fixes - Fix `syscall.forkExec` calling `close(fd=0)` on pipe error [!44](https://gitlab.com/gitlab-org/security/gitlab-runner/-/merge_requests/44) ## v14.5.1 (2021-12-01) ### Security fixes - Limit Docker executor's container reads to prevent memory exhaustion [!37](https://gitlab.com/gitlab-org/security/gitlab-runner/-/merge_requests/37) ## v14.5.0 (2021-11-21) ### New features - Scrub the X-Amz-Security-Token parameter from query strings !3171 (Estelle Poulin @estelle.a.poulin) - Kubernetes executor container security context !3116 ### Bug fixes - Fix lockfile cleanup for submodules !2858 (Nejc Habjan @nejch1) ### Maintenance - Docs: Added SSH executor disable_strict_host_key_checking details !3195 - Fix releasing alpine 3.12 helper images !3193 - Renamed enterprise_guide to fleet_scaling !3191 - Add all available unix OS build tags to unix targeted go files !3189 (Arran Walker @ajwalker) - Fix GitLab grack to use our own repositories !3187 - Use newer docs linting image !3186 - Update changelog generator configuration !3183 - Fix Docker pulling image integration test !3182 - Break out shell blocks to allow copy from button !3181 - Add troubleshooting info 
to Runner installation with Agent !3180 - Log errors when failing to close Docker client !3178 - GitLab-runner Dockerfile: clear /tmp of Ubuntu Docker image !3177 (Yalcin Ozhabes @trim_the_main) - Fix PVC volume config generation in Kubernetes executor !3174 (Sandra Tatarevićová @sandra17) - Add troubleshooting note for dind connection error on k8s executor !3173 - Docs: Clarified concurrency setting !3172 - Fixed broken external links !3168 - Fix: typo in docs/register/index.md !3166 (David Duncan @duncan.davidii) - Docs: Clarify runner token !3165 (Stefan Schmalzhaf @the_s) - docs: add useful notes on setting session_server !3164 (Yang Liu @robturtle) - Updated broken external links !3163 - Refactor images building and publishing jobs !3162 - Add changeable config directory for root !3161 (Boris Korzun @boris.korzun) - Docs: Correct link to Windows shared runner info !3160 - Use sync.Mutex rather than RWMutex for simple protections !3159 - Remove need for Git in runner images !3152 (Ben Bodenmiller @bbodenmiller) - Suppress Git hints about branch naming standards !3148 - Update golang-cli-helpers library, support env namespaces !3147 - Handle situations when neither `ntpdate` nor `sntp` is available !3143 (Alexander Kutelev @kutelev) - Docs: Small edits to enhance readability !3137 (Ankita Singh @ankita.singh.200020) - Better support for PowerShell on Kubernetes !3119 - Do not pass in bash detection script into build container !3095 - Kubernetes executor should only used SharedBuildsDir behaviour when it is required !3078 (David Alger @davidalger) - [DOCS] - Improve image pull secrets documentation clarity !3047 (Aaron Johnson @acjohnson1985) - Document how to run jobs as non-root user for Kubernetes and Kubernetes with Helm !2900 - Allow finer-grained control over pod grace periods. !2130 (Dominic Bevacqua @dbevacqua) ### GitLab Runner distribution - Provide Docker images for alpine 3.12 (default), 3.13 and 3.14. 
!3122 ## v14.4.0 (2021-10-25) ### Security fixes - Sanitize Git folder after each build !3134 ### Bug fixes - Add Kubernetes pod label sanitization !3054 (Theodor van Nahl @t.vannahl) ### Maintenance - Revert "Merge branch 'alpine-3-13' into 'main'" !3157 - Consider all Docker pull image system error as runner script failure !3142 - Docker Executor: use Stop for graceful shutdown !3128 (Aaron Friel @frieltwochairs) - Update to MinIO-go v7.0.13 !3120 (Philip Schwartz @pschwar1) - Explicit configuration for cache s3 authentication type !3117 - refactor: remove osext dependency !3101 (feistel @feistel) - Respect Docker Runtime setting for services !3063 (Jakob-Niklas See @networkException) ### GitLab Runner distribution - Split packagecloud release by distribution flavor !3146 ### Documentation changes - Mark URLs compatible with markdownlint-cli 0.29.0 !3154 - Remove Fedora 34 from list of packages - docs !3151 - Fixed docs crosslink from Advanced Config !3149 (Raimund Hook @stingrayza) - Update Autoscale config due to Docker machine deprecation docs !3144 - Compatibility chart update !3141 (Alexander Kutelev @kutelev) - Update Docker_machine.md - docs !3140 - Change description for GitLab_runner_limit !3139 - docs: Delete link to GitLab.com-config (target does not exist) !3138 (Stefan Schmalzhaf @the_s) - Fix YAML indention of GCS secretName !3136 (Kate @kate_stack11) - Replace incorrect terminology (storage -> bandwidth) !3135 (Jay Williams @codingJWilliams) - Docs: Updated Microsoft Service policy links !3133 - Runner: fix some broken external links !3127 - Additional step when install GitLab Runner using KAS !3126 - Added info about code handling for Windows runner !1975 ## v14.3.0 (2021-09-21) ### New features - Cleanup build directory with feature flag !3065 - Consider only Docker image pull system error as runner-system-failure !3060 ### Security fixes - Restrict accepted metric listener HTTP methods !3109 ### Bug fixes - Fix Docker-machine executor check to 
reduce warning log spam for no runners able to process a job !3106 (Thomas Scully @tscully49) ### Maintenance - Turn on FF_SCRIPT_SECTIONS for GitLab Runner pipelines !3124 - Expose runner limit error message on registration !3108 - Split linux packages into multiple jobs !3105 - Upgrade MinIO to v7 !3099 - Update runner docs tests !3096 - Remove Docker-machine feature flag !3093 - Improve log line decoding for Kubernetes !3091 - Add strict host key checking to SSH config !3074 - Upgrade alpine version to 3.13.6 !3057 - Improved bash shell escaping (behavior, performance) !2882 ### Documentation changes - Added mac setup guide !3129 - Fix trailing spaces in Runner docs !3125 - Per-build networking is recommended !3118 - Fixed typo in Dockerfile example for installing Runner with Docker !3113 (Markus Möslinger @metabytewien) - Update documentation template !3107 - Use latest docs linting images !3100 - Update feature-flags.md, fixed typo, runners.feature_flag -> runners.feature_flags !3098 (Joost van der Sluis @jvdsluis) - Reword so that Docker services list "images" instead of "applications" !3094 - Adds Linux register command for behind a proxy !3087 (Rui Duarte @P0w3rb0y) - Add info for Docker_HOST value in Using Docker:dind !3085 - Added Docker image build steps for Alpine !3076 - Add doc in FAQ about running Elasticsearch !3110 - Fix typo in security process !3092 ## v14.2.0 (2021-08-22) ### Bug fixes - Do not execute checks for windows integration test in docs only default branch pipeline !3070 - Limit job log to ensure it contains UTF-8 valid data !3037 - Fix Ubuntu helper image builds to use correct platform (not always amd64) !3032 (Sneha Kanekar @skanekar1) - Fix trace short writes when large masks are configured !2979 - Fix cleaning of removed sub-submodules when using fetch strategy !2883 (DmtiryK @dkozlov) ### Maintenance - Update trace force send interval to be dynamically adjusted based on update interval !3064 - Update rules for windows tests to fix 
docs pipeline !3062 - wrap each line in a script block as a section !3051 - Add new histogram metrics to Docker+machine executor !3050 - Do not ignore failure in Windows jobs due to timeout !3042 - Fix release job to use JOB-TOKEN !3041 - Support of Kubernetes lifecycle hooks !3036 - Add all of gl-docsteam to docs CODEOWNERS !3026 - Add Evan and Marcel to docs CODEOWNERS !3025 - Use CI_JOB_TOKEN to create releases !3023 - Explicitly set Kubernetes pull image failure as script failure !3015 - Implement changes rules for executing full and docs-only pipelines !2978 - Move build log's ANSI Reset to before newline to fix test output  !2977 - Update configuration of changelog generator !2968 - Update remaining only except to rules in pipeline !2938 - Add support for determining helper image from node selector information !2840 - Upgrade specified Git version to 2.30.2 !2825 - Add allowed images restriction to Kubernetes executor !2669 (Yi Wei Pang @pangyiwei) - Allow CI image option to override base image name (VirtualBox & Parallels) !1257 (Alexander Kutelev @kutelev) ### Documentation changes - Modified the runner troubleshooting page for confirming the GitLab version and runner version !3081 - Update docs with the correct link about runner scope !3077 - Clarify the need for max overwrite definitions when overwriting via CI/CD script !3075 - Add troubleshooting entries for k8s-caused faults !3073 - Docs: Recommend to use latest self-managed runners with .com !3072 - Docs: Addded FREE tier badge !3069 - Docs: Addded FREE tier badge !3068 - Docs: Addded FREE tier badge !3067 - Docs: Added code block end tag that was missing !3066 - Docs: Fixed typo, changed "process" to "signal" !3061 (Igor @igordata) - Docs: Add how to log in as current user in the Terminal so GitLab-runner installs properly !3055 - Improve wording of docs/security/index.md !3031 (Ed Sabol @esabol) - Docs update advanced configuration !3028 - Update Vale rules with latest settings from GitLab project 
!3024 - Fix outdated link to custom build directories in runner advanced configuration docs !3022 (zertrin @zertrin) - Docs: Add version for Kubernetes custom builds directory mount option !3016 (Ben Bodenmiller @bbodenmiller) - Capitalize CPU on line 187 !2893 - Create Enterprise guide for deploying and scaling a GitLab Runner Fleet !2755 ### Other changes - Improve testKubernetesGarbageCollection integration test !3080 - Update the Kubernetes executor's attach strategy to work with Windows pods !3059 - Fix missing end quote in packagecloud script !3049 - Fix incorrect Kubernetes Windows paths for artifacts and caches !3046 - Set DOCS_REVIEW_APPS_DOMAIN in the CI config directly !3044 - Updated CODEOWNERS for docs team members who are maintainers !3035 - Update build versions for Fedora !3034 - Enable container scanning for GitLab Runner !3027 - Garbage collection supports for Kubernetes executor !2983 - Fix flakiness of the TestAttachReconnectReadLogs test !2954 ## v14.1.0 (2021-07-20) ### Bug fixes - Fix trace short writes for large log lines !2993 - Confirm if Docker is installed in `clear-docker-cache` !2961 ### Maintenance - Add CODEOWNERS for docs !3017 (Ben Bodenmiller @bbodenmiller) - Add TestBuildOnCustomDirectory/pwsh as test failure on windows 20h2 and 2004 and TestMachineIdleLimits on 1809 !3011 - Allow KUBECONFIG and GitLab_CI env in integration tests !3010 - Fix vendor out of sync !3008 - Use image's PowerShell Core for Windows tests !3005 - Remove explicit use of GOROOT/GOPATH now that we're using Go modules !3002 - Remove unneeded test configuration !3001 - Fail k8s integration tests when the check command fails !2999 - Fix on-demand releasing of helper images !2998 - Stop considering Docker pull image as runner system failure !2995 - Skip Docker-machine provision on failure by default !2986 - Fix make prepare_index read GPG_KEY from file !2985 - Fail CI build if test failures not updated !2976 - Only print necessary env vars in tests !2971 - 
Update environment name for Linux Docker images !2970 - Don't run fuzz variable mask test for docs !2965 - Add environment for GitLab stable release !2962 - Add environment name for package jobs !2959 - Use file based variables for GPG_KEY !2958 - Update default branch from master to main !2930 - Only allow failures with exit code 99 in Linux tests !2704 - Test passing a config template to the RegisterCommand !2451 - Make the variable type for the GitLab CI secret configurable !2414 ### GitLab Runner distribution - Add support for Windows Server core, version 20H2 [Semi-Annual Channel release] !2908 ### Documentation changes - Restructure markdownlint configuration !3012 - Update sudo command for linux repository install !3009 - Fix broken links in Runner docs !3007 - Add note on IdleCount to autoscaling docs !3004 - Update feature flag FF_SKIP_Docker_MACHINE_PROVISION_ON_CREATION_FAILURE grammar !3000 - Docs: Complete sentence, link to general SSL troubleshooting info !2994 - Update runner readmes to index !2990 - Added note for Overwriting Kubernetes Namespace section !2984 - Mention liveness project when adding Windows runners !2981 - Add details on how to assign Runner Manager to security fork project !2974 - Docs: Updated Shell topic titles to be more clear !2972 - Update Kubernetes execution strategy documentation !2966 - Fix outdated VS Code package recommendation !2964 - Add docs about DEB/RPM packages signature verification !2963 - Docs: Specify exact Parallels product names !2960 - Provide JSON job response file for custom executor. 
!2912 (Paul Bryant @paulbry) - Add instructions for proxying the GitLab registry !2865 - Fix typo/incorrect grammar !2842 (Per Lundberg @perlun) ## v14.0.0 (2021-06-19) ### New features - Send GPU config string !2848 - Add support for selective Git submodule paths inclusion/exclusion !2249 ### Bug fixes - Fix race blocking goroutine in shell executor !2910 - Order masked values by length to prevent longer values being partially revealed !2892 - Kubernetes attach strategy hangs when log file is deleted !2824 ### Maintenance - Enable Kubernetes attach strategy by default !2955 - Add ASDF .tool-versions file !2948 - Make check test directives depend on prepare_done !2947 - Fix broken test output produced by MakeFatalToPanic !2929 - Use main branch for docs reviews !2925 - Disable windows anti-malware monitoring !2920 - Remove FF_RESET_HELPER_IMAGE_ENTRYPOINT feature flag !2906 - Remove legacy process termination for shell executor !2905 - Pull helper image from GitLab registry by default !2904 - Pwsh shell support for Kubernetes when legacy execution strategy ff is set to false !2902 - Remove offpeak settings Docker autoscaling !2897 - Add shell benchmarks !2894 - Make pwsh the default shell for new registrations !2889 - Remove FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER feature flag !2887 - Remove deprecated Makefile targets !2885 - Update Kubernetes client-go library to 0.21.1 !2878 - Segregate `unit test` job into a separate `integration test` job !2783 - Add supported failure reasons for build errors !2744 - Upgrade kardianos service !2729 - Enable fastzip & progress meter !2565 - Allow building behind a proxy !2168 (dHannasch1 @dHannasch1) ### GitLab Runner distribution - Remove support for Windows 1909 !2924 - Remove support for Windows 1903 !2915 - Remove Ubuntu/eoan package !2888 - Publish Windows helper image :latest tags !2879 - Add Ubuntu-based runner-helper image !2835 ### Documentation changes - Add troubleshooting note on GitLab-runner symlink removal !2953 - 
Disable skel directory usage by default for DEB/RPM installation !2942 - Update PROCESS.md referencing runner release helper templates !2939 - Add tlsctl to runner docs !2937 - Remove old redirects !2933 - Update troubleshooting documentation for old Docker versions on Windows Server !2927 - Add remove_date to YAML frontmatter !2922 - Revert (arch) change !2918 - Shell executor: Link to latest available Git version !2917 - Be more specific about Windows version support !2916 - Made images smaller !2909 - Add troubleshooting steps to GitLab Runner operator !2901 - Fix external links !2895 - Fixed typo in Docker runner documentation !2891 - Use DNS option in TOML to avoid proxy and route in docs !2815 - Change order of steps to configure autoscaling !2665 - Update docs/executors/Kubernetes.md !1903 ### Other changes - Remove conversion between failed and cancelled buildStage for Prometheus metrics !2932 - Delete unused 1909 allowed test failures !2928 - Updated test case names for TestBuildJobStatusEnvVars !2907 (listout @listout) - Specify the working version for lsif-go image !2898 - Remove /usr/lib/GitLab-runner symlink from packages !2890 - Make Git-lfs tar checksum usage coherent !2268 ## v13.12.0 (2021-05-20) ### New features - Support Git strategy with Kubernetes executor !2862 ### Bug fixes - Add utf-8 invalid replacement encoder to trace transformers !2881 - Pass PowerShell scripts as a file to shell executor !2874 - Add new eval execution strategy for capturing exit code !2818 ### Maintenance - Revert "Publish Windows helper image :latest tags" !2880 - Use latest docs linting images for testing !2877 - Ensure Docker client is in experimental mode !2870 - Improve trace masking performance !2863 - Use PowerShell for resolving paths !2836 - Move commands package integration tests to own files !2795 - Allow whole Vault Secret configuration to be variable-expandable !2772 - Update coverage and windows tests to rules !2756 ### Documentation changes - Changed 
${arch} to $(arch) !2875 - Fix TOML syntax in Kubernetes documentation !2872 - Convert pull policies bolds into headers !2867 - Update GitLab Runner connectivity !2866 - Update Kubernetes pull policy documentation !2860 - Document operator properties, custom installation and permissions !2847 - Clarify, N-to-M relationship of Runners to GitLab instances !2788 ## v13.11.0 (2021-04-20) ### New features - Allow user to specify multiple pull policies for Kubernetes executor !2807 ### Bug fixes - Use inspect.GID() to collect GID value in Docker executor !2769 - Fix Kubernetes attach strategy for non-root environments !2749 ### Maintenance - ci: wrap GOCACHE with double quotes !2859 - Add bridge job to runner-incept !2845 - Fix archives/zip tests on Windows !2832 - Report CI test build failures !2829 - Add job URL to container labels !2823 - Compile GitLab-runner binary for shell integration tests !2820 - Don't return error when checking feature flag !2812 - Simplify the triggerring of 'GitLab-docs' Review App pipelines !2809 - Fix Git 1.8.3.1 job errors !2791 - Fix job duration value when in log format is JSON !2787 - Add support for CSI volumes !2784 (Brandon Butler @brandonbutler) - Move Kubernetes integration tests to separate file !2779 - Support for env variables expansion for image name for Kubernetes executor !2778 - Segregate integration tests in Docker executor !2776 - Remove test with int overflow !2597 (Fábio Matavelli @fabiomatavelli) - Adding Git depth for submodules !2107 (Nico Bollen @bollenn) ### Documentation changes - Removed reference to master !2855 - Fix pipeline configuration for docs branches !2853 - Make clear when FF runner configuration syntax was introduced !2852 - Roughly alphabetised Docker Container parms - docs !2851 (Raimund Hook @stingrayza) - Updated docs reference to MinIO !2850 (Raimund Hook @stingrayza) - Documentation Update/docs/security/index.md !2849 (Anshuman Singh @singhanshuman) - Add clarification on Runner and GitLab Version 
match !2841 - Edited for style !2838 - More edits for style !2834 - Add services note to Windows container troubleshooting !2833 - Edited for grammar and style !2830 - Moved troubleshooting to OS topics !2819 - Fix heading type in GPU documentation !2817 - pWIP: Add configuration section to docs for runner Operator on OpenShift !2816 - Add feature flags in config.TOML !2811 - Update links to redirected files !2808 - Add a note to docs on usage of Docker script from `13.9` !2806 - Remove Docker-machine provision on creation failure !2805 - Improve documentation for GPUs for all executors !2804 - Update redirected links in runner docs !2802 - Add troubleshooting section in Kubernetes executor documentation !2799 (Vincent Firmin @winkies) - Edited for style and consistency !2777 - Document how to customize environment variables for Runner !2775 ### Other changes - Update warning message URL for DockerHub !2844 ## v13.10.0 (2021-03-21) ### Bug fixes - Don't print DockerHub helper image warning when custom image is defined !2761 - Allow graceful termination on Windows !2739 ### Maintenance - Include symbols in GitLab-runner binary !2800 - Move process package integration tests to own files !2794 - Update `code navigation` job definition !2792 - Rename shell_writer_test.go to reflect use !2782 - Move virtualbox executor integration tests to own file !2781 - Move parallels executor integration tests to own file !2780 - Update trace limit wording !2765 - Update of Docker error message !2759 - Add integration tests for trace limit handling !2758 - Add integration tests for build trace masking !2754 - Version pin pwsh version inside of our CI tests !2748 - Update hashicorp/go-version dependency to v1.2.1 !2746 - Removal of unused replace from go.mod !2745 - Start updating runner pipeline to rules !2728 ### Documentation changes - Add mentions to pwsh to documentation !2797 - Update Vale rules !2789 - Add mention to pwsh support in Docker executor docs !2786 - Fix example gcp 
zone for Docker+machine config !2771 - Runner: Update spelling exceptions list !2770 - Docs for installing runner on a separate machine !2767 - Update docs/monitoring/README.md !2766 - Fix misspelling of "Force" in PowerShell examples !2764 (Gabriel Smith @yodal\_) - Add runner execution flow diagram !2760 - Fix duplicate labels in CONTRIBUTING.md !2747 (h.yoshida @hirokiyoshida837) - Add backticks around --Google-accelerator docs !2742 - Update documented check internal for config changes !2741 - Add documentation for using GPUs with Docker Machine !2736 - Update MachineOptions to only mandatory configuration. !2673 ## v13.9.0 (2021-02-22) ### New features - Enable PowerShell Core support in Kubernetes Executor !2705 - Enable PowerShell Core support in Docker Executor on Linux !2563 - Add support for setting the artifact/cache compression level !2684 - Display feature flags that are set to a non-default status !2606 - Add GPU support for Docker executor !1955 (Andreas Gravgaard Andersen @agravgaard) ### Security fixes - Remove skipVerify from client struct !2654 ### Bug fixes - Fix panic when PKCS7-encoded payload has no certificate !2737 - Correctly set fastzip's staging directory !2693 - Improve trace secret masking with x/text/transform !2677 - Add explicit bash shell error checks !2671 - Terminate requests on process shutdown !1684 ### Maintenance - Change env to bash to resolve Illegal option !2732 - Upgrade Docker version to 20.10.2 !2722 - Update Docker script default to Docker prune volumes !2720 - Default to no Docker image compression in local environment !2717 - pwsh scripts can be passed over STDIN on shell executor !2715 - Update GitHub.com/Docker/cli dependency !2714 - Add artifact and cache download progress meter !2708 - Remove requirement for Docker daemon experimental mode from image build scripts !2707 - Fix the image that is used to create the pwsh tag !2706 - Exclude out/binaries/GitLab-runner-helper from binaries artifacts !2703 - Improve 
logging to packagecloud push !2702 - Upgrade PowerShell Core to 7.1.1 !2696 - Make TestHelperImageRegistry not need real prebuilt images !2682 - Add test for permissions container in k8s !2676 - Add object storage cache credentials adapter !2674 - Add artifact/cache upload progress meter !2670 - Refactor Docker pull logic into dedicated package !2659 - Update to Docker client v20.10.2 !2658 - Update gitlab-terminal package !2656 - Create separate helper image with PowerShell Core !2641 - Prioritize helper image specified in config to change K8s log dir permissions !2578 (naruhito @naruhito1) - Remove helpers/trace redundant io.Pipe use !2464 - Kubernetes tests simplify build creation !2445 - Report deleted pods as a system failure with attach strategy !2444 - Fix incorrect path/filepath use !2313 - Improve Docker cleanup script to also include old builds and images !2310 - Output coverage to Cobertura report !2252 - Version pin pwsh version inside of our CI tests !2748 - Add integration tests for trace limit handling !2758 - Add integration tests for build trace masking !2754 ### Documentation changes - Document how to view Windows service logs with cli !2733 - Update linux-manually.md !2731 (Simon Carr @simonjcarr) - Added details about guided install !2730 - Use correct Vale extension in VS Code ext file !2727 - Refresh Vale linting rules !2726 - Specify tag syntax for tagged releases !2725 - Add note about permissions !2723 - do not link to unmaintained Docker image cleanup app !2712 (Antoine Beaupré @anarcat) - Fix formatting of FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY !2701 (Ben Bodenmiller @bbodenmiller) - Clarify download instructions !2700 - Replace x86 and amd64 with 32-bit and 64-bit terminology for download !2699 - Add buildImage in the default OpenShift operator example !2698 - 1/3 Add crosslink to Kubernetes Agent docs !2697 - docs: Clarify self-signed certs on windows !2695 (Stefan Schmalzhaf @the_s) - Docs: Fix minor whitespace inconsistency !2694 
(Stefan Schmalzhaf @the_s) - 27451 Fix Documentation - podAnnotation should be a TOML table !2692 (Benjamin Souty @B-Souty) - Split docs linting jobs !2689 - Docs: Links documentation to working example for CMD Shell usage on Windows GitLab Runners where only PowerShell can be the default !2687 - Documentation - Supported OS updates !2683 - Whole hour periods for autoscale !2681 - Mention version sync on first sign of trouble !2680 - Fix typo in Kubernetes.md !2675 - Removed extra spaces !2672 - Update install runner on Kubernetes install page - docs !2668 - Simplification of dind service section !2663 (Keith Kirkwood @keithkirkwood) - Instructions for installing dependencies on CentOS. !2619 (David Hannasch @dHannasch) - Include in docs details about the updated script !2586 - Changed recommendation to instance type in docs to a smaller one !2579 (Jan Pobořil @janpoboril) - Document known race condition about Helm upgrade !2541 - Improve TLS custom cert documentation !2487 ### Other changes - Add CODEOWNERS for 3 files at repo root !2667 - Revert "Improve trace secret masking with x/text/transform" !2752 ## v13.8.0 (2021-01-20) ### New features - Allow user to specify multiple pull policies for Docker executor !2623 ### Bug fixes - Fix fastzip to support artifacts for nonroot users !2661 - Fix s3 cache upload for aws EKS IRSA !2644 (Clemens Buchacher @cbuchacher) - Fix cache push for failed jobs for Docker and Kubernetes executor !2638 (Axel Amigo @hax0l) - Fix Azure cache not working in K8S executor !2626 - Fix path checking in Build.getCustomBuildDir !2251 ### Maintenance - Add Docker integration tests for cache push for failed job !2657 - Report that the Runner returns exit codes !2645 - Update GoCloud to v0.21.1+ !2637 - Add tests to PowerShell shell !2634 - Lock mutex in Buffer.SetLimit !2627 - Fix/k8s skip hostaliases for empty services !2582 (Horatiu Eugen Vlad @hvlad) - Fix windowsPath to handle local named pipes correctly !2470 - Override Git HTTP user 
agent !2392 - Allow using prebuilt Docker helper images when running from out/binaries !2104 ### Documentation changes - Finish runner standardization update !2666 - Update linux-repository.md changes date of end of life date of CentOS 8 !2662 (Mohammad.E @emamirazavi) - Removed ntrights reference !2660 - Restructure "Supported options for self-signed certificates" doc section !2651 - Edited runner to be lowercase !2650 - Edited runner to be lowercase !2649 - Edited runner to be lowercase !2648 - Edited runner to be lowercase !2647 - Edited runner capitalization !2646 - Fix documentation issue in Kubernetes node_selector !2643 - Update docs for the new GitLab Runner operator !2640 - Synchronize Vale rules and fix !2633 - Improve documentation for configuring the cache with a K8S runner !2632 - Add Azure to possible runners cache type !2631 - Add reference to GitLab Runner Operator and 13.7 MVC issue !2630 - Add `make runner-and-helper-docker-host` to `make help` !2629 - Add troubleshooting guide for GitLab Runner !2628 - Runner: add Vale test for possessive form of GitLab !2624 - Docs: Removed possessive GitLab's !2620 - Runner: fix unquoted curl command URL strings !2618 - Runner: move CurlStringsQuoted.yml rule to error !2617 - Add Windows Server Core command for logs !2602 - Fixed typo: libivrt -> libvirt !2519 (Aaron @aaronk6) - Autodetect VirtualBox path on Windows !2020 (Pedro Pombeiro @pedropombeiro) - Update Kubernetes.md to reflect !1470 ### Other changes - Add missing entry to 13.6 changelog !2642 ## v13.7.0 (2020-12-21) ### Security fixes - Updating min TLS version to 1.2 !2576 - Replace umask usage with files permission change when a non-root image used !2539 ### Bug fixes - Upgrade fastzip to v0.1.4 !2605 - Remove .Git/config.lock in build directory !2580 - Fix attempting Kubernetes Docker registry secret cleanup on failed creation !2429 ### Maintenance - Gracefully fail unexpected Stream() calls !2609 - Update lowest Git version support inside of CI 
!2600 - windows: Don't log crypto/x509: system root pool warning !2595 - Add .editorconfig !2588 - Use helper image to change K8s log dir permissions !2573 - Fix check_modules command !2572 - Replace assert.True and errors.Is with assert.ErrorAs/ErrorIs !2571 - Exclude secure jobs from docs pipelines !2564 - Submit exit code back to Rails when a job fails !2562 - Fix dead URL in docker.go !2557 (Victor Mireyev @AmbientLighter) - Pin StefanScherer/windows_2019_docker box to 2020.04.15 !2555 - Pull helper image from GitLab.com registry !2554 - Update testify package to version supporting errors.Is/As directly !2537 - Introduce Docker internal user package !2534 - Introduce Docker internal exec package !2533 - Send build trace bytesize in the final build update !2521 - Support Pod DNS policy for Kubernetes executor !2477 - Support Pod DNS Config and Policy for Kubernetes executor !2473 - Add support for Windows Server Core 2004 !2459 (Raphael Gozzo @raphaelgz) - Ensure that runner is unregistered on registration failure !2447 - Make runner-and-helper-docker-host use host arch and os. 
!2432 (Horatiu Eugen Vlad @hvlad) - Improve cache upload speed !2358 (Erik Lindahl @erik.lindahl) - Disable syslogging by default for systemd systems !2333 (Matthias Baur @m.baur) ### GitLab Runner distribution - Publish Docker images to ECR public !2608 - Add job to create ecr token for pipeline !2607 - Install aws cli in CI image !2599 ### Documentation changes - Removed spaces from diagram !2616 - Remove alert box vale rules !2613 - Add interaction diagram to Kubernetes executor docs !2612 - Changed format of alert boxes !2610 - Fix unescaped characters in a table !2604 - Correct grammar/spelling errors in advanced configuration !2603 - Removed one-sentence topics !2601 - Fixed error in `config example` !2598 - Fix indentation of runners.cache in Kubernetes.md !2592 (Yorgos Oikonomou @yorgos..oik) - Fixed Vale future tense issues !2585 - Fixed Vale future tense errors !2584 - Moved Kubernetes keywords into sub-tables !2583 - Commented out modal install window details (2 of 2) !2577 - Fix trailing space issues in docs !2569 - Fix broken links in the GitLab-runner docs !2568 - Fix typo in monitoring documentation !2556 (Horst Gutmann @h.gutmann) - Add documentation on how to add a new Windows version !2498 - Updated compatibility table !2489 - Update index page for style !2484 - Allow specifying `basefolder` when creating virtualbox VM !2461 (Jack Dunn @JackDunnNZ) - Runner guided install (2 of 2) !2460 - Allow to set extra hosts on Kubernetes executor !2446 (Horatiu Eugen Vlad @hvlad) - Updates documentation to highlight that SELinux can cause errors in the "Prepare Environment" state !2309 (Sean McNamara @seanmcn) - Update AWS autoscale docs for clarity !1820 - Update generated PowerShell script example mkdir !1565 - Add advice on network segmentation !1404 ### Other changes - Update GitLab Changelog configuration !2615 - Remove product from product-categories URL from template !2611 ## v13.6.0 (2020-11-21) ### New features - Add labels to cache-init Docker 
container !2412 - Expose custom executor services with $CI_JOB_SERVICES !1827 (Jovan Marić @jovanmaric) - Enable PowerShell Core support in Docker-Windows executor !2492 ### Maintenance - Expose ci job services as custom !2550 - Publish helper images to registry.GitLab.com !2540 - Allow user to define command and entrypoint to services from config !2525 - Consolidate helper exe location for Dockerfile build !2501 - Fix Azure cache uploads using Go Cloud !2500 - Fix definition of security related jobs !2499 - Move doc/dependency_decisions.yml file to a better place !2485 - Fix TestBuildCancel from timing out !2468 - Teach artifact/cache commands about the archive interface !2467 - Improve build logging testing !2465 - Skip CleanupFileVariables stage if no file variables !2456 - Change in interactive --URL question to match docs !2431 - Added SubPath support to Kubernetes volume definitions !2424 (Matt Mikitka @mmikitka) - Add fastzip archiver/extractor !2210 - Implement archiver/extractor interface !2195 - Manage driver defined job variables in custom executor !2032 (Paul Bryant @paulbry) - Update doc about release windows image script !1561 ### Documentation changes - More Vale rules updates !2552 - Clarify windows install instructions !2549 - synchronize Vale rules and fix !2547 - Add reference to config.TOML for setting Docker image pull policy - docs !2545 - Remove extra parentheses !2542 (Ben Bodenmiller @bbodenmiller) - Docs: Rename and redirect docs/install/registry_and_cache_servers.md !2535 - Add stage / group metadata to docs pages !2528 - Add mention that registry mirror is started as HTTP not HTTPS !2527 - Elaborate on Docker mirror, and link to Docker doc !2526 - Docs: Redirected custom executor index page !2522 - Docs: Changed bullets to a table !2517 - Added docs for using a configuration template in the Helm chart !2503 - Update vale rules !2502 - Use latest docs linting image !2497 - Docs: Updated top-level page !2496 - Update link to runner helper 
image in documentation !2494 (botayhard @botayhard) - Change mention of custom cache containers to volumes !2491 - Add missing supported architectures for Runner helper !2490 - Update [runners.machine] section in Autoscaling GitLab Runner on AWS EC2 documentation !2480 - Provide a full list of metrics available for GitLab runners in the documentation !2479 - Clarify how service_account in TOML is used !2476 (Ben Bodenmiller @bbodenmiller) - Introduce usage of Runner Manager terminology !2474 - Docs: Revamp Runner home page !2472 - Update Kubernetes' documentation to include ephemeral storage requests/limits !2457 - Add Kubernetes runners allowPrivilegeEscalation security context configuration !2430 (Horatiu Eugen Vlad @hvlad) - Update Runner registry and cache documentation page !2386 - Cap maximum Docker Machine provisioning rate !1038 (Joel Low @lowjoel) ## v13.5.0 (2020-10-20) ### New features - Allow runner to archive cache on failure !2416 - Add job status environment variables !2342 - Add labels to Docker cache volumes !2334 - Set k8s runner ephemeral storage requests and limits !2279 ### Bug fixes - Docker executor: return error on pull/import failures !2113 - Fix path separator for CI_PROJECT_DIR in Windows in bash shells !1977 ### Maintenance - Ensure that for abort only abort is called !2463 - Detach runtime state/metric from CI_JOB_STATUS !2462 - Update stretchr/testify library to fix flaky test !2450 - Report Kubernetes pods' conditions when they're pending !2434 - Move variable creation out of specific resolver implementation !2413 - Test more executors in TestAskRunnerOverrideDefaults !2406 - Test for detecting overriding of CI server values !2403 - Support 'canceling' remote job status. 
!2377 - Add basic fuzz tests as part of dogfooding coverage guided fuzzing !2347 - Standardize indentation in YAML code !2328 - Use newest helper image version in tests !2223 - Update calls for SkipIntegrationTests to not return !2065 - Setup secure jobs !1897 - Disable secret_detection job !2471 ### Documentation changes - Doc `cleanup_file_variables` for custom executor !2455 - Link Azure storage container docs !2454 - Use Google driver for examples !2442 - Fix typo in k8s read_only config flag documentation !2441 - Docs: Removed extra notes !2440 - Removed many of the notes !2439 - Harmonize docs linting rules !2435 - Docs: Fixed here links and added metadata !2425 - Minor edits of recent edits !2423 - Remove contractions linting rule !2421 - Docs: Edits for Vale rules and other style !2420 - Documentation: Add log level mention to troubleshooting !2419 - Switch autoscaling Docker Machine examples to GCP and Ubuntu !2417 - Add troubleshooting about windows mapped drives !2415 - Docs: Updating metadata !2405 - Docs: Update Docker tables to clarify what's supported !2404 - Update default install docs to disable skel !2402 - Docker version requirements in Windows Server !2401 - Document vargrant-parallels plugin and add clone instructions !2399 - Changing Kubernetes executor service-account command !2312 ## v13.4.0 (2020-09-18) ### New features - Add Hashicorp Vault secret resolver !2374 - Add Hashicorp Vault integration package !2373 - Add Hashicorp Vault golang library !2371 - Add secrets handling abstraction !2370 ### Bug fixes - Improved interrupt/cancelation build tests !2382 - Fix Windows runner helper Docker container !2379 - Fix metric reading race conditions !2360 - Record only first resolved credentials for each Docker registry !2357 - Ensure PowerShell file variables contain no BOM !2320 ### Maintenance - Use consts for job state in TestUpdateJob !2397 - Support trace rewind !2390 - Support update interval on update job !2389 - Introduce 
`UpdateJobResult` and `PatchState` !2388 - Fix check_mocks make target !2387 - Update docs pipeline to use new image !2384 - Add support for custom PUT HTTP headers in cache archiver !2378 - Send trace checksum on job updates !2375 - Update node affinity tests assertions !2369 - Add test for cache archiver shell execution !2367 - Update log message for starting VM in Parallels executor !2361 (Per Lundberg @perlun) - Fix changelog generator config to catch all maintenance related labels !2359 - Update log message for starting VM in virtualbox executor !2356 (Per Lundberg @perlun) - Remove trailing spaces check !2352 - Replace whitelist terminology with allowlist !2338 - Use configured userns mode for services !2330 (Lukáš Brzobohatý @lukas.brzobohaty) - Add Kubernetes node affinities settings !2324 (Alexander Petermann @lexxxel) - Re-enable windows Docker tests !2308 - Use new function to create Docker client !2299 - Add Secrets entry to job payload structures !2288 - Remove redundant Docker executor integration tests !2211 - Add missing assert for mock !2116 - Allow overwriting Service and Helper container resources !2108 (Renan Gonçalves @renan.saddam) - Use parallel compression and decompression for Gzip archives and caches !2055 (Ben Boeckel @ben.boeckel) - Add variable to enable fallback cache key !1534 (Erik Lindahl @erik.lindahl) - Print Docker image digest !1380 (David Nyström @nysan) ### Documentation changes - Update docs-lint job to use latest image. 
!2398 - Add note not to use AWS security group ID with Docker machine !2396 - Docs: improve documentation grammar !2395 (Jonston Chan @JonstonChan) - Fix grammar in documentation index page !2394 (AmeliaYura @AmeliaYura) - Add documentation on how to use Ubuntu image in Kubernetes !2393 - adding a tip on configuring timestamp in Docker runner !2391 - Docs: Fix misspelled word !2383 - Update Vale and markdownlint rules !2380 - Docs: Fix minor typo in Registering runners page !2376 - Add Azure Blob Storage support for cache !2366 - Add note to docs about using shell executor when building macOS/iOS apps !2365 - Cleaned up some of the wording for macOS install !2364 - Document node affinity !2363 - Change order of headers in exec docs !2362 - Docs: Edited Fargate doc !2355 - Fix broken link !2354 - Update Kubernetes.md documentation replace example gitlabUrl !2353 (Tyler Wellman @tylerwel) - Fix section numbering in docs/development !2349 - CONTRIBUTING.md: fix FreeBSD label !2348 (Kenyon Ralph @kenyon) - Use `shell` instead of `bash` for Markdown !2345 - Update Registering Runners page !2337 - Add documentation for configuring private registries with imagePullSecrets !2131 (Tom Bruyninx @TomBrx) ### Other changes - Clarify --help text for --ID flag !2385 ## v13.3.0 (2020-08-20) ### Bug fixes - Install Runner in /usr/bin and helper in /usr/lib in Linux !2329 - Fix PowerShell #requires use !2318 - Fix untagged registration and add regression tests !2303 - Add openssh-client to Docker images !2281 - Use container ID, not name, for service's healthcheck hostname !2118 ### Maintenance - Add security harness !2315 - Move GitLab release to its own job !2314 - Fix typo for security branch !2304 - Add MR piplines for security fork on master !2301 - Add release jobs to security fork !2300 - Add security issue and merge request templates !2298 - Refresh linting rules !2297 - Make `.stage_done` available also on docs MRs !2295 - Remove needs from feature flags docs job !2293 - 
Fix DAG dependencies of release jobs !2289 - Run Docker import for helper-dockerarchive-host !2275 - Update changelog generator to accept new labels !2271 - Fix typo in DUMB_INIT_S390X_CHECKSUM variable name !2270 - Cache GOCACHE in CI !2187 - Enable DAG for some jobs !2076 - Upgrade Git version !2306 - Update Ubuntu Docker container to Ubuntu 20.04 !2286 (Markus Teufelberger @markusteufelberger) - Log additional Docker-machine prep/cleanup info !2277 ### Documentation changes - Synchronize lint rules and fix where required !2341 - Fix name script !2339 (Andros Fenollosa @tanrax) - Document how to renew GPG key !2336 - Update Documentation template to reflect standard !2332 - Fix broken external links !2331 - Document security release process !2322 - Fix incorrect Fargate cluster name !2321 (Matt Breden @mattbred56) - Added specific token steps !2317 - Update docs.GitLab-ci.yml to use trigger-build script !2311 - Add content describing Runner behavior for changes to config.TOML - docs !2307 - Made links descriptive !2302 - Creation of OpenShift Runner doc. !2296 - Removed accidentally commited installation instructions in 13.2 !2290 - Update info about support Linux/OS/archs !2287 - Add explicit location for Windows logs !2285 - Fix link to TOML docs Array of Tables. 
!2280 (Bheesham Persaud @bheesham) - Added architecture info !2278 - Fixes mixed-case anchor !2272 - Make it clear which Fargate container should have the specific name !2269 - Update a link to download the latest Fargate driver version !2259 (Ricardo Mendes @ricardomendes) - Replace backticks with bold for UI elements !2099 - Add an ENTRYPOINT script to the helper image Dockerfiles to add CA certificates !2058 ## v13.2.0 (2020-07-20) ### New features - Publish a GitLab Runner Docker image for Linux on IBM Z !2263 - Pass `multi_build_steps` as a Runner Feature when requesting a job !2213 - Leverage Docker buildx for the helper image and build for s390x !2206 - Enable PowerShell Core support in Shell Executor !2199 - Build and release binary for s390x !2196 - Label Docker networks in the same way as containers !1930 - Tag helper image with runner version !1919 (Fábio Matavelli @fabiomatavelli) ### Bug fixes - Fix Kubernetes runner timeout when the image name is invalid !2197 (Matthias van de Meent @matthias.vandemeent) - Update Git TLS settings to be configured for repo URL, not GitLab URL !2111 - Fix support for UNC paths in PowerShell executor !1976 (Pedro Pombeiro @pedropombeiro) - Set EFS flag to indicate that filenames and comments are UTF-8 encoded !1325 (Kazunori Yamamoto @kaz.yamamoto) - Add openssh-client to Docker images !2281 ### Maintenance - Unsilence the `make lint` target !2245 - Fix warnings reported by goargs linter !2233 - Fix shellcheck linter reported issues !2232 - Add goargs to CI build !2224 - Replace gocyclo linter with gocognit !2217 - Enable Windows tests for community MRs !2215 - Report `panic` failures in CI tests !2212 - Fix integration tests on Windows that rely on Git version !2207 - Enable optional checks in gocritic linter !2162 - Enable shadowing checking in govet !2150 - Enable funlen linter !2149 - Enable goprintffuncname linter !2148 - Enable nakedret linter !2143 - Enable nestif linter !2142 - Enable line limit linter !2141 - 
Dockerfiles restructuring !2114 - Rename trace.Fail to trace.Complete !2102 - Remove duplication from build_test.go !1843 - Ensure CI image is built if CI_IMAGE value changes !2267 - Retry helper image build !2265 - Remove `GOLANGCI_LINT_CACHE` usage !2257 - Remove unnecessary indentation in method !2256 - Update alpine image version in `static QA` job to 3.12.0 !2255 - Write diagnostics for missing `make development_setup` call !2250 - Run PSScriptAnalyzer on PowerShell scripts !2242 - Fix helper-Docker target !2226 - Fix code navigation job to wait until the image job is done !2221 - Fix a spelling error in the bug template and tidy up some other wording !2219 - Standardize Makefile target names !2216 - Fix data race in TestDockerCommandBuildCancel !2208 - Add native code intelligence block to CI !2201 - Speed up `clone test repo` job !2192 - Fix flaky TestListenReadLines in log processor !2191 - Run Kubernetes integration tests !2155 - Enable unparam linter and fix reported errors !2135 - Enable errcheck linter !2134 - Fix Dockerfile issues reported by halolint !2106 - Fix out-of-date test expectations !2012 - Update entrypoint shebang for Docker images !1780 (J0WI @J0WI) - Reduced layer count on Windows helper images !1777 (Alexander Kutelev @kutelev) - Update to alpine v3.12 !1763 ### Documentation changes - Docs: Updated note to add install from UI instructions !2264 - update "screenshot" of running GitLab-runner without arguments. 
from version 1.0.0 to 13.0 !2262 (@mxschumacher @mxschumacher) - Session server listen on IPv4 and IPv6 !2260 - Update documentation for helper image tags !2258 - Synchronize lint rules !2254 - Update custom executor docs with `step_*` !2253 - Docs: Fixed Git commands !2244 (Stefan Zehe @szehe) - Docs: Updated broken links !2240 - Adjust metadata and move page !2235 - Docs: fix broken external links !2234 - Fix Debian container path and SSH port in the Autoscaling GitLab CI on AWS Fargate guide !2230 - New config for Vale and markdownlint !2214 - Note that Interactive Web terminal don't work with Helm yet !2189 (Ben Bodenmiller @bbodenmiller) - Update doc for Autoscaling GitLab CI on AWS Fargate, adds troubleshooting section. !2188 ( Rob @rwd4) - Update Fargate Task connection info in autoscaling aws fargate doc !2181 - Review Handbook page: /runner/configuration/tls-self-signed.html !2170 - Add docs how to use k8s secrets for registration !2154 - Update index.md to include documentation in for the `--access-level` param values !2137 ## v13.1.0 (2020-06-19) ### New features - Fix file archiver message to include directories !2159 - Use direct-download on a first attempt for artifacts !2115 - Add full support for artifacts/exclude feature !2110 - Add data format definition for build / artifacts / exclude !2105 - Add support for `direct_download` artifacts !2093 - Publish Windows 1909 helper image !2086 - Support runner predefined variables inside overwrite variables Kubernetes !2069 - Add Centos8 and Ubuntu 19.10 & 20.04 packages !2002 - Change default Git fetch flags allowing user to overwrite them !2000 (Łukasz Groszkowski @falxcerebri) - Run any step from job response in a separate BuildSection !1963 ### Bug fixes - Fix missing logs from Docker executor !2101 - Fix automatically adding cache directory when cache disabled on register !2091 (Max Wittig @max-wittig) - Fix millicpu comparison for maxOverwrite !2019 - Make commander start process group for each 
process !1743 - Extract commander from custom executor !1654 - Extract process killing from custom executor !1653 ### Maintenance - Increase allowed data races !2204 - Fix test assertions for k8s integration tests !2171 - Increase allowed data races !2164 - Fix TestDockerCommandUsingCustomClonePath for Windows !2153 - Rename network manager file for Docker executor !2147 - Enable staticcheck linter !2136 - Update GitLab CI image to include Git LFS !2124 - Implement Is for \*BuildError !2121 - Update log message for failure of removing network for build !2119 (Max Wittig @max-wittig) - Change license management to use rules !2096 - Use Docker client's ContainerWait !2073 - Use taskkill windows !1797 - Cleanup dependencies for alpine based Docker image !1778 (J0WI @J0WI) ### Documentation changes - Add all Vale rules from main GitLab project !2203 - Docs: Fix distribution order !2200 (Martin @C0rn3j) - Update the register page to use the correct Docker registration commands - docs !2186 - Sync spelling exceptions list from GitLab project !2184 - Docs: fix broken links in Runner docs !2183 - Remove reference to lack of arm64 Docker images !2178 - Fix documentation TOML examples with [[runners.machine.autoscaling]] !2177 - Update GitLab Runner in a container documentation to prevent errors in using the Runner image - docs !2175 - Docs: Edited runners.cache.s3 details !2167 - Add example logs for `runner` and `json` log-format options - docs !2163 - Adds workaround for env vars in config.TOML !2156 - Update redirected links !2152 - Add Docker to capitalization rules !2146 - Include MachineName and MachineDriver in autoscaling example !2140 - Specify pull policy for Kubernetes executor !2129 - Improve Batch deprecated details !2128 (Ben Bodenmiller @bbodenmiller) - docs: Link to example of how to color PowerShell output !2127 (Ben Bodenmiller @bbodenmiller) - Docs: removed Ubuntu from LXD instructions !2126 - Refresh Vale rules !2125 - Adds note about the image for AWS 
Fargate !2100 - Add GDK to capitalization rules !2097 - Docs: edited autoscaling period content !2094 - Drop mention of 'OffPeakPeriods' from 'docs/faq/README.md' !2092 - Skip build stages that have no operations !2081 - Add vale plugin to recommended VS Code extensions !2078 - AWS Fargate guide walkthrough !2075 - Mark Prepare environment stage as system failure !1915 - Expose Code coverage report artifact !1863 - Send `SIGTERM` then `SIGKILL` to process in Shell executor !1770 - Publish Windows 1903 helper image !1634 ### Other changes - Fix data race in TestNewReadLogsCommandFileLogStreamProviderCorrect !2193 - Fix building of Windows helper image !2180 - Rename ill-named script variable in release_Docker_images !2173 - Change alpine mirrors to default mirrors for arm/arm64 !2165 - Skip flaky log processor test TestResumesFromCorrectSinceTimeAfterSuccessThenFailure !2151 - Enable gocritic linter !2145 - Return error from k8s `limits` function when parsing resource limits !2144 - Upgrade golangci-lint to v1.27.0 !2139 - Pass an explicit context path to Docker build in `build_ci_image` !2133 - Fix error when using attach strategy and ErrSkipBuildStage is returned when generating script !2123 - Revert removal of Windows Batch support !2112 - Do not log warning if trace update interval header value is empty !2103 - Add retries for runner system failures in CI !2098 - Remove `--kubernetes-services` command line flag !2074 - More verbose logging for artifact uploading !2052 (Sashi @ksashikumar) - Fix file name typo !2049 - Unify Docker registry authentication in Docker and Kubernetes executors !2048 - Improve Kubernetes executor attach strategy command execution and handling by using a new read-logs command in the helper image !2038 - Remove superfluous packages from Ubuntu based Docker image !1781 (J0WI @J0WI) ## v13.0.1 (2020-06-01) ### Bug fixes - Fix missing logs from Docker executor !2101 ## v13.0.0 (2020-05-20) ### Breaking Changes - Remove support for 
--Docker-services flag on register command !2036 - Remove fedora/29 package !1905 (Fábio Matavelli @fabiomatavelli) - Remove /debug/jobs/list?v=1 endpoint !1894 (Fábio Matavelli @fabiomatavelli) - Remove backported os.Expand() implementation !1892 (Fábio Matavelli @fabiomatavelli) - Remove FF_USE_LEGACY_VOLUMES_MOUNTING_ORDER feature flag !1889 (Fábio Matavelli @fabiomatavelli) - Remove macOS 32 bit support !2051 - Remove support for Windows 1803 !2033 - Remove legacy build directory caching in Docker Executor !2067 - Remove support for array of strings when defining services for Docker Executor !2035 ### New features - Support more glob patterns for artifact/cache !1917 - Add arm64 Docker images for GitLab/GitLab-runner !1861 - Make Docker machine configuration more elastic !1980 - Add support for `direct_download` artifacts !2093 ### Bug fixes - Fix duplicate volume check with trailing slash !2050 - Fix permissions of Docker volumes created by Runner !2047 - Fix removal of build volume when disable_cache set to true !2042 - Fix err checks from volume manager !2034 - Revert "Merge branch '4450-fix-container-wait' into 'master'" !2026 ### Maintenance - Retry Docker build jobs !2087 - Update installation of mockery !2085 - Fix Docker Auth config to be platform agnostic !2077 - Refactor tests in builds_helper_test !2057 - Enable unused linter !2043 - Remove support for array of strings when defining services for Docker Executor !2035 - Update assertion for Docker test !2031 - Add tests for Docker config read when no username is specified !2024 (Andrii Zakharov @andriiz1) - Skip flaky TestDockerCommandRunAttempts until fix is merged !2017 - Remove prealloc linter !2014 - Pin CI jobs to GitLab-org runners !1979 - Replace Code Climate with golangci-lint !1956 - Change license management to use rules !2096 ### Documentation changes - Update capitalization configuration !2084 - Update proxy.md documentation for grammar and clarity !2071 (Kade Cole @kadecole) - Add link to 
AWS Fargate documentation page !2070 - Adds the link to new AWS Fargate page !2068 - Add more Vale rules to project !2061 - Remove tip alert box !2054 - Added Kaniko reference materials to Runner Helm charts page !2039 - Sync Vale substitutions rules from GitLab project !2029 - Update PowerShell documentation to include video and working example project. !2028 - Handle situation where vale docs-lint error is overwritten by markdownlint success !2025 - Update faq to include firewall troubleshooting !2023 - Add recommended extensions for VS Code !2022 - Move documentation linting to Makefile !2021 - Add section about using TLS with custom CA in regular build scripts !2018 - Sync markdownlint settings from GitLab project !2015 - Fixed Helm search command !2007 (penguindustin @penguindustin) - Improve signals documentation and add a best practice for graceful shutdown !1988 - Make Docker machine configuration more elastic !1980 - Autoscale GitLab Runner on AWS Fargate configuration doc !1914 - Add details about how pull always is still fast and efficient !1885 (Ben Bodenmiller @bbodenmiller) - Correct documentation inaccuracies for `OffPeakPeriods` !1805 (Wes Cossick @wescossick) - Removed `CONTAINER_ID` in prepare.sh, so `CONTAINER_ID` in base.sh is used. 
!1723 (JUN JIE NAN @nanjj) ## v12.10.0 (2020-04-21) ### New features - Allow Windows 1909 for Docker executor !1999 - Allow windows 1903 for Docker executor !1984 - Add support for `raw` variables !1882 ### Bug fixes - Add attempts to Docker executor for container not found !1995 - Use Docker volumes instead of cache containers !1989 - Use unique container names for Docker executor !1801 ### Maintenance - Fix TestScanHandlesCancelledContext having a WaitGroup without a delta and a few other log processor flaky tests !1961 - Rename `docker_helpers` to `docker` !1943 - Add retry when executing commands with kube attach !1907 - Fix golint issue for error starting with capital letter !1851 - Fix some Windows Docker executor test !1789 ### Documentation changes - Minor Update index.md !2004 (KATO Tomoyuki @tomo667a) - Minor rewording in PROCESS.md templates !2003 - Add further checks from GitLab project !2001 - Add info that SSH is also required to be accessible in the security group !1997 (Daniel Schwiperich @d.schwiperich) - Add Vale version text rule !1994 - Clean up note style !1993 - Fix redirected links in docs !1992 - Updates markdownlint configuration from GitLab project !1991 - Added link to the Git download page !1972 - Pull policy security concerns apply to Kubernetes executors too !1886 (Ben Bodenmiller @bbodenmiller) ### Other changes - Clean Temporary Directories created by the Custom Executor !1978 (Mark McGuire @TronPaul) - Fix broken master for non existent method call !1974 - Rely on `git ls-files` and `git diff` for checking mocks !1973 ## v12.9.0 (2020-03-20) ### New features - Handle 503 status when uploading artifacts and the object storage is unavailable !1887 - Add triggering of GitLab Runner UBI images pipeline !1869 - Add execution stage name in job trace !1847 - Provide rpm/deb package for arm64 and aarch64 !1826 - Expose CI_JOB_IMAGE env var on build environment !1813 - Create network per build in Docker executor !1569 (Steve Exley 
@steve.exley) - Overwrite Kubernetes resource limits and requests for build container on job level !874 (Nicholas Colbert @45cali) ### Bug fixes - Kubernetes execute commands with attach instead of exec !1775 - Retry Kubernetes commands when "error dialing backend: EOF" error is hit !1867 ### Maintenance - Upgrade Docker client to version 19.03.5 for CI image !1874 - Fix Docker machine executor test filename !1927 - Remove executor\_ prefix from filenames in the executors package !1902 - Fix 'make all' target !1900 - Replace changelog generator script !1888 - Bump MinIO to latest version !1881 (Tom Elliff @tomelliff) - Rename build makefile target to build_all !1873 - Prevent building mips and s390x architectures by default !1872 - Make pipelines running also for X-Y-stable branches !1871 - Add double quotes around bash arguments in ci/release_Docker_images !1865 - Fix release Docker warning !1864 - Fix typo in PowerShell script comments !1862 - Simplify sha256 checksum file creation !1859 - Improve fpm detection !1858 - Replace which command usage !1857 - Convert indentation to spaces for package script !1856 - Update synced path for Windows 10 machine !1854 - Use chocolatey to install software in Vagrant boxes !1853 - Remove redundant type declaration !1852 - Bump to go 1.13.8 !1849 - Add debug logs when setting up pod !1844 - Improve message in Windows version detection !1841 - Set DinD image explicitly to 19.03.5 !1840 - Resolve data race in TestCommand_Run !1839 (Konrad Borowski @KonradBorowski) - Use $(MAKE) instead of make !1825 - Refactor helpers/container/service pkg !1824 ### Documentation changes - Change document title to include EC2 !1912 - Fix typo in advanced configuration docs !1910 (Geo @geo4orce) - Improve `Code format` instructions in the process documentation !1899 - Add fedora 30 to supported OS !1896 - Update docs for Windows services since we support services in network per build !1895 - Fix typo in release process docs !1891 (Ranit 
@ranit.appcode) - Prevent full disk image copies in libvirt custom executor driver example !1878 (Tom Dohrmann @Freax13) - Interactive Web terminal does not work on Windows !1877 (Ben Bodenmiller @bbodenmiller) - List which executors are at risk of having Runner token & other project code stolen !1876 (Ben Bodenmiller @bbodenmiller) - Allow service alias from config in Kubernetes executor !1868 - Update docs for image variable in custom executor !1866 - Remove bash from codeblock tags !1846 - Improve wording in docs/development/README.md !1837 - Document merge request title requirements and reviewing guidelines !1836 - Add documentation on building from sources !1835 - Update security docs !1833 (masOOd @masood.kamyab) - Update the monitoring document !1831 (masOOd @masood.kamyab) - Change flag to Docker-services !1830 - Document Windows supported versions !1533 ## v12.8.0 (2020-02-22) - Define most jobs as 'pipelines for merge requests' !1747 - Build ci images only on related file changes !1746 - Make referees package mocks to be generated by mockery !1729 - Replace Ruby:2.0/2.1 in examples and test cases with Ruby:2.6 !1749 - Update deprecation warning for runner.Docker.service !1751 - Only run image build jobs on main repo !1754 - Fix docs pipelines for merge requests !1756 - Add CI job to check for outdated mocks !1651 - Doc: Extend the compatibility section !1755 - Update `query_interval` to integer !1752 - Update outdated links in comments !1761 - Refactor referees package !1730 - Update process for issue tracker !1742 - Give custom executor scripts a name !1538 - Update only rule for building CI images !1766 - Change Runner src in vagrant configuration for Windows development !1767 - Fix broken CI Pipeline Badge on README !1772 - Typo/Grammar improvements in Docker.md !1757 - Fix casing on log !1774 - Fix link to Vagrant in docs/development/README.md !1773 - Add condition when custom executor term happens in documentation !1771 - Fixed kramdown error !1783 - 
Rename test file !1784 - Fix `--docker-services` flag in register command !1776 - add space before configuration file name in startup message !1785 - Support rate limiting headers from GitLab API !1728 - Update CHANGELOG for v12.7.1 !1787 - Delete changelog to release post script !1788 - Remove an extra '#' !1791 - Update Kubernetes.md, fix typo in `` !1802 - Update documentation template !1796 - Update AWS spot details in docs !1795 - Change the S3 release index file generator !1803 - Reduce the number for allowed data races !1804 - Fix golint issues for err msgs !1769 - Handle 422 on artifact upload !1794 - Bump Go version to 1.13.7 !1765 - Enabled shared windows runners internal beta !1764 - Fix a typo in S3 release script !1807 - Add one more fix to the S3 release !1808 - Add support for host aliases in services for Kubernetes executor !1680 - Use exec.ExitError.ExitCode() function from go 1.12 !1810 - Fix values.YAML file name in documentation !1812 - Update links to MRs in runner docs !1814 - Update removal date of feature flags to 13.0 !1818 - Increase allowed data races !1815 - Fix panic for exec command !1811 - Update GitHub.com/stretchr/testify dependencies !1806 - Add support for X-GitLab-Trace-Update-Interval header !1760 - Revert 9e1d067621855c7b75820d3a49ac82ef51e56342 !1816 - Cleanup Kubernetes versions when checking for host aliases and don't fail on parse error !1823 - Add GitLab-runner-helper binaries to S3 release !1819 - Minor fixes in advanced-configuration.md !1828 - Remove install Makefile target !1822 - Docs osx install !1829 - Set DinD image explicitly to 19.03.5 !1840 - Make pipelines running also for X-Y-stable branches !1871 ## v12.7.1 (2020-01-23) - Fix `--docker-services` flag in register command !1776 ## v12.7.0 (2020-01-20) - Fixing kramdown link error !1711 - Add caps and backtick testing to runner docs linting !1678 - Fix macOS label !1712 - Align markdownlint config to main repo version !1713 - Bump go version to 1.13.5 !1701 - 
Remove duplicate service description !1715 - fix(scripts): fix until typo !1717 - Use Prometheus to Query Runner Metrics Linked to Each Job !1545 - Remove unnecessary dependencies from vendor directory !1721 - Remove panic when metrics referee not configured properly !1724 - Add check for go modules !1702 - Update docs for Helm 3 !1727 - Empty Referee configuration on registration !1726 - Extract helperimage pkg outside of parent pkg !1720 - Removed --name argument from Helm install. !1718 - macOS limitations and example update !1505 - Update advanced-configuration.md - fix typo of mperiods !1722 - Fix Typos !1731 - Add a Git version caveat !1732 - Update docs for Windows to use backslashes not forwardslashes !1738 - Do not embed mutex !1734 - Refactor CI pipeline !1733 - Add missing 'needs' entry for release Docker images job template !1744 - docs: Replace Ruby:2.1/2.2 with Ruby:2.6 or 2.7 !1748 - Make 'tags: GitLab-org' a re-usable job template !1745 - Change to go1.13 error wrapping !1709 - Refactor metrics referee tests !1714 - Refactor metrics referee !1725 - Copyedit doc for volumes_from in Docker executor config !1750 - Allow service alias from config in Docker executor !1673 ## v12.6.0 (2019-12-22) - Update list of fixes for Docker-machine fork !1655 - Remove outdated note regarding non-existent 1.8.x brew Go formula in contributing docs !1661 - Add manual rpm and deb installation details !1650 - Remove GetGitTLSVariables method !1663 - Link to example of how to run DinD !1515 - Update feature flag deprecation !1672 - Add timeout when sending request for certificate !1665 - Support Docker options for CPU shares and OOM adjust !1460 - Backport os.Expand from Go v1.10.8 !1677 - Switch to a specific version of govet analyzer !1690 - Update cloud.Google.com/go to v0.49.0 !1682 - Add cmd to helper image !1645 - Update blurb for when people use the issue tracker for support tickets !1691 - Fixing typos !1685 - Remove dead code !1686 - Distribute arm64 binaries 
!1687 - (Rebased) Update Kubernetes.md adding the missing GitLab-helper container which is ever created. !1693 - Various spelling, punctuation and readability fixes !1660 - Add docs link to arm64 manual install !1694 - Fixed empty_dir name to empty-dir !1681 - Expose image to custom executor !1666 - Reorder methods and add some more logging to RunCommand !1683 - Refactor unused parameters for multi.go !1698 - Migrate to go 1.13 and go modules !1625 - Change log message for failing to set console mode !1662 - Use time.Round from Go stdlib for web terminal !1631 - Close session server on graceful shutdown !1699 - Add deprecation warning for cmd shell in Job log !1659 - Fix rpm signing !1703 - Fix regex for finding virtualbox snapshot name and add tests !1656 - Remove file locking !1710 - Change tone of error on Windows test failure !1610 - Fix CI image build !1707 ## v12.5.0 (2019-11-20) - Update docs for Runner configuration inside of a Docker container !1613 - Remove misleading comment !1622 - Remove absolute paths from chart !1626 - Fix lint on Markdown files !1602 - Document GitLab Docker machine fork !1596 - Update redirected link !1637 - Fix certificates chain generation regression introduced with 12.4.0-rc1 !1639 - Bump Docker_MACHINE_VERSION !1595 - Fix golint issues in machine pkg !1641 - Upgrade to alpine 3.10 !1636 - Fix #4684 for K3s/containerd !1605 - Update makefile to setup dev dependencies before running tests !1589 - Fix external Helm documentation links !1644 - Update Git version for Windows dev environment !1646 - Change config lock to create a separate lock file !1647 - Add few constants to executors/custom/API !1657 - Fix bind propagation for Linux volumes !1632 - Populate a list of machines with machines that might not yet be persisted on disk !914 - Add service definition in config for Kubernetes executor !1476 ## v12.4.1 (2019-10-28) - Fix TLS chain building !1643 ## v12.4.0 (2019-10-21) - Fix err logging for runner limit !1403 - Add the note 
about incompatibility of session_server with Helm chart runner !1575 - Fix prepare_exec typo in docs !1576 - Docs edits to clarify feature flags motivations and usage in Runner !1568 - Change log levels for common errors !1578 - Extend custom executor config !1583 - Fix JSON inside of docs !1587 - Update link for Helm chart issue tracker !1588 - Add pipeline ID to Docker labels !1592 - Fix typo in helpers/path/windows_path !1594 - Fix broken check for Git LFS that breaks lfs pulling !1599 - Update advanced-configuration.md !1597 - Use certutil to create certificate chain for Git !1581 - Add Go Report Card badge to the README file !1601 - Add pipeline link !1608 - Rename mentions of OSX to MacOS !1440 - Enable pinentry mode to loopback for GPG signing !1614 - Update various runner doc links !1585 - Add note about IAM role usage for s3 cache configuration !1598 - Bump used Go version to 1.10.8 !1617 - Update gopkg.in/YAML.v2 !1619 - Update Prometheus libraries !1620 - Bump GitHub.com/JSON-iterator/go to 1.1.7 !1621 - Update k8s client go to 11.0 !1615 - Rename log to trace in runner docs !1616 - Change Review priority label meaning !1600 - Add timeout when waiting for the build to finish !1609 ## v12.3.0 (2019-09-20) - Change log levels for common errors !1578 - Update a redirected link !1520 - Removal of conditions which are always evaluated either to true or false !1517 - Add initial docs for best practice !1509 - Update VirtualBox executor docs !1527 - Document configuration template file feature !1522 - Rename landing page for consistency !1528 - Edit new config template file section !1529 - Update windows dev environment to Git 2.22 !1530 - Update PowerShell ErrorActionPreference documentation !1535 - Remove Debian buster from package list !1536 - Update tls-self-signed.md !1537 - Improve windows helper images build !1519 - show which service exactly is invalid !1531 - Change docs Markdown linter from mdl to markdownlint !1540 - Replace bastion with Runner 
Manager !1547 - Add entry to FAQ, restructure also !1539 - Change docs review and cleanup jobs to same CI stage !1543 - Docker.md: Correct Image Sizes !1542 - Add note on shell-based Docker image requirement !1459 - Fixed PowerShell commands for Windows Runner !1544 - Remove the scripting for release checklist issue creation !1556 - Use new location for Helm charts repo !1553 - Make Notes look consistent !1555 - Change markdownlint wildcard format !1554 - Edit Docker images section !1550 - Update capitalization in runner docs !1559 - Docs/update Ubuntu dev docs !1557 - Use standard commands for directory creation to make it PowerShell core compatible !1563 - Fix exiting with zero exit code when cmdlets fail !1558 - Enable support for long paths !1524 - Prevent dollar signs in shell codeblocks !1574 - Clarify feature flag usage instructions !1566 - Expose variable containing the 'short token' value !1571 - Update documentation about OffPeakTimezone !1567 - Set default PATH for helper image !1573 ## v12.2.0 (2019-08-22) - Update docs executor titles !1454 - Only default to PowerShell on Windows if no other shell is specified !1457 - Add more MDL rules !1462 - Add PROCESS.md !1410 - Fix wrong rc script for freebsd. !1418 - Allow to build development version of DEB, RPM and Docker with make !824 - Add custom executor documentation !1416 - docs: clarify the requirements for pinning !823 - Adds explanation of our review label system. 
!1461 - Use FreeBSD's built-in stop and status scriptlets from /etc/rc.subr !757 - Fix typo on security docs !956 - Update doc about Debian version !1464 - Move note to subsection !1469 - Correct spelling in help string !1471 - Force an opt-out from Docker Machine bugsnag report !1443 - Improved go install instructions for macOS !1472 - Fix some linting issues !1424 - Make it clear what is the default shell for Windows !1474 - Add LXD example for custom executor !1439 - Add libvirt custom executor example !1456 - Update self-signed certificate docs for Windows service !1466 - Docs/update min Docker version !1480 - Docs: Fix typo in custom executor !1479 - Track Windows tests failures !1450 - Add requirements for contributing new hardware architectures !1478 - Fix Markdown in runner docs (part 1) !1483 - Fix Markdown in runner docs (part 2) !1484 - Update docs to specify default shell of OS !1485 - Further clarify Docker requirements !1486 - Fix typo and spacing in two runner docs !1487 - docs: GitLab-runner helper image has no arm64 build yet !1489 - Fix custom executor default config on register !1491 - Update Windows test failures !1490 - Expand Markdown lint rules in runner !1492 - Fix PowerShell capitalization !1497 - Quarantine more windows tests !1499 - Update tracked Windows tests failures list !1502 - Quarantine windows tests !1501 - Add docs for tls_verify config field !1493 - Reorder methods in abstract.go to bring callees closer to the callers !1481 - Update docs about bash on windows not working !1498 - Cleanup commands/config.go !1494 - Switch to DinD TLS for GitLab CI !1504 - Add .gitattributes !1122 - Prevent running multiple instances of the GitLab-runner process using the same configuration file !1496 - Update test assertion !1510 - Remove need for externally configured variable !1512 - Change CI_COMMIT_REF to CI_COMMIT_SHA in docs !1513 - Update reference to CI_COMMIT_REF to CI_COMMIT_SHA !1514 - Configuration file template for registration command 
!1263 - Update AWS autoscaling docs !1518 - Add test for and masking !1516 ## v12.1.0 (2019-07-22) - Extend custom executor with configuration injects !1449 - Fix "WARNING: apt does not have a stable CLI interface. Use with caution in scripts" !1143 - Fix artifact uploading for Windows Docker containers !1414 - Upgrade base image for GitLab/GitLab-runner:ubuntu to Ubuntu:18.04 !1413 - Add tip to execute batch from PowerShell !1412 - Replace wget commands with curl commands !1419 - Wrap submodule command with a string !1411 - Add missing test cases for s3 IAM checks !1421 - Add Markdown linting and one rule !1422 - Fix indentation for docs !1417 - Add docs for not supporting LCOW !1415 - Disallow bare URLs from project !1425 - Update zglob !1426 - Add note in docs for mounting volumes to services !1420 - Clarify docs for `builds_dir` & `cache_dir` !1428 - Update docs to fix Markdown and square bracket use !1429 - Enforce consistent prefix for numbered lists !1435 - Remove fedora/30 from supported list !1436 - Add STOPSIGNAL to GitLab-runner Docker images !1427 - Add trace entry for Docker authConfig resolving !1431 - Enforce consistent prefix for bullet lists !1441 - Fix concurrent updates !1447 - docs: add --config for install command !1433 - Document why we no longer accept new executors !1437 - Document limitation for Windows Docker target drive !1432 - Trivial update to virtualbox.md - 'shutdown' is not the verb, barely the noun. !1445 - Update description of flag in docs !1451 - Docs: Update redirected links in runner docs !1453 - Add lint rule that headings must increment one level at a time !1452 - Add custom executor !1385 ## v12.0.0 (2019-06-21) **Release notices:** With GitLab Runner 12.0 we're adding several breaking changes: - [Require refspec to clone/fetch Git repository](https://gitlab.com/gitlab-org/gitlab-runner/issues/4069). - [Change command line API for helper images usage](https://gitlab.com/gitlab-org/gitlab-runner/issues/4013). 
- [Remove old cache configuration](https://gitlab.com/gitlab-org/gitlab-runner/issues/4070). - [Remove old metrics server configuration](https://gitlab.com/gitlab-org/gitlab-runner/issues/4072). - [Remove `FF_K8S_USE_ENTRYPOINT_OVER_COMMAND` feature flag and old behavior](https://gitlab.com/gitlab-org/gitlab-runner/issues/4073). - [Remove support for few Linux distributions that reached EOL](https://gitlab.com/gitlab-org/gitlab-runner/merge_requests/1130). - [Remove old `git clean` flow](https://gitlab.com/gitlab-org/gitlab-runner/issues/4175). Please look into linked issues for details. **Release changes:** - Support windows Docker volumes configuration !1269 - Fix PowerShell cloning !1338 - Docs: Update Docker register non-interactive command !1309 - Update mocks !1343 - Change source for go-homedir !1339 - improve MR and issues templates !1347 - docs: reuse previous clone !1346 - Prevent copy and paste error due to not existed alpine tag. !1351 - Fix typo for usage of proxies within Docker containers for runners !1342 - Add documentation for Windows Docker Executor !1345 - Fix volume mounting when mode specified !1357 - Update docs for Docker executor description !1358 - Show error when volume length is not expected !1360 - Add feature flag to mounting volumes to services !1352 - Implement session endpoint to proxy build services requests !1170 - add build info for fedora 30 !1353 - Limit `docker-windows` to Windows !1362 - Update logging key for Docker Machine !1361 - Update docs to refer to Windows Batch deprecation !1371 - Remove deprecated Git clean strategy !1370 - Remove support for deprecated metrics_server setting !1368 - Add labels to templates !1375 - Remove support for deprecated entrypoint configuration for K8S !1369 - Fix support for SELinux volume mounts & case sensitivity !1381 - Remove old Docker helper image commands !1373 - Remove support for deprecated S3 cache configuration !1367 - Added --system flag information into GitLab-runner install 
command !1378 - Minor Markdown fixes !1382 - Remove support for deprecated distributions !1130 - Add configuration of access_level for runners on registration !1323 - Remove doc notice for deprecated OSes !1384 - Remove deprecated clone/fetch command !1372 - Allow configuration of Pod Security Context by Kubernetes Executor !1036 - Fix case sensitivity for windows volumes !1389 - Accept Docker-windows as an option on register !1388 - Add documentation for windows development !1183 - Document clear-Docker-cache script !1390 - Store traces on disk !1315 - Make Git init to be quiet !1383 - Fix several typos !1392 - Make volumes to work on linux Docker on windows !1363 - Update CHANGELOG.md with 11.11.x patch releases !1393 - Dependencies license management with GitLab CI/CD !1279 - Fix default cache volume Docker-windows register !1391 - Fixed date typo for v11.11.2 CHANGELOG entry !1394 - Update GitHub.com/Microsoft/go-winio dependency !1348 - Update compatibility heading as it's no longer a chart/table !1401 - Docker Credentials helper support !1386 - Numerous typos fixed !1258 - Update some logrus fields used in Runner logs !1405 - Update osx.md so the update instructions work as well as the install instructions !1402 - Make PowerShell default for new registered Windows shell executors !1406 - Restore gofmt rules from before codeclimate update !1408 - Update logrus to v1.4.0 !1407 ## v11.11.2 (2019-06-03) - Fix support for SELinux volume mounts & case sensitivity !1381 - Fix case sensitivity for windows volumes !1389 - Update logging key for Docker Machine !1361 - Limit `docker-windows` to Windows !1362 - Make volumes to work on linux Docker on windows !1363 ## v11.11.1 (2019-05-24) - Fix volume mounting when mode specified !1357 - Add documentation for Windows Docker Executor !1345 - Add feature flag to mounting volumes to services !1352 ## v11.11.0 (2019-05-22) - Fix PowerShell cloning !1338 - Add PowerShell support for Docker Executor !1243 - Support windows 
Docker volumes configuration !1269 - Fix Git LFS not getting submodule objects !1298 - Add homebrew installation method for macOS runners !837 - mention the 59th second timeperiod issue in the docs !490 - Refactor macOS install instructions !1303 - Edit note on edge case !1304 - Extract unsupportedOSTypeError to errors pkg !1305 - Optimise trace handling for big traces !1292 - Cleanup feature flags mess !1312 - Add more documentation for node tolerations !1318 - Typo: varialbes -> variables !1316 - Allow to configure FF using config.TOML !1321 - Update link to the introduction of custom build directories !1302 - Allow to use FF to configure `/builds` folder !1319 - Create a single source of truth for feature flags !1313 - Clear up docs on how to select shell !1209 - Update feature flag documentation !1326 - Refactor Helper Image package to work with Kubernetes !1306 - Fix broken internal links !1332 - Refactor helperimage package tests !1327 - Change deprecation of FF_USE_LEGACY_BUILDS_DIR_FOR_Docker to 12.3 !1330 - Update cmd script example !1333 - Better explain the workflow in Docker executors doc !1310 - Exclude mock files from coverage reporting !1334 - Fix link syntax in advanced-configuration.md !1311 - Docs: Update contributing links from GitLab-ce !1308 - Update Docker executor Executor Options initialization !1296 - Add test case for Linux helper image !1335 - Extract volumes configuration to a separate struct !1261 ## v11.10.0 (2019-04-22) **Deprecations:** All deprecations, with a detailed description, are listed at 1. With version 11.10 we're deprecating the feature flag [FF_USE_LEGACY_GIT_CLEAN_STRATEGY](https://docs.gitlab.com/runner/configuration/feature-flags/#available-feature-flags). 
**Release changes:** - Fix Git LFS not getting submodule objects !1298 - Refactor slightly ./shells/shellstest !1237 - Fix CI_PROJECT_DIR handling !1241 - Log time took preparing executors !1196 - Restore availability of pprof in the debug server !1242 - Move variables defining .gopath to a shared place for all Windows jobs !1245 - Docs: clarify runner API registration process !1244 - add lfs support to Ubuntu Docker runner !1192 - Add information about Kaniko for Kubernetes executor !1161 - Enable the docs CI job !1251 - Rename test to be more descriptive !1249 - Create the reviewers guide base document !1233 - Update codeclimate version !1252 - Add retryable err type !1215 - Get windows tag for helper image !1239 - Remove unnecessary log alias for logrus inport !1256 - Make GitLab-runner:alpine more specific, Add link to Dockerfiles sources,... !1259 - Docs: Fix broken anchor in Docker.md !1264 - Replace the current k8s manual installation with the Helm chart !1250 - Create cache for `/builds` dir !1265 - Expose `CI_CONCURRENT_(PROJECT)_ID` !1268 - DOC: note on case-sensitive proxy variables and the need for upper and lower case versions !1248 - Add new links checker !1271 - Update log messages for listen & session address !1275 - Use delayed variable expansion for error check in cmd !1260 - Unexport common.RepoRemoteURL !1276 - Update index.md - added sudo when registering the service on macos (without... 
!1272 - Add new lines around lists for renderer !1278 - Fix color output on Windows !1208 - Make it again possible to disable Git LFS pull !1273 - Add cross references to Runners API !1284 - Improve support for `git clean` !1281 - Make Kubernetes executor to clone into /builds !1282 - Add option to specify clone path !1267 - Allow to disable debug tracing !1286 - Add Route Map for runner docs !1285 - Do not print remote addition failure message !1287 - Add true to the run-untagged subcommand !1288 - Cleanup k8s cleanup test !1280 - Change helper image to servercore !1290 - Add note about Git-lfs !1294 ## v11.9.2 (2019-04-09) - Fix Git LFS not getting submodule objects !1298 ## v11.9.1 (2019-04-03) - Make it again possible to disable Git LFS pull !1273 - Use delayed variable expansion for error check in cmd !1260 - Unexport common.RepoRemoteURL !1276 ## v11.9.0 (2019-03-22) **Deprecations:** All deprecations, with a detailed description, are listed at 1. With version 11.9 we're deprecating the support for Docker Executor on CentOS 6 2. With version 11.9 we've implemented a new method for cloning/fetching repositories. Currently GitLab Runner still respects the old configuration sent from GitLab, but with 12.0 old methods will be removed and GitLab Runner will require at least GitLab 11.9 to work properly. 3. With version 11.0 we've changed how the metrics server is configured for GitLab Runner. `metrics_server` was replaced with `listen_address`. With version 12.0 the old configuration option will be removed. 4. With version 11.3 we've implemented support for different remote cache providers, which required a change in how the cache is configured. With version 12.0 support for old configuration structure will be removed. 5. With version 11.4 we've fixed the way how `entrypoint:` and `command:` options of Extended Docker configuration () are being handled by Kubernetes Executor. 
The previous implementation was wrong and was making the configuration unusable in most cases. However, some users could rely on this wrong behavior. Because of that we've added a feature flag `FF_K8S_USE_ENTRYPOINT_OVER_COMMAND` which, when set to `false`, could bring back the old behavior. With version 12.0 the feature flag as well as the old behavior will be removed. 6. Some Linux distributions for which GitLab Runner is providing DEB and RPM packages have reached their End of Life. With version 12.0 we'll remove support for all EoL distributions at the moment of the 12.0 release. 7. With version 11.9 we've prepared a go-based replacement for Runner Helper commands executed within Docker executor inside of the Helper Image. With version 12.0 we will remove support for old commands based on bash scripts. This change will affect only the users that are configuring their custom Helper Image (the image will require an update to align with new requirements) **Release changes:** - fix(parallels): use the newer sntp command to time sync !1145 - Update Docker API verion !1187 - Update alpine images to alpine 3.9 !1197 - Fix a typo in the description of the configuration option !1205 - Document creation of Docker volumes passed with Docker exec --Docker-volumes !1120 - Correct spelling of timed out in literals !1121 - Fix spelling and other minor improvements !1207 - Migrate service wait script to Go !1195 - Docs update: Run runner on Kubernetes !1185 - Increase test timeout for shell executor !1214 - Follow style convention for documentation !1213 - Add test for runner build limit !1186 - Migrate cache bash script to Go for helper image !1201 - Document OS deprecations for 12.0 !1210 - Fix anchors in Runner documentation !1216 - Add `build_simple` to `help` make target !1212 - Split `make docker` for GitLab Runner Helper !1188 - Add windows Dockerfiles for GitLab-runner-helper !1167 - Make Runner tests working on Windows with our CI Pipeline !1219 - Fetch code from 
provided refspecs !1203 - Check either ntpdate command exists or not before trying to execute it !1189 - Deprecate helper image commands !1218 - Add script for building windows helper image !1178 - Fix ShellWriter.RmFile(string) for cmd shell !1226 - Mask log trace !1204 - Add note about pod annotations for more clarity !1220 - Resolve memory allocation failure when cloning repos with LFS objects bigger than available RAM !1200 - Release also on GitLab releases page !1232 - Restore availability of pprof in the debug server !1242 ## v11.8.0 (2019-02-22) - Kubernetes executor: add support for Node tolerations !941 - Update logrus version to v1.3.0 !1137 - Docs - Clarify Docker Runner Documentation !1097 - Update GitHub.com/stretchr/testify dependency !1141 - Update LICENSE file !1132 - Update example of cache config !1140 - Update documentation for autoscaling on AWS !1142 - Remove unnecessary dep constraint !1147 - readme: make author block render md !999 - Corrected note when using a config container to mount custom data volume. !1126 - Fix typo in documentation of k8s executor. !1118 - Make new runner tokens compatible with Docker-machine executor !1144 - docs: Use `sudo tee` for apt pinning. 
!1047 - docs: fix indendation !1081 - Updated hint on running Windows 10 shell as administrator !1136 - Fixed typo in logged information !1074 - Update registry_and_cache_servers.md !1098 - Update golang.org/x/sys !1149 - Refactor frontpage for grammar and style !1151 - Update GitHub.com/Azure/go-ansiterm dependency !1152 - Testing on windows with vagrant !1003 - Add fix for race condition in windows cache extraction !863 - Consolidate Docker API version definition !1154 - Prevent Executors from modifying Runner configuration !1134 - Update ExecutorProvider interface signature !1159 - Update logging for processing multi runner !1160 - Update Kubernetes.md - fix typo for bearer_token !1162 - Update GitHub.com/Prometheus/client_golang dep !1150 - Remove ContainerWait from Docker client !1155 - Update advanced-configuration.md: Fix blockquote not reaching the entire note !1163 - Fix docs review app URL !1169 - docs: Add a helpful command to reload config !1106 - Update AWS autoscale documentation !1166 - Refactor dockerfiles !1068 - Add link to AWS driver about default values !1171 - Add support for fedora/29 packages !1082 - Add windows server 2019 as default for windows development !1165 - Docs: Fix bad anchor links in runner docs !1177 - Improve documentation concerning proxy setting in the case of Docker-in-Docker-executor !1090 - Add few fixes to Release Checklist template !1135 - Set table to not display under TOC !1168 - Update Docker client SDK !1148 - docs: add GitLab Runner Helm Chart link !945 ## v11.7.0 (2019-01-22) - Docs: Cleaning up the executors doc !1114 - Update to testify v1.2.2 !1119 - Fix a typo in VirtualBox Executor docs !1124 - Use the term `macOS` instead of `OS X` or `OSX` !1125 - Update GitHub.com/sirupsen/logrus dependency !1129 - Docs update release checklist !1131 - Kill session when build is cancelled !1058 - Fix path separator for CI_PROJECT_DIR in Windows !1128 - Make new runner tokens compatible with Docker-machine executor !1144 ## 
v11.6.0 (2018-12-22) - Make compatibility chart super clear and remove old entries !1078 - Add Slack notification option for 'dep status' check failures !1072 - Docker executor: use DNS, DNSSearch and ExtraHosts settings from configuration !1075 - Fix some invalid links in documentation !1085 - Fix SC2155 where shellcheck warns about errors !1063 - Change parallel tests configuration ENV names !1095 - Improve documentation of IAM instance profile usage for caching !1071 - Remove duplicate builds_dir definition from docs !952 - Make k8s object names DNS-1123 compatible !1105 - Docs: working example of helper image with CI_RUNNER_REVISION !1032 - Docs: omit ImagePullPolicy !1107 - Disable the docs lint job for now !1112 - Docs: comment about how listen_address works !1076 - Fix the indented bullet points of the features list in documentation !1093 - Add note on the branch naming for documentation changes !1113 - Docs: add session-server link to advanced list in index !1108 ## v11.5.0 (2018-11-22) - Support RAW artifacts !1057 - Docs: changing secret variable to just variable in advanced-configuration.md !1055 - Docs: Fixing some bad links in Runner docs. !1056 - Docs: Updating Docs links from /ce to /ee !1061 - Docs: Fixing Substrakt Health URL !1064 - Add failure reason for execution timeout !1051 ## v11.4.0 (2018-10-22) - Do not create apk cache !1017 - Handle untracked files with Unicode characters in filenames. 
!913 - Add metrics with concurrent and limit values !1019 - Add a GitLab_runner_jobs_total metric !1018 - Add a job duration histogram metric !1025 - Filter content of X-Amz-Credential from logs !1028 - Disable escaping project bucket in cache operations !1029 - Fix example for session_server and added the note about where this section should be placed !1035 - Fix job duration counting !1033 - Log duration on job finishing log line !1034 - Allow disabling Docker entrypoint overwrite !965 - Fix command and args assignment when creating containers with K8S executor !1010 - Support JSON logging !1020 - Change image for docs link checking !1043 - Fix command that prepares the definitions of tests !1044 - Add OomKillDisable option to Docker executor !1042 - Add Docker support for interactive web terminal !1008 - Add support Docker machine web terminal support !1046 ## v11.3.0 (2018-09-22) - Fix logrus secrets cleanup !990 - Fix test failure detection !993 - Fix wrongly generated `Content-Range` header for `PATCH /api/v4/jobs/:id/trace` request !906 - Improve and fix release checklist !940 - Add ~"Git operations" label to CONTRIBUTING guide !943 - Disable few jobs for docs-/-docs branches !996 - Update release checklist issue template !995 - Fix HTTPS validation problem when SSH executor is used !962 - Reduce complexity of reported methods !997 - Update Docker images to alpine:3.8 !984 - Fail build in case of code_quality errors !986 - Add initial support for CI Web Terminal !934 - Make session and metrics server initialization logging consistent !994 - Make prepare-changelog-entries.rb script compatible with GitLab APIv4 !927 - Save compilation time always in UTC timezone !1000 - Extend debug logging for k8s executor !949 - Introduce GCS adapter for remote cache !968 - Make configuration of helper image more dynamic !1005 - Logrus upgrade - fix data race in helpers.MakeFatalToPanic() !1011 - Add few TODOs to mark things that should be cleaned in 12.0 !1013 - Update 
debug jobs list output !992 - Remove duplicate build_dir setting !1015 - Add step for updating Runner Helm chart !1009 - Clenup env, cli-options and deprecations of cache settings !1012 ## v11.2.0 (2018-08-22) - Fix support for Unicode variable values when Windows+PowerShell are used !960 - Update docs/executors/Kubernetes.md !957 - Fix missing code_quality widget !972 - Add `artifact` format !923 - Improve some k8s executor tests !980 - Set useragent in Kubernetes API calls !977 - Clarifying the tls-ca-file option is in the [[runners]] section !973 - Update mocks !983 - Add building to development heading !919 - Add coverage report for unit tests !928 - Add /etc/nsswitch.conf to helper on Docker executor to read /etc/hosts when upload artifacts !951 - Add busybox shell !900 - Fix support for features for shells !989 - Fix logrus secrets cleanup !990 - Fix test failure detection !993 ## v11.1.0 (2018-07-22) - Fix support for Unicode variable values when Windows+PowerShell are used !960 - Unify receivers used for 'executor' struct in ./executors/Docker/ !926 - Update Release Checklist template !898 - Cache the connectivity of live Docker Machine instances !909 - Update Kubernetes vendor to 1.10 !877 - Upgrade helper image alpine 3.7 !917 - Detect possible misplaced boolean on command line !932 - Log 'metrics_server' deprecation not only when the setting is used !939 - Speed-up ./executor/Docker/executor_Docker_command_test.go tests !937 - Remove go-bindata !831 - Fix the release of helper images script !946 - Sign RPM and DEB packages !922 - Improve Docker timeouts !963 - Wrap all Docker errors !964 ## v11.0.0 (2018-06-22) - Resolve "Invalid OffPeakPeriods value, no such file or directory." 
!897 - Add --paused option to register command !896 - Start rename of "metrics server" config !838 - Update virtualbox.md temporary fix for #2981 !889 - Fix panic on PatchTrace execution !905 - Do not send first PUT !908 - Rename CI_COMMIT_REF to CI_COMMIT_SHA !911 - Fix test file archiver tests !915 - Document how check_interval works !903 - Add link to development guide in readme !918 - Explain GitLab-runner workflow labels !921 - Change Prometheus metrics names !912 ## v10.8.0 (2018-05-22) - Resolve "Invalid OffPeakPeriods value, no such file or directory." !897 - Fix type in Substrakt Health company name !875 - Rename libre to core !879 - Correct hanging parenthesis in index.md !882 - Update interfaces mocks !871 - Rename keyword in Kubernetes executor documentation !880 - Temporary add 'retry: 2' for 'unit tests (no race)' job !885 - Update docs/executors/README.md !881 - Add support for fedora/27 and fedora/28 packages !883 - Update supported distribution releases !887 - Automatize release checklist issue creation !870 - Change docs license to CC BY-SA 4.0 !893 - Update Docker installation method docs !890 - Add new metrics related to jobs requesting and API usage !886 ## v10.7.0 (2018-04-22) - Rename Sirupsen/logrus library !843 - Refer to GitLab versions as libre, starter, premium, and ultimate !851 - Fix assert.Equal parameter order !854 - Upgrade Docker-machine to v0.14.0 !850 - Refactor autoscale docs !733 - Add possibility to specify memory in Docker containers !847 - Upgrade helper image to alpine 3.6 !859 - Update Docker images bases to alpine:3.7 and Ubuntu:16:04 !860 - Verify Git-lfs checksum !796 - Improve services health check !867 - Add proxy documentation !623 - Downgrade go to 1.8.7 !869 - Add support for max_job_timeout parameter in registration !846 ## v10.6.0 (2018-03-22) - Upgrade Docker-machine to v0.14.0 !850 - Upgrade helper image to alpine 3.6 !859 - Add CI_RUNNER_VERSION, CI_RUNNER_REVISION, and CI_RUNNER_EXECUTABLE_ARCH job 
environment variables !788 - Always prefer creating new containers when running with Docker Executor !818 - Use IAM instance profile credentials for S3 caching !646 - exec command is no longer deprecated !834 - Print a notice when skipping cache operation due to empty cache key !842 - Switch to Go 1.9.4 !827 - Move dependencies to dep !813 - Improve output of /debug/jobs/list !826 - Fix panic running Docker package tests !828 - Fixed typo in console output !845 ## v10.5.0 (2018-02-22) - Always prefer creating new containers when running with Docker Executor !818 - Improve output of /debug/jobs/list !826 - Fix panic running Docker package tests !828 - Fix Git 1.7.1 compatibility in executors/shell package tests !791 - Do not add /cache volume if already provided by the user during GitLab-runner register !807 - Change confusing Built value for development version !821 - docs: explain valid values for check_interval !801 - docs: Fix OffPeak variables list !806 - docs: Add note about GitLab-runner on the SSH host being used for uploads !817 ## v10.4.0 (2018-01-22) - Always load OS certificate pool when evaluating TLS connections !804 - Add (overwritable) pod annotations for the Kubernetes executor !666 - Docker.allowed_images can use glob syntax in config.TOML !721 - Added Docker runtime support !764 - Send `failure_reason` when updating job statues (GitLab API endpoint) !675 - Do not use `git config --local` as it's not available in Git v1.7.1 !790 - Use local GOPATH in Makefile !779 - Move Bleeding Edge release from Ubuntu/yakkety to ububut/artful !797 - Fix data race in commands package unit tests !787 - Fix data race in function common.(\*Trace).Write() !784 - Fix data races in executor/Docker package !800 - Fix data races in network package !775 ## v10.3.1 (2018-01-22) - Always load OS certificate pool when evaluating TLS connections !804 ## v10.3.0 (2017-12-22) - Do not use `git config --local` as it's not available in Git v1.7.1 !790 - new RC naming schema !780 
- Stop Docker Machine before removing it !718 - add `--checkout --force` options to `git submodule update --init` !704 - Fix trailing "" in syslog logging !734 - Fix Kubernetes executor job overwritten variables behavior !739 - Add zip archive for windows release files !760 - Add Kubernetes executor connection with service account, bearer token can also be overwritten !744 - Fix SIGSEGV in Kubernetes executor Cleanup !769 ## v10.2.1 (2018-01-22) - Do not use `git config --local` as it's not available in Git v1.7.1 !790 - Always load OS certificate pool when evaluating TLS connections !804 ## v10.2.0 (2017-11-22) - Update supported platforms !712 - Fix typo in Kubernetes runner docs !714 - Add info on upgrading to Runner 10 !709 - Add some documentation for disable_cache configuration option !713 - Remove .Git/HEAD.lock before Git fetch !722 - Add helper_image option to Docker executor config !723 - Add notes about GitLab-runner inside the VM being used for uploads !719 - Fix panic when global flags are passed as command flags !726 - Update MinIO go library to v3.0.3 !707 - Label ci_runner_builds metric with runner short token !729 ## v10.1.1 (2018-01-22) - Do not use `git config --local` as it's not available in Git v1.7.1 !790 - Always load OS certificate pool when evaluating TLS connections !804 ## v10.1.0 (2017-10-22) - Allow customizing go test flags with TESTFLAGS variable !688 - Clarify that cloning a runner could be considered an attack vector !658 - Remove disable_verbose from docs !692 - Add info about pre 10.0 releases !691 - Update BurntSushi/TOML for MIT-license !695 - Expose if running in a disposable environment !690 - Adds EmptyDir support for k8s volumes !660 - Update Git-lfs to 2.3.1 !703 - Collect metrics on build stages !689 - Construct Git remote URL based on configuration !698 - Set Git SSL information only for GitLab host !687 ## v10.0.2 (2017-10-04) - Hide tokens from URLs printed in job's trace !708 ## v10.0.1 (2017-09-27) - Remove 
deprecation message from service management commands !699 ## v10.0.0 (2017-09-22) > **Note:** With 10.0, we've moved repository from > to . Please update your Bookmarks! > **Note:** Starting with 10.0, we're marking the `exec` and service-related commands as **deprecated**. They will > be removed in one of the upcoming releases. > **Note:** Starting with 10.0, we're marking the `docker-ssh` and `docker-ssh+machine` executors as **deprecated**. > They will be removed in one of the upcoming releases. > **Note:** Starting with 10.0, behavior of `register` command was slightly changed. Please look into > for more details. - Lock runners to project by default on registration !657 - Update cli library !656 - Fix RunSingleCommand race condition in waitForInterrupts !594 - Add handling of non-existing images for Docker >= 17.07 !664 - Document how to define default image to run using Kubernetes executor !668 - Specify an explicit length for Git rev-parse --short to avoid conflicts when run !672 - Add link to Kubernetes executor details !670 - Add install VirtualBox step & improve VM setup details !676 - Rename repository from GitLab-ci-multi-runner to GitLab-runner !661 - Fix variable file permission !655 - Add Release Checklist template !677 - Fix randomly failing test from commands/single_test.go !684 - Mark Docker-SSH and Docker-SSH+machine executors as DEPRECATED !681 - Mark exec and service-management commands as DEPRECATED !679 - Fix support for `tmpfs` in Docker executor config !680 ## v9.5.1 (2017-10-04) - Hide tokens from URLs printed in job's trace !708 - Add handling of non-existing images for Docker >= 17.07 !664 ## v9.5.0 (2017-08-22) - Fix allowed_images behavior !635 - Cleanup formatting on windows upgrade details !637 - Names must meet the DNS name requirements (no upper case) !636 - Execute steps for build as-is, without joining and splitting them !626 - Fix typo on killall command !638 - Fix usage of one image for multiple services in one job !639 - 
Update Docker Machine to 0.12.2 and add checksum checking for Docker Machine and dumb-init for official Docker images !640 - Fix services usage when service name is using variable !641 - Remove confusing compatibility check !642 - Add sysctl support for Docker executor !541 - Reduce binary size with removing debugging symbols !643 - Add support for credentials store !501 - Fix I am not sure section link !650 - Add tzdata by default to official Docker images to avoid OffPeakPeriods timezone error !649 - Fix read error from upload artifacts execution !645 - Add support for tmpfs on the job container !654 - Include note about volume path on OSX !648 - Start using 'toc' in YAML frontmatter to explicitly disable it !644 ## v9.4.3 (2017-10-04) - Hide tokens from URLs printed in job's trace !708 - Add handling of non-existing images for Docker >= 17.07 !664 ## v9.4.2 (2017-08-02) - Fix usage of one image for multiple services in one job !639 - Fix services usage when service name is using variable !641 ## v9.4.1 (2017-07-25) - Fix allowed_images behavior !635 ## v9.4.0 (2017-07-22) - Use Go 1.8 for CI !620 - Warn on archiving Git directory !591 - Add CacheClient with timeout configuration for cache operations !608 - Remove '.Git/hooks/post-checkout' hooks when using fetch strategy !603 - Fix VirtualBox and Parallels executors registration bugs !589 - Support Kubernetes PVCs !606 - Support cache policies in .GitLab-ci.yml !621 - Improve Kubernetes volumes support !625 - Adds an option `--all` to unregister command !622 - Add the technical description of version release !631 - Update documentation on building Docker images inside of a Kubernetes cluster. !628 - Support for extended Docker configuration in GitLab-ci.yml !596 - Add ServicesTmpfs options to Docker runner configuration. 
!605 - Fix network timeouts !634 ## v9.3.0 (2017-06-22) - Make GitLab Runner metrics HTTP endpoint default to :9252 !584 - Add handling for Git_CHECKOUT variable to skip checkout !585 - Use HTTP status code constants from net/http library !569 - Remove tls-skip-verify from advanced-configuration.md !590 - Improve Docker machine removal !582 - Add support for Docker '--cpus' option !586 - Add requests backoff mechanism !570 - Fixed doc typo, change `--service-name` to `--service` !592 - Slight fix to build/ path in multi runner documentation !598 - Move docs on private Registry to GitLab docs !597 - Install Git LFS in Helper image for X86_64 !588 - Docker entrypoint: use exec !581 - Create GitLab-runner user on alpine !593 - Move registering Runners info in a separate document !599 - Add basic support for Kubernetes volumes !516 - Add required runners.Docker section to example config. !604 - Add userns support for Docker executor !553 - Fix another regression on Docker-machine credentials usage !610 - Added ref of Docker app installation !612 - Update linux-repository.md !615 ## v9.2.2 (2017-07-04) - Fix VirtualBox and Parallels executors registration bugs !589 ## v9.2.1 (2017-06-17) - Fix regression introduced in the way how `exec` parses `.gitlab-ci.yml` !535 - Fix another regression on Docker-machine credentials usage !610 ## v9.2.0 (2017-05-22) This release introduces a change in the ordering of artifacts and cache restoring! It may happen that someone, by mistake or by purpose, uses the same path in `.gitlab-ci.yml` for both cache and artifacts keywords, and this could cause that a stale cache might inadvertently override artifacts that are used across the pipeline. Starting with this release, artifacts are always restored after the cache to ensure that even in edge cases you can always rely on them. - Improve Windows runner details !514 - Add support for TLS client authentication !157 - Fix apt-get syntax to install a specific version. 
!563 - Add link to Using Docker Build CI docs !561 - Document the `coordinator` and make the FAQ list unordered !567 - Add links to additional Kubernetes details !566 - Add '/debug/jobs/list' endpoint that lists all handled jobs !564 - Remove .godir !568 - Add PodLabels field to Kubernetes config structure !558 - Remove the build container after execution has completed !571 - Print proper message when cache upload operation failed !556 - Remove redundant ToC from autoscale docs and add intro paragraph !574 - Make possible to compile Runner under Openbsd2 !511 - Improve Docker configuration docs !576 - Use contexes everywhere !559 - Add support for Kubernetes service account and override on GitLab-ci.YAML !554 - Restore cache before artifacts !577 - Fix link to the LICENSE file. !579 ## v9.1.3 (2017-07-04) - Fix VirtualBox and Parallels executors registration bugs !589 ## v9.1.2 (2017-06-17) - Print proper message when cache upload operation fails !556 - Fix regression introduced in the way how `exec` parses `.gitlab-ci.yml` !535 ## v9.1.1 (2017-05-02) - Fix apt-get syntax to install a specific version. 
!563 - Remove the build container after execution has completed !571 ## v9.1.0 (2017-04-22) - Don't install docs for the fpm Gem !526 - Mention tagged S3 sources in installation documentation !513 - Extend documentation about accessing Docker services !527 - Replace b.CurrentStage with b.CurrentState where it was misused !530 - Docker provider metrics cleanups and renaming !531 - Replace godep with govendor !505 - Add histogram metrics for Docker machine creation !533 - Fix cache containers dicsovering regression !534 - Add urls to environments created with CI release jobs !537 - Remove unmanaged Docker images sources !538 - Speed up CI pipeline !536 - Add job for checking the internal docs links !542 - Mention Runner -> GitLab compatibility concerns after 9.0 release !544 - Log error if API v4 is not present (GitLab CE/EE is older than 9.0) !528 - Cleanup variables set on GitLab already !523 - Add faq entry describing how to handle missing zoneinfo.zip problem !543 - Add documentation on how Runner uses MinIO library !419 - Update Docker.md - typo in runners documentation link !546 - Add log_level option to config.TOML !524 - Support private registries with Kubernetes !551 - Cleanup Kubernetes typos and wording !550 - Fix runner crashing on builds helper collect !529 - Config docs: Fix syntax in example TOML for Kubernetes !552 - Docker: Allow to configure shared memory size !468 - Return error for cache-extractor command when S3 cache source returns 404 !429 - Add executor stage to ci_runner_builds metric's labels !548 - Don't show image's ID when it's the same as image's name !557 - Extended verify command with runner selector !532 - Changed information line logged by Runner while unregistering !540 - Properly configure connection timeouts and keep-alives !560 - Log fatal error when concurrent is less than 1 !549 ## v9.0.4 (2017-05-02) - Fix apt-get syntax to install a specific version. 
!563 - Remove the build container after execution has completed !571 ## v9.0.3 (2017-04-21) - Fix runner crashing on builds helper collect !529 - Properly configure connection timeouts and keep-alives !560 ## v9.0.2 (2017-04-06) - Speed up CI pipeline !536 ## v9.0.1 (2017-04-05) - Don't install docs for the fpm Gem !526 - Mention tagged S3 sources in installation documentation !513 - Replace b.CurrentStage with b.CurrentState where it was misused !530 - Replace godep with govendor !505 - Fix cache containers dicsovering regression !534 - Add urls to environments created with CI release jobs !537 - Mention Runner -> GitLab compatibility concerns after 9.0 release !544 - Log error if API v4 is not present (GitLab CE/EE is older than 9.0) !528 ## v9.0.0 (2017-03-22) - Change dependency from `github.com/fsouza/go-dockerclient` to `github.com/docker/docker/client`" !301 - Update Docker-machine version to fix coreos provision !500 - Cleanup windows install docs !497 - Replace io.Copy with stdcopy.StdCopy for Docker output handling !503 - Fixes typo: current to concurrent. 
!508 - Modifies autoscale algorithm example !509 - Force-terminate VirtualBox and Parallels VMs so snapshot restore works properly !313 - Fix indentation of 'image_pull_secrets' in Kubernetes configuration example !512 - Show Docker image ID in job's log !507 - Fix word consistency in autoscaling docs !519 - Rename the binary on download to use GitLab-runner as command !510 - Improve details around limits !502 - Switch from CI API v1 to API v4 !517 - Make it easier to run tests locally !506 - Kubernetes private credentials !520 - Limit number of concurrent requests to builds/register.JSON !518 - Remove deprecated Kubernetes executor configuration fields !521 - Drop Kubernetes executor 'experimental' notice !525 ## v1.11.5 (2017-07-04) - Fix VirtualBox and Parallels executors registration bugs !589 ## v1.11.4 (2017-04-28) - Fixes test that was failing 1.11.3 release ## v1.11.3 (2017-04-28) - Add urls to environments created with CI release jobs !537 - Speed up CI pipeline !536 - Fix runner crashing on builds helper collect !529 ## v1.11.2 (2017-04-04) - Force-terminate VirtualBox and Parallels VMs so snapshot restore works properly !313 - Don't install docs for the fpm Gem !526 - Mention tagged S3 sources in installation documentation !513 - Limit number of concurrent requests to builds/register.JSON !518 - Replace b.CurrentStage with b.CurrentState where it was misused !530 ## v1.11.1 (2017-03-03) - Update Docker-machine version to fix coreos provision !500 ## v1.11.0 (2017-02-22) - Fix S3 and packagecloud uploads step in release process !455 - Add Ubuntu/yakkety to packages generation list !458 - Reduce size of GitLab-runner-helper images !456 - Fix crash on machine creation !461 - Rename 'Build (succeeded|failed)' to 'Job (succeeded|failed)' !459 - Fix race in helpers/Prometheus/log_hook.go: Fire() method !463 - Fix missing VERSION on Mac build !465 - Added post_build_script to call scripts after user-defined build scripts !460 - Fix offense reported by vet. 
Add vet to 'code style' job. !477 - Add the runner name to the first line of log output, after the version !473 - Make CI_DEBUG_TRACE working on Windows CMD !483 - Update packages targets !485 - Update Makefile (fix permissions on /usr/share/GitLab-runner/) !487 - Add timezone support for OffPeak intervals !479 - Set Git_SUBMODULE_STRATEGY=SubmoduleNone when Git_STRATEGY=GitNone !480 - Update maintainers information !489 ## v1.10.8 (2017-04-04) - Force-terminate VirtualBox and Parallels VMs so snapshot restore works properly !313 - Don't install docs for the fpm Gem !526 - Mention tagged S3 sources in installation documentation !513 - Limit number of concurrent requests to builds/register.JSON !518 - Replace b.CurrentStage with b.CurrentState where it was misused !530 ## v1.10.7 (2017-03-03) - Update Docker-machine version to fix coreos provision !500 ## v1.10.6 (2017-02-22) - Update Makefile (fix permissions on /usr/share/GitLab-runner/) !487 ## v1.10.5 (2017-02-20) - Update packages targets !485 ## v1.10.4 (2017-01-31) - Fix race in helpers/Prometheus/log_hook.go: Fire() method !463 ## v1.10.3 (2017-01-27) - Fix crash on machine creation !461 ## v1.10.2 (2017-01-26) - Add Ubuntu/yakkety to packages generation list !458 - Reduce size of GitLab-runner-helper images !456 ## v1.10.1 (2017-01-23) - Fix S3 and packagecloud uploads step in release process !455 ## v1.10.0 (2017-01-22) - Make /usr/share/GitLab-runner/clear-Docker-cache script /bin/sh compatible !427 - Handle Content-Type header with charset information !430 - Don't raise error if machines directory is missing on machines listing !433 - Change digital ocean autoscale to use stable coreos channel !434 - Fix package's scripts permissions !440 - Use -q flag instead of --format. 
!442 - Kubernetes termination grace period !383 - Check if directory exists before recreating it with Windows CMD !435 - Add '--run-tagged-only' cli option for runners !438 - Add armv6l to the ARM replacements list for Docker executor helper image !446 - Add configuration options for Kubernetes resource requests !391 - Add poll interval and timeout parameters for Kubernetes executor !384 - Add support for Git_SUBMODULE_STRATEGY !443 - Create index file for S3 downloads !452 - Add Prometheus metric that counts number of caught errors !439 - Exclude unused options from AbstractExecutor.Build.Options !445 - Update Docker Machine in official Runner images to v0.9.0 !454 - Pass ImagePullSecrets for Kubernetes executor !449 - Add Namespace overwrite possibility for Kubernetes executor !444 ## v1.9.10 (2017-03-23) - Force-terminate VirtualBox and Parallels VMs so snapshot restore works properly !313 ## v1.9.9 (2017-03-03) - Update Docker-machine version to fix coreos provision !500 ## v1.9.8 (2017-02-22) - Update Makefile (fix permissions on /usr/share/GitLab-runner/) !487 ## v1.9.7 (2017-02-20) - Update packages targets !485 ## v1.9.6 (2017-01-25) - Add Ubuntu/yakkety to packages generation list !458 ## v1.9.5 (2017-01-21) - Update Docker Machine in official Runner images to v0.9.0 !454 ## v1.9.4 (2017-01-15) - Add armv6l to the ARM replacements list for Docker executor helper image !446 ## v1.9.3 (2017-01-14) - Fix package's scripts permissions !440 - Check if directory exists before recreating it with Windows CMD !435 ## v1.9.2 (2017-01-04) - Handle Content-Type header with charset information !430 - Don't raise error if machines directory is missing on machines listing !433 ## v1.9.1 (2016-12-24) - Make /usr/share/GitLab-runner/clear-Docker-cache script /bin/sh compatible !427 ## v1.9.0 (2016-12-22) - Add pprof HTTP endpoints to metrics server !398 - Add a multiple Prometheus metrics: !401 - Split prepare stage to be: prepare, Git_clone, restore_cache, 
download_artifacts !406 - Update CONTRIBUTING.md to refer to go 1.7.1 !409 - Introduce Docker.Client timeouts !411 - Allow network-sourced variables to specify that they should be files !413 - Add a retry mechanism to prevent failed clones in builds !399 - Remove shallow.lock before fetching !407 - Colorize log entries for cmd and PowerShell !400 - Add section describing Docker usage do Kubernetes executor docs !394 - FreeBSD runner installation docs update !387 - Update prompts for register command !377 - Add volume_driver Docker configuration file option !365 - Fix bug permission denied on ci build with external cache !347 - Fix entrypoint for alpine image !346 - Add windows vm checklist for virtualbox documentation !348 - Clarification around authentication with the Kubernetes executor !296 - Fix Docker hanging for Docker-engine 1.12.4 !415 - Use lib machine to fetch a list of Docker-machines !418 - Cleanup Docker cache clear script !388 - Allow the --limit option to control the number of jobs a single runner will run !369 - Store and send last_update value with API calls against GitLab !410 - Add graceful shutdown documentation !421 - Add Kubernete Node Selector !328 - Push prebuilt images to dockerhub !420 - Add path and share cache settings for S3 cache !423 - Remove unnecessary warning about using image with the same ID as provided !424 - Add a link where one can download the packages directly !292 - Kubernetes executor - use pre-build container !425 ## v1.8.8 (2017-02-22) - Update Makefile (fix permissions on /usr/share/GitLab-runner/) !487 ## v1.8.7 (2017-02-20) - Update packages targets !485 ## v1.8.6 (2017-01-25) - Add Ubuntu/yakkety to packages generation list !458 ## v1.8.5 (2017-01-21) - Update Docker Machine in official Runner images to v0.9.0 !454 ## v1.8.4 (2017-01-15) - Add armv6l to the ARM replacements list for Docker executor helper image !446 ## v1.8.3 (2017-01-14) - Fix package's scripts permissions !440 - Check if directory exists before 
recreating it with Windows CMD !435 ## v1.8.2 (2017-01-04) - Handle Content-Type header with charset information !430 ## v1.8.1 (2016-11-29) - Refactor the private container registry docs !392 - Make pull policies usage clear !393 ## v1.8.0 (2016-11-22) - Fix {Bash,Cmd,Ps}Writer.IfCmd to escape its arguments !364 - Fix path to runners-SSH page !368 - Add initial Prometheus metrics server to runner manager !358 - Add a global index.md for docs !371 - Ensure that all builds are executed on tagged runners !374 - Fix broken documentation links !382 - Bug Fix: use a regex to pull out the service and version in the splitServiceAndVersion method !376 - Add FAQ entry about handling the service logon failure on Windows !385 - Fix "unit tests" random failures !370 - Use correct constant for Kubernetes resource limits. !367 - Unplug stalled endpoints !390 - Add PullPolicy config option for Kubernetes !335 - Handle received 'failed' build state while patching the trace !366 - Add support for using private Docker registries !386 ## v1.7.5 (2017-01-21) - Update Docker Machine in official Runner images to v0.9.0 !454 ## v1.7.4 (2017-01-15) - Add armv6l to the ARM replacements list for Docker executor helper image !446 ## v1.7.3 (2017-01-14) - Fix package's scripts permissions !440 - Check if directory exists before recreating it with Windows CMD !435 ## v1.7.2 (2017-01-04) - Handle Content-Type header with charset information !430 ## v1.7.1 (2016-10-25) - Fix {Bash,Cmd,Ps}Writer.IfCmd to escape its arguments !364 ## v1.7.0 (2016-10-21) - Improve description of --s3-bucket-location option !325 - Use Go 1.7 !323 - Add changelog entries generation script !322 - Add Docker_images release step to CI pipeline !333 - Refactor shell executor tests !334 - Introduce Git_STRATEGY=none !332 - Introduce a variable to enable shell tracing on bash, cmd.exe and PowerShell.exe !339 - Try to load the InCluster config first, if that fails load kubectl config !327 - Squash the "No TLS connection 
state" warning !343 - Add a benchmark for helpers.ShellEscape and optimise it !351 - Godep: update GitHub.com/Sirupsen/logrus to v0.10.0 !344 - Use Git clone --no-checkout and Git checkout --force !341 - Change machine.machineDetails to machine.Details !353 - Make runner name lowercase to work with GCE restrictions !297 - Add per job before_script handling for exec command !355 - Add OffPeak support for autoscaling !345 - Prevent caching failures from marking a build as failed !359 - Add missed "server" command for MinIO in autoscaled S3 cache tutorial !361 - Add a section for Godep in CONTRIBUTING.md !302 - Add a link to all install documentation files describing how to obtain a registration token !362 - Improve registration behavior !356 - Add the release process description !176 - Fix documentation typo in docs/configuration/advanced-configuration.md !354 - Fix data races around runner health and build stats !352 ## v1.6.1 (2016-09-30) - Add changelog entries generation script !322 - Add Docker_images release step to CI pipeline !333 ## v1.6.0 (2016-09-22) - Remove an unused method from the Docker executor !280 - Add note about certificate concatenation !278 - Restore 755 mode for GitLab-runner-service script !283 - Remove Git-lfs from Docker helper images !288 - Improve Kubernetes support !277 - docs: update troubleshooting section in development. !286 - Windows installation, added a precision on the install command (issue related #1265) !223 - Autodetect "/ci" in URL !289 - Defer removing failed containers until Cleanup() !281 - fix typo in tls-self-signed.md !294 - Improve CI tests !276 - Generate a BuildError when Docker/Kubernetes image is missing !295 - cmd.exe: Caret-escape parentheses when not inside double quotes !284 - Fixed some spelling/grammar mistakes. 
!291 - Update Go instructions in README !175 - Add APT pinning configuration for Debian in installation docs !303 - Remove YAML v1 !307 - Add options to runner configuration to specify commands executed before code clone and build !106 - Add RC tag support and fix version discovering !312 - Pass all configured CA certificates to builds !299 - Use Git-init templates (clone) and Git config without --global (fetch) to disable recurseSubmodules !314 - Improve Docker machine logging !234 - Add possibility to specify a list of volumes to inherit from another container !236 - Fix range mismatch handling error while patch tracing !319 - Add Docker+machine and Kubernetes executors to "I'm not sure" part of executors README.md !320 - Remove ./Git/index.lock before fetching !316 ## v1.5.3 (2016-09-13) - Fix Caret-escape parentheses when not inside double quotes for Windows cmd - Remove LFS from prebuilt images ## v1.5.2 (2016-08-24) (no changes) ## v1.5.1 (2016-08-24) - Fix file mode of GitLab-runner-service script !283 ## v1.5.0 (2016-08-22) - Update vendored TOML !258 - Release armel instead arm for Debian packages !264 - Improve concurrency of Docker+machine executor !254 - Use .xz for prebuilt Docker images to reduce binary size and provisioning speed of Docker Engines !249 - Remove vendored test files !271 - Update GitLab-runner-service to return 1 when no Host or PORT is defined !253 - Log caching URL address - Retry executor preparation to reduce system failures !244 - Fix missing entrypoint script in alpine Dockerfile !248 - Suppress all but the first warning of a given type when extracting a ZIP file !261 - Mount /builds folder to all services when used with Docker Executor !272 - Cache Docker client instances to avoid a file descriptor leak !260 - Support bind mount of `/builds` folder !193 ## v1.4.3 (2016-09-13) - Fix Caret-escape parentheses when not inside double quotes for Windows cmd - Remove LFS from prebuilt images ## v1.4.2 (2016-08-10) - Fix abort mechanism 
when patching trace ## v1.4.1 (2016-07-25) - Fix panic while artifacts handling errors ## v1.4.0 (2016-07-22) - Add Sentry support - Add support for cloning VirtualBox VM snapshots as linked clones - Add support for `security_opt` Docker configuration parameter in Docker executor - Add first integration tests for executors - Add many logging improvements (add more details to some logs, move some logs to Debug level, refactorize logger etc.) - Make final build trace upload be done before cleanup - Extend support for caching and artifacts to all executors - Improve support for Docker Machine - Improve build aborting - Refactor common/version - Use `environment` feature in `.gitlab-ci.yml` to track latest versions for Bleeding Edge and Stable - Fix Absolute method for absolute path discovering for bash - Fix zombie issues by using dumb-init instead of GitHub.com/ramr/go-reaper ## v1.3.5 (2016-09-13) - Fix Caret-escape parentheses when not inside double quotes for Windows cmd ## v1.3.4 (2016-07-25) - Fix panic while artifacts handling errors ## v1.3.3 (2016-07-15) - Fix zombie issue by using dumb-init ## v1.3.2 (2016-06-28) - Fix architecture detection bug introduced in 1.3.1 ## v1.3.1 (2016-06-24) - Detect architecture if not given by Docker Engine (versions before 1.9.0) ## v1.3.0 (2016-06-22) - Add incremental build trace update - Add possibility to specify CpusetCpus, Dns and DnsSearch for Docker containers created by runners - Add a custom `User-Agent` header with version number and runtime information (go version, platform, os) - Add artifacts expiration handling - Add artifacts handling for failed builds - Add customizable `check_interval` to set how often to check GitLab for a new builds - Add Docker Machine IP address logging - Make Docker Executor ARM compatible - Refactor script generation to make it fully on-demand - Refactor runnsers Acquire method to improve performance - Fix branch name setting at compile time - Fix panic when generating log message if 
provision of node fails - Fix Docker host logging - Prevent leaking of goroutines when aborting builds - Restore valid version info in --help message - [Experimental] Add `GIT_STRATEGY` handling - clone/fetch strategy configurable per job - [Experimental] Add `GIT_DEPTH` handling - `--depth` parameter for `git fetch` and `git clone` ## v1.2.0 (2016-05-22) - Use Go 1.6 - Add `timeout` option for the `exec` command - Add runtime platform information to debug log - Add `docker-machine` binary to Runner's official Docker images - Add `build_current` target to Makefile - to build only a binary for used architecture - Add support for `after_script` - Extend version information when using `--version` flag - Extend artifacts download/upload logs with more response data - Extend unregister command to accept runner name - Update shell detection mechanism - Update the GitHub.com/ayufan/golang-kardianos-service dependency - Replace ANSI_BOLD_YELLOW with ANSI_YELLOW color for logging - Reconcile VirtualBox status constants with VBoxManage output values - Make checkout quiet - Make variables to work at job level in exec mode - Remove "user mode" warning when running in a system mode - Create `gitlab-runner` user as a system account - Properly create `/etc/gitlab-runner/certs` in Runner's official Docker images - Disable recursive submodule fetching on fetching changes - Fix nil casting issue on Docker client creation - Fix used build platforms for `gox` - Fix a limit problems when trying to remove a non-existing machines - Fix S3 caching issues - Fix logging messages on artifacts downloading - Fix binary panic while using VirtualBox executor with no `vboxmanage` binary available ## v1.1.4 (2016-05-14) - Create /etc/GitLab-runner/certs - Exclude architectures from GOX, rather than including - Update minio-go to a newest version - Regression: Implement CancelRequest to fix S3 caching support - Fix: Skip removal of machine that doesn't exist (autoscaling) ## v1.1.3 (2016-04-14) - 
Regression: On Linux use `sh -s /bin/bash user -c` instead of `sh user -c`. This fixes non-login for user. - Regression: Fix user mode warning - Fix: vet installation - Fix: nil casting issue on Docker client creation - Fix: Docker client download issue ## v1.1.2 (2016-04-06) - Regression: revert shell detection mechanism and limit it only to Docker ## v1.1.1 (2016-04-06) - Fix: use different shell detection mechanism - Regression: support for `gitlab-runner exec` - Regression: support for login/non-login shell for Bash ## v1.1.0 (2016-03-29) - Use Go 1.5 - Change license to MIT - Add Docker-machine based auto-scaling for Docker executor - Add support for external cache server - Add support for `sh`, allowing to run builds on images without the `bash` - Add support for passing the artifacts between stages - Add `docker-pull-policy`, it removes the `docker-image-ttl` - Add `docker-network-mode` - Add `git` to GitLab-runner:alpine - Add support for `CapAdd`, `CapDrop` and `Devices` by Docker executor - Add support for passing the name of artifacts archive (`artifacts:name`) - Add support for running runner as system service on OSX - Refactor: The build trace is now implemented by `network` module - Refactor: Remove CGO dependency on Windows - Fix: Create alternative aliases for Docker services (uses `-`) - Fix: VirtualBox port race condition - Fix: Create cache for all builds, including tags - Fix: Make the shell executor more verbose when the process cannot be started - Fix: Pass GitLab-ci.yml variables to build container created by Docker executor - Fix: Don't restore cache if not defined in GitLab-ci.yml - Fix: Always use `json-file` when starting Docker containers - Fix: Error level checking for Windows Batch and PowerShell ## v1.0.4 (2016-02-10) - Fix support for Windows PowerShell ## v1.0.3 (2016-02-08) - Fix support for Windows Batch - Remove Git index lock file: this solves problem with Git checkout being terminated - Hijack Docker.Client to use keep-alives 
and to close extra connections ## v1.0.2 (2016-01-27) - Fix bad warning about not found untracked files - Don't print error about existing file when restoring the cache - When creating ZIP archive always use forward-slashes and don't permit encoding absolute paths - Prefer to use `path` instead of `filepath` which is platform specific: solves the Docker executor on Windows ## v1.0.1 (2016-01-24) - Use nice log formatting for command line tools - Don't ask for services during registration (we prefer the .GitLab-ci.yml) - Create all directories when extracting the file ## v1.0.0 (2016-01-22) - Add `gitlab-runner exec` command to easy running builds - Add `gitlab-runner status` command to easy check the status of the service - Add `gitlab-runner list` command to list all runners from config file - Allow to specify `ImageTTL` for configuration the frequency of Docker image re-pulling (see advanced-configuration) - Inject TLS certificate chain for `git clone` in build container, the GitLab-runner SSL certificates are used - Remove TLSSkipVerify since this is unsafe option - Add go-reaper to make GitLab-runner to act as init 1 process fixing zombie issue when running Docker container - Create and send artifacts as zip files - Add internal commands for creating and extracting archives without the system dependencies - Add internal command for uploading artifacts without the system dependencies - Use umask in Docker build containers to fix running jobs as specific user - Fix problem with `cache` paths never being archived - Add support for [`cache:key`](https://docs.gitlab.com/ci/yaml/#cachekey) - Add warnings about using runner in `user-mode` - Push packages to all upcoming distributions (Debian/Ubuntu/Fedora) - Rewrite the shell support adding all features to all shells (makes possible to use artifacts and caching on Windows) - Complain about missing caching and artifacts on some executors - Added VirtualBox executor - Embed prebuilt Docker build images in runner binary 
and load them if needed - Make possible to cache absolute paths (unsafe on shell executor) ## v0.7.2 (2015-11-25) - Adjust `umask` for build image - Use absolute path when executing archive command - Fix regression when variables were not passed to service container - Fix duplicate files in cache or artifacts archive ## v0.7.1 (2015-11-22) - Fix caching support - Suppress tar verbose output ## v0.7.0 (2015-11-21) - Refactor code structure - Refactor bash script adding pre-build and post-build steps - Add support for build artifacts - Add support for caching build directories - Add command to generate archive with cached folders or artifacts - Use separate containers to run pre-build (Git cloning), build (user scripts) and post-build (uploading artifacts) - Expand variables, allowing to use $CI_BUILD_TAG in image names, or in other variables - Make shell executor to use absolute path for project dir - Be strict about code formatting - Move network related code to separate package - Automatically load TLS certificates stored in /etc/GitLab-runner/certs/.crt - Allow to specify tls-ca-file during registration - Allow to disable tls verification during registration ## v0.6.2 (2015-10-22) - Fix PowerShell support - Make more descriptive pulling message - Add version check to Makefile ## v0.6.1 (2015-10-21) - Revert: Fix tags handling when using Git fetch: fetch all tags and prune the old ones ## v0.6.0 (2015-10-09) - Fetch Docker auth from ~/.Docker/config.JSON or ~/.dockercfg - Added support for NTFSSecurity PowerShell module to address problems with long paths on Windows - Make the service startup more readable in case of failure: print a nice warning message - Command line interface for register and run-single accepts all possible config parameters now - Ask about tags and fix prompt to point to GitLab.com/ci - Pin to specific Docker API version - Fix Docker volume removal issue - Add :latest to imageName if missing - Pull Docker images every minute - Added support 
for SIGQUIT to allow to gracefully finish runner: runner will not accept new jobs, will stop once all current jobs are finished. - Implicitly allow images added as services - Evaluate script command in subcontext, making it to close stdin (this change since 0.5.x where the separate file was created) - Pass container labels to Docker - Force to use go:1.4 for building packages - Fix tags handling when using Git fetch: fetch all tags and prune the old ones - Remove Docker socket from GitLab/GitLab-runner images - Pull (update) images and services every minute - Ignore options from Coordinator that are null - Provide FreeBSD binary - Use -ldflags for versioning - Update go packages - Fix segfault on service checker container - WARNING: By default allow to override image and services ## v0.5.5 (2015-08-26) - Fix cache_dir handling ## v0.5.4 (2015-08-26) - Update go-dockerclient to fix problems with creating Docker containers ## v0.5.3 (2015-08-21) - Pin to specific Docker API version - Fix Docker volume removal issue ## v0.5.2 (2015-07-31) - Fixed CentOS6 service script - Fixed documentation - Added development documentation - Log service messages always to syslog ## v0.5.1 (2015-07-22) - Update link for Docker configuration ## v0.5.0 (2015-07-21) - Allow to override image and services for Docker executor from Coordinator - Added support for additional options passed from coordinator - Added support for receiving and defining allowed images and services from the Coordinator - Rename GitLab_ci_multi_runner to GitLab-runner - Don't require config file to exist in order to run runner - Change where config file is stored: /etc/GitLab-runner/config.TOML (*nix, root), ~/.GitLab-runner/config.TOML (*nix, user) - Create config on service install - Require root to control service on Linux - Require to specify user when installing service - Run service as root, but impersonate as --user when executing shell scripts - Migrate config.TOML from user directory to /etc/GitLab-runner/ 
- Simplify service installation and upgrade - Add --provides and --replaces to package builder - PowerShell: check exit code in writeCommandChecked - Added installation tests - Add runner alpine-based image - Send executor features with RunnerInfo - Verbose mode by using `echo` instead of `set -v` - Colorize bash output - Set environment variables from bash script: this fixes problem with su - Don't cache Dockerfile VOLUMEs - Pass (public) environment variables received from Coordinator to service containers ## v0.4.2 - Force GC cycle after processing build - Use log-level set to info, but also make `Checking for builds: nothing` being print as debug - Fix memory leak - don't track references to builds ## v0.4.1 - Fixed service reregistration for RedHat systems ## v0.4.0 - Added CI=true and GitLab_CI=true to environment variables - Added output_limit (in kilobytes) to runner config which allows to enlarge default build log size - Added support for custom variables received from CI - Added support for SSH identity file - Optimize build path to make it shorter, more readable and allowing to fix shebang issue - Make the debug log human readable - Make default build log limit set to 4096 (4MB) - Make default concurrent set to 1 - Make default limit for runner set to 1 during registration - Updated kardianos service to fix OSX service installation - Updated logrus to make console output readable on Windows - Change default log level to warning - Make selection of forward or back slashes dependent by shell not by system - Prevent runner to be stealth if we reach the MaxTraceOutputSize - Fixed Windows Batch script when builds are located on different drive - Fixed Windows runner - Fixed installation scripts path - Fixed wrong architecture for i386 Debian packages - Fixed problem allowing commands to consume build script making the build to succeed even if not all commands were executed ## v0.3.4 (2015-06-15) - Create path before clone to fix Windows issue - Added CI=true 
and GitLab_CI=true - Fixed wrong architecture for i386 Debian packages ## v0.3.3 (2015-05-11) - Push package to Ubuntu/vivid and ol/6 and ol/7 ## v0.3.2 (2015-05-03) - Fixed Windows batch script generator ## v0.3.1 (2015-05-03) - Remove clean_environment (it was working only for shell scripts) - Run bash with --login (fixes missing .profile environment) ## v0.3.0 (2015-05-03) - Added repo slug to build path - Build path includes repository hostname - Support TLS connection with Docker - Default concurrent limit is set to number of CPUs - Make most of the config options optional - Rename setup/delete to register/unregister - Checkout as detached HEAD (fixes compatibility with older Git versions) - Update documentation ## v0.2.0 (2015-04-23) - Added delete and verify commands - Limit build trace size (1MB currently) - Validate build log to contain only valid UTF-8 sequences - Store build log in memory - Integrate with ci.GitLab.com - Make packages for ARM and CentOS 6 and provide beta version - Store Docker cache in separate containers - Support host-based volumes for Docker executor - Don't send build trace if nothing changed - Refactor build class ## v0.1.17 (2015-04-15) - Fixed high file descriptor usage that could lead to error: too many open files ## v0.1.16 (2015-04-13) - Fixed systemd service script ## v0.1.15 (2015-04-11) - Fix order of executor commands - Fixed service creation options - Fixed service installation on OSX ## v0.1.14 (2015-04-07) - Use custom kardianos/service with enhanced service scripts - Remove all system specific packages and use universal for package manager ## v0.1.13 (2015-04-01) - Added abstraction over shells - Moved all bash specific stuff to shells/bash.go - Select default shell for OS (bash for Unix, batch for Windows) - Added Windows Cmd support - Added Windows PowerShell support - Added the kardianos/service which allows to easily run GitLab-ci-multi-runner as service on different platforms - Unregister Parallels VMs which are 
invalid - Delete Parallels VM if it doesn't contain snapshots - Fixed concurrency issue when assigning unique names ## v0.1.12 (2015-03-20) - Abort all jobs if interrupt or SIGTERM is received - Runner now handles HUP and reloads config on-demand - Refactored runner setup allowing to non-interactive configuration of all questioned parameters - Added CI_PROJECT_DIR environment variable - Make golint happy (in most cases) ## v0.1.11 (2015-03-11) - Package as .deb and .rpm and push it to packagecloud.io (for now) ## v0.1.10 (2015-03-11) - Wait for Docker service to come up (Loïc Guitaut) - Send build log as early as possible ## v0.1.9 (2015-03-10) - Fixed problem with resetting Ruby environment ## v0.1.8 (2015-03-10) - Allow to use prefixed services - Allow to run on Heroku - Inherit environment variables by default for shell scripts - Mute Git messages during checkout - Remove some unused internal messages from build log ## v0.1.7 (2015-02-19) - Fixed Git checkout ## v0.1.6 (2015-02-17) - Remove Docker containers before starting job ## v0.1.5 (2015-02-14) - Added Parallels executor which can use snapshots for fast revert (only OSX supported) - Refactored sources ## v0.1.4 (2015-02-01) - Remove Job and merge it into Build - Introduce simple API server - Ask for services during setup ## v0.1.3 (2015-01-29) - Optimize setup - Optimize multi-runner setup - making it more concurrent - Send description instead of hostname during registration - Don't ask for tags ## v0.1.2 (2015-01-27) - Make it work on Windows ## v0.1.1 (2015-01-27) - Added Docker services ## v0.1.0 (2015-01-27) - Initial public release ================================================ FILE: CONTRIBUTING.md ================================================ ## Developer Certificate of Origin + License By contributing to GitLab Inc., You accept and agree to the following terms and conditions for Your present and future Contributions submitted to GitLab Inc. Except for the license granted herein to GitLab Inc. 
and recipients of software distributed by GitLab Inc., You reserve all right, title, and interest in and to Your Contributions. All Contributions are subject to the following DCO + License terms. [DCO + License](https://gitlab.com/gitlab-org/dco/blob/master/README.md) All Documentation content that resides under the [docs/ directory](/docs) of this repository is licensed under Creative Commons: [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/). _This notice should stay as the first item in the CONTRIBUTING.md file._ --- ## Contribute to GitLab Runner The following content is an extension of the [GitLab contribution guidelines](https://docs.gitlab.com/development/contributing/). ### How we prioritize MRs from the wider community Currently we use a system of [scoped labels](https://docs.gitlab.com/user/project/labels/#scoped-labels) to help us prioritize which MRs our team will review. | Label | Meaning | Use Cases | | ---- | ----- | ----- | | ~"Review::P1" | Highest priority to review. | Indicates a merge request that might solve an urgent pain point for users, contributes to the strategic direction of Runner development as laid out by the Product team, or fixes a critical issue. A hard cap on the number of contributions labelled ~"Review::P1" is set at 3. | | ~"Review::P2" | Important merge requests. | When a merge request is important, but has lower impact to customers when compared to merge requests labelled ~"Review::P1". | | ~"Review::P3" | Default priority to review. | All incoming merge requests should default to this. | ### Contributing new features that need new or updated `.gitlab-ci.yml` [keywords](https://docs.gitlab.com/ci/yaml/) To execute a job, the GitLab instance processes the `gitlab-ci.yml` configuration and creates a data transfer object, containing only data relevant to a job's execution, that GitLab Runner then receives. 
Because of this workflow, when you add a keyword that affects the execution of a job, you must make changes in both repositories: GitLab Runner and [GitLab](https://gitlab.com/gitlab-org/gitlab). When a feature needs changes in both repositories, the GitLab Runner team can accept a merge request only if the feature has already been accepted for inclusion in the GitLab repository. - Reviews in both repositories can happen in parallel. - The GitLab project will always dictate and have authority over which keywords are added. - The GitLab project maintainers determine what the behavior will ultimately be. For this reason, before starting a review in the GitLab Runner project, the team requires confirmation that a keyword or a change to a keyword is likely to be accepted. This process helps save time and ensures that we end up with the best solution possible for the problem being solved. ### Contributing new [executors](https://docs.gitlab.com/runner/#selecting-the-executor) We are no longer accepting or developing new executors for a few reasons listed below: - Some executors require licensed software or hardware that GitLab Inc. doesn't have. - Each new executor brings its own set of problems when it comes to testing it properly. - Adding new executors can add new dependencies, which adds maintenance costs. - Having a lot of executors adds to maintenance costs. With GitLab 12.1, we introduced the [custom executor](https://gitlab.com/gitlab-org/gitlab-runner/issues/2885), which will provide a way to create an executor of choice. ### Contributing new hardware architectures We're currently exploring how we can add builds for new and different hardware architectures. Adding and supporting new architectures brings added levels of complexity and may require hardware that GitLab Inc. doesn't have access to. At the current time, new hardware architectures will only be considered if the following criteria are met: 1. GitLab Inc. 
must be able to build and test for the new architecture on our Shared Runners on GitLab.com 1. If you add support for the new architecture in the helper image, Docker must also support the architecture upstream As we explore adding more architectures, other requirements may come up. We are currently discussing the ability to provide builds for architectures that we don't have the ability to support and [we welcome contributions to that discussion](https://gitlab.com/gitlab-org/gitlab-runner/issues/4229). ### Submitting Merge Requests #### Merge Request titles When submitting a Merge Request please remember that we use the Merge Request titles to generate entries for the [`CHANGELOG.md`](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/CHANGELOG.md) file. This one line will be the only thing a Runner administrator will see when reviewing the changelog before deciding if an upgrade should be made or not. The administrator may not check the MR description, list of changes, or diff which would give more context. Please make the title clear, concise and informative. A title of `Fixes bug` would not be acceptable, while `Fix timestamp in docker executor job output` would be acceptable. ### Workflow labels We have some additional labels plus those defined in [gitlab-ce workflow labels](https://docs.gitlab.com/development/contributing/issue_workflow/) - Additional subjects: ~cache, ~executors, ~"git operations" - OS: ~"os::Linux" ~"os::macOS" ~"os::FreeBSD" ~"os::Windows" - executor: ~"executor::docker" ~"executor::kubernetes" ~"executor::docker\-machine" ~"executor::shell" ~"executor::parallels" ~"executor::virtualbox" - For any [follow-up issues](https://docs.gitlab.com/development/contributing/issue_workflow/#technical-debt-in-follow-up-issues) created during code review the ~"follow-up" label should be added to keep track of it. 
================================================
FILE: Dangerfile
================================================
# Danger configuration: pulls in the shared gitlab-dangerfiles plugins and
# MR rules used across GitLab projects.
require "gitlab-dangerfiles"

Gitlab::Dangerfiles.for_project(self) do |dangerfiles|
  dangerfiles.import_plugins
  dangerfiles.import_dangerfiles
end

================================================
FILE: LICENSE
================================================
The MIT License (MIT)

Copyright (c) 2015-2019 GitLab Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
================================================
FILE: Makefile
================================================
# Top-level build orchestration for GitLab Runner.
# Version/revision/branch metadata is derived from git and ./ci/version.
NAME ?= gitlab-runner
APP_NAME ?= $(NAME)
export PACKAGE_NAME ?= $(NAME)
export VERSION := $(shell ./ci/version)
REVISION := $(shell git rev-parse --short=8 HEAD || echo unknown)
BRANCH := $(shell git show-ref | grep "$(REVISION)" | grep -v HEAD | awk '{print $$2}' | sed 's|refs/remotes/origin/||' | sed 's|refs/heads/||' | sort | head -n 1)
export TESTFLAGS ?= -cover

# Newest non-RC v*.*.* tag; IS_LATEST becomes "true" when HEAD matches it exactly.
LATEST_STABLE_TAG := $(shell git -c versionsort.prereleaseSuffix="-rc" -c versionsort.prereleaseSuffix="-RC" tag -l "v*.*.*" | sort -rV | awk '!/rc/' | head -n 1)
export IS_LATEST :=
ifeq ($(shell git describe --exact-match --match $(LATEST_STABLE_TAG) >/dev/null 2>&1; echo $$?), 0)
export IS_LATEST := true
endif

BUILD_ARCHS ?= -arch '386' -arch 'arm' -arch 'amd64' -arch 'arm64' -arch 's390x' -arch 'ppc64le' -arch 'riscv64' -arch 'loong64'
BUILD_PLATFORMS ?= -osarch 'darwin/amd64' -osarch 'darwin/arm64' -os 'linux' -os 'freebsd' -os 'windows' ${BUILD_ARCHS}

S3_UPLOAD_PATH ?= main

# Package architectures are only resolvable when mage is installed.
ifeq ($(shell mage >/dev/null 2>&1; echo $$?), 0)
DEB_ARCHS := $(shell mage package:archs deb)
RPM_ARCHS := $(shell mage package:archs rpm)
endif

PKG = gitlab.com/gitlab-org/$(PACKAGE_NAME)
COMMON_PACKAGE_NAMESPACE = $(PKG)/common

BUILD_DIR := $(CURDIR)
TARGET_DIR := $(BUILD_DIR)/out

export MAIN_PACKAGE ?= gitlab.com/gitlab-org/gitlab-runner

# Linker flags embed application name/version/branch into the binaries.
GO_LDFLAGS ?= -X $(COMMON_PACKAGE_NAMESPACE).NAME=$(APP_NAME) -X $(COMMON_PACKAGE_NAMESPACE).VERSION=$(VERSION) \
              -X $(COMMON_PACKAGE_NAMESPACE).BRANCH=$(BRANCH) \
              -w
GO_TEST_LDFLAGS ?= -X $(COMMON_PACKAGE_NAMESPACE).NAME=$(APP_NAME)
GO_FILES ?= $(shell find . -name '*.go')
export CGO_ENABLED ?= 0

# Local tool installation prefix; $(localBin) is prepended to PATH.
local := $(PWD)/.tmp
localBin := $(local)/bin
export GOBIN=$(localBin)
export PATH := $(localBin):$(PATH)

# Development Tools
GOCOVER_COBERTURA = gocover-cobertura
MOCKERY_VERSION ?= 3.6.4
MOCKERY = mockery
PROTOC := $(localBin)/protoc
PROTOC_VERSION := 28.2
PROTOC_GEN_GO := protoc-gen-go
PROTOC_GEN_GO_VERSION := v1.36.11
PROTOC_GEN_GO_GRPC := protoc-gen-go-grpc
PROTOC_GEN_GO_GRPC_VERSION := v1.6.1
SPLITIC = splitic
MAGE = $(localBin)/mage
GOLANGLINT_VERSION ?= 2.11.4
GOLANGLINT ?= $(localBin)/golangci-lint
GOLANGLINT_GOARGS ?= $(localBin)/goargs.so

# Labkit validate-log-fields version
LABKIT_VALIDATE_VERSION := v2.0.0-20260331132242-b6ef9bf35f1d

GENERATED_FILES_TOOLS = $(MOCKERY) $(PROTOC) $(PROTOC_GEN_GO) $(PROTOC_GEN_GO_GRPC)
DEVELOPMENT_TOOLS = $(MOCKERY) $(MAGE)

RELEASE_INDEX_GEN_VERSION ?= latest
RELEASE_INDEX_GENERATOR ?= $(localBin)/release-index-gen-$(RELEASE_INDEX_GEN_VERSION)
GITLAB_CHANGELOG_VERSION ?= latest
GITLAB_CHANGELOG = $(localBin)/gitlab-changelog-$(GITLAB_CHANGELOG_VERSION)

.PHONY: all
all: deps runner-and-helper-bin

include Makefile.runner_helper.mk
include Makefile.build.mk

# The tab-indented "#" lines below are shell comments: make echoes each one
# (no "@" prefix) and the shell then ignores it, which is what prints the help.
.PHONY: help
help:
	# Commands:
	# make all => install deps and build Runner binaries and Helper images
	# make version - show information about current version
	#
	# Development commands:
	# make development_setup - setup needed environment for tests
	# make runner-bin-host - build executable for your arch and OS
	# make runner-and-helper-bin-host - build executable for your arch and OS, including docker dependencies
	# make runner-and-helper-bin-linux - build executable for all supported architectures for linux OS, including docker dependencies
	# make runner-and-helper-bin - build executable for all supported platforms, including docker dependencies
	# make tools - install all dev tools and dependency binaries for local development
	#
	# Testing commands:
	# make test - run project tests
	# make lint - run code quality analysis
	# make lint-docs - run documentation linting
	#
	# Deployment commands:
	# make deps - install all dependencies
	# make runner-bin - build project for all supported platforms
	# make package - package project using FPM
	#
	# Local Docker support commands
	# make runner-bin-linux - build runner linux binary, on any host OS
	# make helper-bin-linux - build helper linux binary, on any host OS
	# make runner-local-image - build gitlab-runner:local docker image
	# make helper-local-image - build gitlab-runner-helper:local docker image
	# make runner-and-helper-local-image - same as make runner-local-image helper-local-image

.PHONY: version
version:
	@echo Current version: $(VERSION)
	@echo Current revision: $(REVISION)
	@echo Current branch: $(BRANCH)
	@echo Build platforms: $(BUILD_PLATFORMS)
	@echo DEB archs: $(DEB_ARCHS)
	@echo RPM archs: $(RPM_ARCHS)
	@echo IS_LATEST: $(IS_LATEST)

.tmp:
	mkdir -p .tmp

.PHONY: deps
deps: $(DEVELOPMENT_TOOLS)

.PHONY: format
format: $(GOLANGLINT)
	@$(GOLANGLINT) run --fix --output.text.path=stdout --output.text.colors=true ./...

.PHONY: lint
lint: OUT_FORMAT ?= --output.text.path=stdout --output.text.colors=true
lint: LINT_FLAGS ?=
lint: $(GOLANGLINT)
	@$(MAKE) check_test_directives >/dev/stderr
	@$(GOLANGLINT) run $(OUT_FORMAT) $(LINT_FLAGS) ./...

.PHONY: lint-docs
lint-docs:
	@scripts/lint-docs

.PHONY: lint-i18n-docs
lint-i18n-docs:
	@scripts/lint-i18n-docs

.PHONY: format-ci-yaml
format-ci-yaml:
	prettier --write ".gitlab/ci/*.{yaml,yml}"

.PHONY: lint-ci-yaml
lint-ci-yaml:
	prettier --check ".gitlab/ci/**/*.{yml,yaml}" --log-level warn

.PHONY: test
test: development_setup simple-test

# Compile the test binaries (including tagged variants) without running any test.
.PHONY: test-compile
test-compile:
	go test -count=1 --tags=integration -run=nope ./...
	go test -count=1 --tags=integration,steps -run=nope ./...
	go test -count=1 --tags=integration,kubernetes -run=nope ./...
	go test -count=1 -run=nope ./...

.PHONY: validate-log-fields
# Validate logging fields using labkit's validate-log-fields tool.
validate-log-fields:
	go run gitlab.com/gitlab-org/labkit/v2/cmd/validate-log-fields@${LABKIT_VALIDATE_VERSION} .

simple-test: TEST_PKG ?= $(shell go list ./...)
simple-test:
	# use env -i to clear parent environment variables for go test
	go test $(TEST_PKG) $(TESTFLAGS) -ldflags "$(GO_LDFLAGS)"

mage-test:
	go test -ldflags "$(GO_LDFLAGS)" -v ./magefiles/...

# Merge split coverage profiles and convert them to Cobertura XML reports.
cobertura_report: $(GOCOVER_COBERTURA) $(SPLITIC)
	mkdir -p out/cobertura
	mkdir -p out/coverage
	$(SPLITIC) cover-merge $(wildcard .splitic/cover_?.profile) > out/coverage/coverprofile.regular.source.txt
	$(SPLITIC) cover-merge $(wildcard .splitic/cover_windows_?.profile) > out/coverage/coverprofile_windows.regular.source.txt
	GOOS=linux $(GOCOVER_COBERTURA) < out/coverage/coverprofile.regular.source.txt > out/cobertura/cobertura-coverage-raw.xml
	GOOS=windows $(GOCOVER_COBERTURA) < out/coverage/coverprofile_windows.regular.source.txt > out/cobertura/cobertura-coverage-windows-raw.xml
	@ # NOTE: Remove package paths.
	@ # See https://gitlab.com/gitlab-org/gitlab/-/issues/217664
	sed 's;filename=\"gitlab.com/gitlab-org/gitlab-runner/;filename=\";g' out/cobertura/cobertura-coverage-raw.xml > \
		out/cobertura/cobertura-coverage.xml
	sed 's;filename=\"gitlab.com/gitlab-org/gitlab-runner/;filename=\";g' out/cobertura/cobertura-coverage-windows-raw.xml > \
		out/cobertura/cobertura-windows-coverage.xml

export_test_env:
	@echo "export GO_LDFLAGS='$(GO_LDFLAGS)'"
	@echo "export MAIN_PACKAGE='$(MAIN_PACKAGE)'"

dockerfiles:
	$(MAKE) -C dockerfiles all

# Delete and regenerate all mocks and protobuf-generated code.
.PHONY: generated_files
generated_files: $(GENERATED_FILES_TOOLS)
	rm -rf ./helpers/service/mocks
	find . -type f -name 'mock_*' -delete
	find . -type f -name '*.pb.go' -delete
	go generate -v -x ./...
	cd ./helpers/runner_wrapper/api && go generate -v -x ./...
	$(localBin)/$(MOCKERY)

check_generated_files: generated_files
	# Checking the differences
	@git --no-pager diff --compact-summary --exit-code -- ./helpers/service/mocks \
		$(shell git ls-files | grep -e "mock_" -e "\.pb\.go") && \
		!(git ls-files -o | grep -e "mock_" -e "\.pb\.go") && \
		echo "Generated files up-to-date!"

generate_magefiles:
	$(shell mage generate)

check_magefiles: generate_magefiles
	# Checking the differences
	@git --no-pager diff --compact-summary --exit-code -- ./magefiles \
		$(shell git ls-files | grep '^magefiles/') && \
		!(git ls-files -o | grep '^magefiles/') && \
		echo "Magefiles up-to-date!"

test-docker:
	$(MAKE) test-docker-image IMAGE=centos:7 TYPE=rpm
	$(MAKE) test-docker-image IMAGE=debian:wheezy TYPE=deb
	$(MAKE) test-docker-image IMAGE=debian:jessie TYPE=deb
	$(MAKE) test-docker-image IMAGE=ubuntu-upstart:precise TYPE=deb
	$(MAKE) test-docker-image IMAGE=ubuntu-upstart:trusty TYPE=deb
	$(MAKE) test-docker-image IMAGE=ubuntu-upstart:utopic TYPE=deb

test-docker-image:
	tests/test_installation.sh $(IMAGE) out/$(TYPE)/$(PACKAGE_NAME)_amd64.$(TYPE)
	tests/test_installation.sh $(IMAGE) out/$(TYPE)/$(PACKAGE_NAME)_amd64.$(TYPE) Y

# NOTE(review): the `[ -z "$(SERVER)" ] && ... && exit 1` guard below exits
# non-zero when SERVER *is* set (the && chain short-circuits on the failed
# test), which make treats as a recipe failure — confirm this is intended.
build-and-deploy: ARCH ?= amd64
build-and-deploy:
	$(MAKE) runner-and-helper-bin BUILD_PLATFORMS="-osarch=linux/$(ARCH)"
	$(MAKE) package-deb-arch ARCH=$(ARCH) PACKAGE_ARCH=$(ARCH)
	@[ -z "$(SERVER)" ] && echo "SERVER variable not specified!" && exit 1
	scp out/deb/$(PACKAGE_NAME)_$(ARCH).deb $(SERVER):
	ssh $(SERVER) dpkg -i $(PACKAGE_NAME)_$(ARCH).deb

build-and-deploy-binary: ARCH ?= amd64
build-and-deploy-binary:
	$(MAKE) runner-bin BUILD_PLATFORMS="-osarch=linux/$(ARCH)"
	@[ -z "$(SERVER)" ] && echo "SERVER variable not specified!" && exit 1
	scp out/binaries/$(PACKAGE_NAME)-linux-$(ARCH) $(SERVER):/usr/bin/gitlab-runner

release_s3: prepare_windows_zip prepare_zoneinfo release_dir prepare_index
	# Releasing to S3
	@./ci/release_s3

release_dir:
	@./ci/release_dir

prepare_windows_zip: out/binaries/gitlab-runner-windows-386.zip out/binaries/gitlab-runner-windows-amd64.zip out/binaries/gitlab-runner-windows-arm64.zip

out/binaries/gitlab-runner-windows-386.zip: out/binaries/gitlab-runner-windows-386.exe
	zip -j out/binaries/gitlab-runner-windows-386.zip out/binaries/gitlab-runner-windows-386.exe
	cd out && zip binaries/gitlab-runner-windows-386.zip helper-images/prebuilt-*.tar.xz

out/binaries/gitlab-runner-windows-amd64.zip: out/binaries/gitlab-runner-windows-amd64.exe
	zip -j out/binaries/gitlab-runner-windows-amd64.zip out/binaries/gitlab-runner-windows-amd64.exe
	cd out && zip binaries/gitlab-runner-windows-amd64.zip helper-images/prebuilt-*.tar.xz

out/binaries/gitlab-runner-windows-arm64.zip: out/binaries/gitlab-runner-windows-arm64.exe
	zip -j out/binaries/gitlab-runner-windows-arm64.zip out/binaries/gitlab-runner-windows-arm64.exe
	cd out && zip binaries/gitlab-runner-windows-arm64.zip helper-images/prebuilt-*.tar.xz

prepare_zoneinfo:
	# preparing the zoneinfo file
	@cp $(shell go env GOROOT)/lib/time/zoneinfo.zip out/

prepare_index: export CI_COMMIT_REF_NAME ?= $(BRANCH)
prepare_index: export CI_COMMIT_SHA ?= $(REVISION)
prepare_index: $(RELEASE_INDEX_GENERATOR)
	# Preparing index file
	@$(RELEASE_INDEX_GENERATOR) -working-directory out/release \
		-project-version $(VERSION) \
		-project-git-ref $(CI_COMMIT_REF_NAME) \
		-project-git-revision $(CI_COMMIT_SHA) \
		-project-name "GitLab Runner" \
		-project-repo-url "https://gitlab.com/gitlab-org/gitlab-runner" \
		-gpg-key-env GPG_KEY \
		-gpg-password-env GPG_PASSPHRASE

# Generic driver for the helper scripts under ./scripts; callers set
# SCRIPT_NAME/DEFAULT_ARGS/ARGS (see sync_docker_images and friends).
run_go_script: export SCRIPT_NAME ?=
run_go_script: export DEFAULT_ARGS ?=
run_go_script: export ARGS ?=
run_go_script:
	@cd scripts && go run $(SCRIPT_NAME)/main.go \
		$(DEFAULT_ARGS) \
		$(ARGS)
sync_docker_images: export ARGS ?= --concurrency=3
sync_docker_images:
	@$(MAKE) \
		SCRIPT_NAME=sync-docker-images \
		DEFAULT_ARGS="--revision $(REVISION)" \
		ARGS="$(ARGS)" \
		run_go_script

check_test_directives:
	@$(MAKE) \
		SCRIPT_NAME=check-test-directives \
		ARGS="$(shell pwd)" \
		run_go_script

update_feature_flags_docs:
	@$(MAKE) \
		SCRIPT_NAME=update-feature-flags-docs \
		ARGS="$(shell pwd)" \
		run_go_script

generate_changelog: export CHANGELOG_RELEASE ?= $(VERSION)
generate_changelog: $(GITLAB_CHANGELOG)
	# Generating new changelog entries
	@$(GITLAB_CHANGELOG) -project-id 250833 \
		-release $(CHANGELOG_RELEASE) \
		-starting-point-matcher "v[0-9]*.[0-9]*.[0-9]*" \
		-config-file .gitlab/changelog.yml \
		-changelog-file CHANGELOG.md

check-tags-in-changelog:
	# Looking for tags in CHANGELOG
	@git status | grep "On branch main" 2>&1 >/dev/null || echo "Check should be done on main branch only. Skipping."
	@for tag in $$(git tag | grep -E "^v[0-9]+\.[0-9]+\.[0-9]+$$" | sed 's|v||' | sort -g); do \
		state="MISSING"; \
		grep "^v $$tag" CHANGELOG.md 2>&1 >/dev/null; \
		[ "$$?" -eq 1 ] || state="OK"; \
		echo "$$tag: \t $$state"; \
	done

development_setup:
	test -d tmp/gitlab-test || git clone https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-pipeline-tests/gitlab-test tmp/gitlab-test
	if prlctl --version ; then $(MAKE) -C tests/ubuntu parallels ; fi
	if vboxmanage --version ; then $(MAKE) -C tests/ubuntu virtualbox ; fi

# NOTE(review): each recipe line runs in its own shell, so the lone
# `@cd ./helpers/runner_wrapper/api/` line below does not change the working
# directory of the commands that follow it — confirm whether those checks
# are meant to run inside that sub-module.
check_modules:
	# check go.mod and go.sum
	@git checkout HEAD -- go.mod go.sum
	@git diff go.mod go.sum > /tmp/gomodsum-$${CI_JOB_ID}-before
	@go mod tidy
	@git diff go.mod go.sum > /tmp/gomodsum-$${CI_JOB_ID}-after
	@diff -U0 /tmp/gomodsum-$${CI_JOB_ID}-before /tmp/gomodsum-$${CI_JOB_ID}-after
	# check dependency resolution
	@go list -m all >/dev/null
	# check helpers/runner_wrapper/api/ go.sum
	@cd ./helpers/runner_wrapper/api/
	@git checkout HEAD -- go.mod go.sum
	@git diff go.mod go.sum > /tmp/gomodsum-$${CI_JOB_ID}-before
	@go mod tidy
	@git diff go.mod go.sum > /tmp/gomodsum-$${CI_JOB_ID}-after
	@diff -U0 /tmp/gomodsum-$${CI_JOB_ID}-before /tmp/gomodsum-$${CI_JOB_ID}-after
	# check dependency helpers/runner_wrapper/api/ resolution
	@go list -m all >/dev/null

# development tools
$(GOCOVER_COBERTURA):
	@go install github.com/boumenot/gocover-cobertura@v1.2.0

$(SPLITIC):
	@go install gitlab.com/gitlab-org/ci-cd/runner-tools/splitic@latest

.PHONY: mage
mage: $(MAGE)
	@:

$(MAGE): .tmp
	cd .tmp && \
		rm -rf mage && \
		git clone https://github.com/magefile/mage && \
		cd mage && \
		GOPATH=$(local) go run bootstrap.go
	# Remove the source code once binary built
	# Go intentionally makes module cache directories read-only to prevent accidental modifications
	GOPATH=$(local) go clean -modcache
	rm -rf .tmp/mage .tmp/pkg

# golangci-lint is built from source so the goargs plugin (built below with
# matching toolchain settings) can be loaded.
ifneq ($(GOLANGLINT_VERSION),)
$(GOLANGLINT): CHECKOUT_REF := -b v"$(GOLANGLINT_VERSION)"
endif
$(GOLANGLINT): TOOL_BUILD_DIR := .tmp/build/golangci-lint
$(GOLANGLINT): $(GOLANGLINT_GOARGS)
$(GOLANGLINT):
	rm -rf $(TOOL_BUILD_DIR)
	git clone https://github.com/golangci/golangci-lint.git --no-tags --depth 1 $(CHECKOUT_REF) $(TOOL_BUILD_DIR)
	cd $(TOOL_BUILD_DIR) && \
		export COMMIT=$(shell git rev-parse --short HEAD) && \
		export DATE=$(shell date -u '+%FT%TZ') && \
		CGO_ENABLED=1 go build --trimpath -o $(GOLANGLINT) \
		-ldflags "-s -w -X main.version=v$(GOLANGLINT_VERSION) -X main.commit=$${COMMIT} -X main.date=$${DATE}" \
		./cmd/golangci-lint/
	$(GOLANGLINT) --version
	rm -rf $(TOOL_BUILD_DIR)

$(GOLANGLINT_GOARGS): TOOL_BUILD_DIR := .tmp/build/goargs
$(GOLANGLINT_GOARGS):
	rm -rf $(TOOL_BUILD_DIR)
	git clone https://gitlab.com/gitlab-org/language-tools/go/linters/goargs.git --no-tags --depth 1 $(TOOL_BUILD_DIR)
	cd $(TOOL_BUILD_DIR) && \
		CGO_ENABLED=1 go build --trimpath --buildmode=plugin -o $(GOLANGLINT_GOARGS) plugin/analyzer.go
	rm -rf $(TOOL_BUILD_DIR)

.PHONY: $(MOCKERY)
$(MOCKERY):
	@go install github.com/vektra/mockery/v3@v$(MOCKERY_VERSION)

$(PROTOC): OS_TYPE ?= $(shell uname -s | tr '[:upper:]' '[:lower:]' | sed 's/darwin/osx/')
$(PROTOC): ARCH_SUFFIX = $(if $(findstring osx,$(OS_TYPE)),universal_binary,x86_64)
$(PROTOC): DOWNLOAD_URL = https://github.com/protocolbuffers/protobuf/releases/download/v$(PROTOC_VERSION)/protoc-$(PROTOC_VERSION)-$(OS_TYPE)-$(ARCH_SUFFIX).zip
$(PROTOC): TOOL_BUILD_DIR = $(local)/build
$(PROTOC):
	# Installing $(DOWNLOAD_URL) as $(PROTOC)
	@mkdir -p $(shell dirname $(PROTOC))
	@mkdir -p "$(TOOL_BUILD_DIR)"
	@curl -sL "$(DOWNLOAD_URL)" -o "$(TOOL_BUILD_DIR)/protoc.zip"
	@unzip "$(TOOL_BUILD_DIR)/protoc.zip" -d "$(TOOL_BUILD_DIR)/"
	# Moving $(TOOL_BUILD_DIR)/bin/protoc to $(PROTOC)
	@mv "$(TOOL_BUILD_DIR)/bin/protoc" "$(PROTOC)"
	@rm -rf "$(TOOL_BUILD_DIR)"
	# Making $(PROTOC) executable
	@chmod +x "$(PROTOC)"

.PHONY: $(PROTOC_GEN_GO)
$(PROTOC_GEN_GO):
	@go install google.golang.org/protobuf/cmd/protoc-gen-go@$(PROTOC_GEN_GO_VERSION)

.PHONY: $(PROTOC_GEN_GO_GRPC)
$(PROTOC_GEN_GO_GRPC):
	@go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@$(PROTOC_GEN_GO_GRPC_VERSION)

$(RELEASE_INDEX_GENERATOR): OS_TYPE ?= $(shell uname -s | tr '[:upper:]' '[:lower:]')
$(RELEASE_INDEX_GENERATOR): DOWNLOAD_URL = "https://storage.googleapis.com/gitlab-runner-tools/release-index-generator/$(RELEASE_INDEX_GEN_VERSION)/release-index-gen-$(OS_TYPE)-amd64"
$(RELEASE_INDEX_GENERATOR):
	# Installing $(DOWNLOAD_URL) as $(RELEASE_INDEX_GENERATOR)
	@mkdir -p $(shell dirname $(RELEASE_INDEX_GENERATOR))
	@curl -sL "$(DOWNLOAD_URL)" -o "$(RELEASE_INDEX_GENERATOR)"
	@chmod +x "$(RELEASE_INDEX_GENERATOR)"

$(GITLAB_CHANGELOG): OS_TYPE ?= $(shell uname -s | tr '[:upper:]' '[:lower:]')
$(GITLAB_CHANGELOG): DOWNLOAD_URL = "https://storage.googleapis.com/gitlab-runner-tools/gitlab-changelog/$(GITLAB_CHANGELOG_VERSION)/gitlab-changelog-$(OS_TYPE)-amd64"
$(GITLAB_CHANGELOG):
	# Installing $(DOWNLOAD_URL) as $(GITLAB_CHANGELOG)
	@mkdir -p $(shell dirname $(GITLAB_CHANGELOG))
	@curl -sL "$(DOWNLOAD_URL)" -o "$(GITLAB_CHANGELOG)"
	@chmod +x "$(GITLAB_CHANGELOG)"

.PHONY: clean
clean:
	-$(RM) -rf $(TARGET_DIR)
	-$(RM) -rf tmp/gitlab-test

print_ldflags:
	@echo $(GO_LDFLAGS)

print_test_ldflags:
	@echo $(GO_TEST_LDFLAGS)

print_image_tags:
	@tags="$(REVISION)"; \
	[ "$(CI_PROJECT_PATH)" = "gitlab-org/gitlab-runner" ] && [ -n "$(CI_COMMIT_TAG)" ] && tags="$$tags $$CI_COMMIT_TAG"; \
	[ "$(IS_LATEST)" = "true" ] && tags="$$tags latest"; \
	[ "$(CI_PROJECT_PATH)" = "gitlab-org/gitlab-runner" ] && ( \
		[ "$(CI_COMMIT_BRANCH)" = "$(CI_DEFAULT_BRANCH)" ] || \
		echo "$(CI_COMMIT_REF_NAME)" | grep -Eq '^v[0-9]+\.[0-9]+\.[0-9]+-rc[0-9]+$$' \
	) && tags="$$tags bleeding"; \
	echo "$$tags"

.PHONY: tools
# Install dev tool and dependency binaries for local development.
tools: $(GITLAB_CHANGELOG) $(GOCOVER_COBERTURA) $(GOLANGLINT) $(GOLANGLINT_GOARGS) $(MAGE) $(MOCKERY) $(PROTOC) $(PROTOC_GEN_GO) $(PROTOC_GEN_GO_GRPC) $(RELEASE_INDEX_GENERATOR)

.PHONY: sync-updated-go-version
# Sync the go version in CI files to development docs and scripts
sync-updated-go-version:
	@echo "Updating Go version in documentation and scripts..."
	$(eval GO_VERSION := $(shell grep 'GO_VERSION:' .gitlab/ci/_common.gitlab-ci.yml | awk '{print $$2}' | tr -d '"'))
	@echo "Using Go version: $(GO_VERSION)"
	@sed -i.bak -E 's/go[0-9]+\.[0-9]+\.[0-9]+/go$(GO_VERSION)/g' docs/development/_index.md && rm docs/development/_index.md.bak
	@sed -i.bak -E 's/go-[0-9]+\.[0-9]+\.[0-9]+/go-$(GO_VERSION)/g' docs/development/_index.md && rm docs/development/_index.md.bak
	@sed -i.bak -E 's/\$$goVersion = "[0-9]+\.[0-9]+\.[0-9]+"/$$goVersion = "$(GO_VERSION)"/g' scripts/vagrant/provision/base.ps1 && rm scripts/vagrant/provision/base.ps1.bak
	@echo "Files updated with Go version $(GO_VERSION)"

================================================
FILE: Makefile.build.mk
================================================
# Runner binary build matrix: one out/binaries/<name>-<os>-<arch> target per
# supported platform.
BASE_BINARY_PATH := out/binaries/$(NAME)
BINARIES := ${BASE_BINARY_PATH}-linux-amd64
BINARIES += ${BASE_BINARY_PATH}-linux-arm64
BINARIES += ${BASE_BINARY_PATH}-linux-386
BINARIES += ${BASE_BINARY_PATH}-linux-arm
BINARIES += ${BASE_BINARY_PATH}-linux-s390x
BINARIES += ${BASE_BINARY_PATH}-linux-ppc64le
BINARIES += ${BASE_BINARY_PATH}-linux-riscv64
BINARIES += ${BASE_BINARY_PATH}-linux-loong64
BINARIES += ${BASE_BINARY_PATH}-darwin-amd64
BINARIES += ${BASE_BINARY_PATH}-darwin-arm64
BINARIES += ${BASE_BINARY_PATH}-freebsd-386
BINARIES += ${BASE_BINARY_PATH}-freebsd-amd64
BINARIES += ${BASE_BINARY_PATH}-freebsd-arm
BINARIES += ${BASE_BINARY_PATH}-windows-386.exe
BINARIES += ${BASE_BINARY_PATH}-windows-amd64.exe
BINARIES += ${BASE_BINARY_PATH}-windows-arm64.exe

.PHONY: runner-bin
runner-bin: $(BINARIES)

.PHONY: runner-bin-fips
runner-bin-fips: $(BASE_BINARY_PATH)-linux-amd64-fips

.PHONY: runner-images
runner-images: $(BINARIES)
runner-images: out/runner-images

# FIPS build: CGO enabled with the boringcrypto toolchain experiment, linux/amd64 only.
$(BASE_BINARY_PATH)-linux-amd64-fips: GOOS=linux
$(BASE_BINARY_PATH)-linux-amd64-fips: GOARCH=amd64
$(BASE_BINARY_PATH)-linux-amd64-fips:
	GOOS=$(GOOS) GOARCH=$(GOARCH) CGO_ENABLED=1 GOEXPERIMENT=boringcrypto go build -tags fips -ldflags "$(GO_LDFLAGS)" -o $@
# Generic cross-compile rule: GOOS/GOARCH are derived from the
# "<os>-<arch>" suffix of the target name.
$(BASE_BINARY_PATH)-%: GOOS=$(firstword $(subst -, ,$*))
$(BASE_BINARY_PATH)-%: GOARCH=$(lastword $(subst -, ,$(basename $*)))
$(BASE_BINARY_PATH)-%:
	GOOS="$(GOOS)" GOARCH="$(GOARCH)" go build -trimpath -ldflags "$(GO_LDFLAGS)" -o $@

.PHONY: runner-local-image
runner-local-image: export LOCAL_ARCH ?= $(shell go env GOARCH)
runner-local-image: export LOCAL_FLAVOR ?= alpine-latest
runner-local-image: export RUNNER_IMAGES_VERSION ?= $(shell grep "RUNNER_IMAGES_VERSION:" .gitlab/ci/_common.gitlab-ci.yml | awk -F': ' '{ print $$2 }' | tr -d '"')
runner-local-image: runner-bin-linux
	cd dockerfiles/runner && docker buildx bake --progress plain local-image

.PHONY: runner-and-helper-local-image
runner-and-helper-local-image: runner-local-image helper-local-image

out/runner-images: TARGETS ?= ubuntu alpine
out/runner-images:
	docker buildx create --name builder --use --driver docker-container default || true
	mkdir -p out/runner-images
	cd dockerfiles/runner && docker buildx bake --progress plain $(TARGETS)

# sed program mapping `uname -m` output to Go architecture names.
ARCH_REPLACE="s/aarch64/arm64/ ; s/armv7l/arm/ ; s/x86_64/amd64/ ; s/i386/386/ ; s/loongarch64/loong64/"

runner-bin-host: OS := $(shell uname -s | tr '[:upper:]' '[:lower:]')
runner-bin-host: ARCH := $(shell uname -m | sed $(ARCH_REPLACE))
runner-bin-host:
	$(MAKE) ${BASE_BINARY_PATH}-${OS}-$(ARCH)

runner-bin-linux: OS := 'linux'
runner-bin-linux: ARCH := $(shell uname -m | sed $(ARCH_REPLACE))
runner-bin-linux:
	$(MAKE) ${BASE_BINARY_PATH}-${OS}-$(ARCH)

runner-and-helper-bin-host: runner-bin-host helper-bin-host
runner-and-helper-bin-linux: runner-bin-linux helper-images prebuilt-helper-images
runner-and-helper-bin: runner-bin helper-images prebuilt-helper-images

runner-and-helper-deb-host: ARCH := $(shell uname -m | sed $(ARCH_REPLACE))
runner-and-helper-deb-host: export BUILD_ARCHS := -arch '$(ARCH)'
runner-and-helper-deb-host: PACKAGE_ARCH := $(shell uname -m | sed $(ARCH_REPLACE))
runner-and-helper-deb-host: runner-and-helper-bin-host
	$(MAGE) package:deps package:prepare
	$(MAKE) package-deb-arch ARCH=$(ARCH) PACKAGE_ARCH=$(PACKAGE_ARCH)

runner-and-helper-rpm-host: ARCH := $(shell uname -m | sed $(ARCH_REPLACE))
runner-and-helper-rpm-host: export BUILD_ARCHS := -arch '$(ARCH)'
runner-and-helper-rpm-host: PACKAGE_ARCH := $(shell uname -m | sed $(ARCH_REPLACE))
runner-and-helper-rpm-host: runner-and-helper-bin-host
	$(MAGE) package:deps package:prepare
	$(MAKE) package-rpm-arch ARCH=$(ARCH) PACKAGE_ARCH=$(PACKAGE_ARCH)

UNIX_ARCHS_CHECK ?= aix/ppc64 android/amd64 dragonfly/amd64 freebsd/amd64 hurd/amd64 illumos/amd64 linux/riscv64 linux/loong64 netbsd/amd64 openbsd/amd64 solaris/amd64

# runner-unix-check compiles against various unix OSs that we don't officially support. This is not used
# as part of any CI job at the moment, but is to be used locally to easily determine what currently compiles.
runner-unix-check:
	$(MAKE) $(foreach OSARCH,$(UNIX_ARCHS_CHECK),runner-unix-check-arch-$(subst /,-,$(OSARCH)))

runner-unix-check-arch-%:
	GOOS=$(subst -, GOARCH=,$(subst runner-unix-check-arch-,,$@)) go build -o /dev/null || true

================================================
FILE: Makefile.runner_helper.mk
================================================
# -------------------------------------------------------------------------------
# The following make file does two things:
# 1. Create binaries for the gitlab-runner-helper app which can be found in
#    `./apps/gitlab-runner-helper` for all the platforms we want to support.
# 2. Create Linux containers and extract their file system to be used later to
#    build/publish.
#
# If you want to add a new arch or OS you would need to add a new
# file path to the $BINARIES variables and a new GO_ARCH_{{arch}}-{{OS}}
# variable. Note that Linux is implied by default.
# ---------------------------------------------------------------------------

# Binaries that we support for the helper image. We are using the following
# pattern match:
# out/binaries/gitlab-runner-helper/gitlab-runner-helper.{{os}}-{{arch}}
BASE_BINARY_PATH := out/binaries/gitlab-runner-helper/gitlab-runner-helper
BINARIES := ${BASE_BINARY_PATH}.windows-amd64.exe
BINARIES += ${BASE_BINARY_PATH}.linux-amd64
BINARIES += ${BASE_BINARY_PATH}.linux-arm
BINARIES += ${BASE_BINARY_PATH}.linux-arm64
BINARIES += ${BASE_BINARY_PATH}.linux-s390x
BINARIES += ${BASE_BINARY_PATH}.linux-ppc64le
BINARIES += ${BASE_BINARY_PATH}.linux-riscv64
BINARIES += ${BASE_BINARY_PATH}.linux-loong64
BINARIES += ${BASE_BINARY_PATH}.linux-amd64-fips

# Go files that are used to create the helper binary.
HELPER_GO_FILES ?= $(shell find apps/gitlab-runner-helper commands common log network -name '*.go')

# Used in the helper-bin-linux target for building a
# local docker image. If set as a target-specific variable,
# it isn't in place to impact the name of the prerequisite,
# which results in a prereq of ${BASE_BINARY_PATH}.linux-
# which in turn gets interpreted as GOOS=linux, GOARCH=linux
LOCAL_ARCH ?= $(shell go env GOARCH)

# Build the Runner Helper binaries for the host platform.
.PHONY: helper-bin-host
helper-bin-host: ${BASE_BINARY_PATH}.$(shell go env GOOS)-$(shell go env GOARCH)

# Build the Runner Helper binaries for the linux OS and host architecture.
.PHONY: helper-bin-linux
helper-bin-linux: ${BASE_BINARY_PATH}.linux-$(LOCAL_ARCH)

# Build the Runner Helper binaries for all supported platforms.
.PHONY: helper-bin
helper-bin: $(BINARIES)

.PHONY: helper-bin-fips
helper-bin-fips: ${BASE_BINARY_PATH}.linux-amd64-fips

.PHONY: helper-images
helper-images: $(BINARIES)
helper-images: out/helper-images

.PHONY: helper-local-image
helper-local-image: export LOCAL_ARCH ?= $(shell go env GOARCH)
helper-local-image: export LOCAL_FLAVOR ?= alpine-latest
helper-local-image: export RUNNER_IMAGES_VERSION ?= $(shell grep "RUNNER_IMAGES_VERSION:" .gitlab/ci/_common.gitlab-ci.yml | awk -F': ' '{ print $$2 }' | tr -d '"')
helper-local-image: helper-bin-linux
	cd dockerfiles/runner-helper && docker buildx bake --progress plain local-image local-image-concrete

# Make sure the fips target is first since it's less general
${BASE_BINARY_PATH}.linux-amd64-fips: GOOS=linux
${BASE_BINARY_PATH}.linux-amd64-fips: GOARCH=amd64
${BASE_BINARY_PATH}.linux-amd64-fips: APP_NAME := "gitlab-runner-helper"
${BASE_BINARY_PATH}.linux-amd64-fips: $(HELPER_GO_FILES)
	GOOS=$(GOOS) GOARCH=$(GOARCH) CGO_ENABLED=1 GOEXPERIMENT=boringcrypto go build -tags fips -trimpath -ldflags "$(GO_LDFLAGS)" -o $@ $(PKG)/apps/gitlab-runner-helper

# Generic helper cross-compile rule; GOOS/GOARCH come from the
# ".{{os}}-{{arch}}" suffix of the target name.
$(BASE_BINARY_PATH)-%: GOOS=$(firstword $(subst -, ,$*))
$(BASE_BINARY_PATH)-%: GOARCH=$(lastword $(subst -, ,$(basename $*)))
$(BASE_BINARY_PATH)-%: APP_NAME := "gitlab-runner-helper"
${BASE_BINARY_PATH}.%: $(HELPER_GO_FILES)
	GOOS="$(GOOS)" GOARCH="$(GOARCH)" go build -trimpath -ldflags "$(GO_LDFLAGS)" -o $@ $(PKG)/apps/gitlab-runner-helper

out/helper-images: TARGETS ?= alpine alpine-pwsh ubuntu ubuntu-pwsh
out/helper-images:
	docker buildx create --name builder --use --driver docker-container default || true
	mkdir -p out/helper-images
	cd dockerfiles/runner-helper && docker buildx bake --progress plain $(TARGETS)

.PHONY: prebuilt-helper-images
prebuilt-helper-images: ALPINE_DEFAULT_VERSION="-latest"
prebuilt-helper-images:
	@find out/helper-images -maxdepth 1 -name "*.tar" | parallel -j$(shell nproc) './ci/prebuilt_helper_image {}'
	@for file in out/helper-images/prebuilt-alpine$(ALPINE_DEFAULT_VERSION)-*.tar.xz; do \
		if [ -e "$${file}" ]; then \
			target=$$(echo "$${file}" | sed -e 's/'$(ALPINE_DEFAULT_VERSION)'//'); \
			cp "$${file}" "$${target}"; \
		fi; \
	done

================================================
FILE: NOTICE
================================================
With regard to the GitLab Software:

The MIT License (MIT)

Copyright (c) 2015-2019 GitLab B.V.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

---

For all third party components incorporated into the GitLab Software, those
components are licensed under the original license provided by the owner of
the applicable component.

---

All Documentation content that resides under the docs/ directory of this
repository is licensed under Creative Commons: CC BY-SA 4.0.

================================================
FILE: PROCESS.md
================================================
## GitLab core team & GitLab Inc.
contribution process --- **Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)* - [Be kind](#be-kind) - [Feature freeze on the 7th for the release on the 22nd](#feature-freeze-on-the-7th-for-the-release-on-the-22nd) - [Between the 1st and the 7th](#between-the-1st-and-the-7th) - [What happens if these deadlines are missed?](#what-happens-if-these-deadlines-are-missed) - [On the 7th](#on-the-7th) - [After the 7th](#after-the-7th) - [Asking for an exception](#asking-for-an-exception) - [Bugs](#bugs) - [Regressions](#regressions) - [Managing bugs](#managing-bugs) - [Supported releases](#supported-releases) - [Releasing GitLab Runner](#releasing-gitlab-runner) - [Security release](#security-release) - [Renew expired GPG key](#renew-expired-gpg-key) - [Copy & paste responses](#copy--paste-responses) - [Improperly formatted issue](#improperly-formatted-issue) - [Issue report for old version](#issue-report-for-old-version) - [Support requests and configuration questions](#support-requests-and-configuration-questions) - [Code format](#code-format) - [Issue fixed in newer version](#issue-fixed-in-newer-version) - [Improperly formatted merge request](#improperly-formatted-merge-request) - [Accepting merge requests](#accepting-merge-requests) - [Only accepting merge requests with green tests](#only-accepting-merge-requests-with-green-tests) --- ## Be kind Be kind to people trying to contribute. Be aware that people may be a non-native English speaker, they might not understand things or they might be very sensitive as to how you word things. Use Emoji to express your feelings (heart, star, smile, etc.). Some good tips about code reviews can be found in our [Code Review Guidelines]. 
[Code Review Guidelines]: https://docs.gitlab.com/development/code_review/ ## Feature freeze on the 7th for the release on the 22nd After 7th at 23:59 (Pacific Time Zone) of each month, stable branch and RC1 of the upcoming release (to be shipped on the 22nd) is created and deployed to GitLab.com. The stable branch is frozen at the most recent "qualifying commit" on `main`. A "qualifying commit" is one that is pushed before the feature freeze cutoff time and that passes all CI jobs (green pipeline). Merge requests may still be merged into `main` during this period, but they will go into the _next_ release, unless they are manually cherry-picked into the stable branch. By freezing the stable branches 2 weeks prior to a release, we reduce the risk of a last minute merge request potentially breaking things. Any release candidate that gets created after this date can become a final release, hence the name release candidate. ### Between the 1st and the 7th These types of merge requests for the upcoming release need special consideration: - **Large features**: a large feature is one that is highlighted in the kick-off and the release blogpost; typically this will have its own channel in Slack and a dedicated team with front-end, back-end, and UX. - **Small features**: any other feature request. It is strongly recommended that **large features** be with a maintainer **by the 1st**. This means that: - There is a merge request (even if it's WIP). - The person (or people, if it needs a frontend and backend maintainer) who will ultimately be responsible for merging this have been pinged on the MR. It's OK if merge request isn't completely done, but this allows the maintainer enough time to make the decision about whether this can make it in before the freeze. If the maintainer doesn't think it will make it, they should inform the developers working on it and the Product Manager responsible for the feature. 
The maintainer can also choose to assign a reviewer to perform an initial review, but this way the maintainer is unlikely to be surprised by receiving an MR later in the cycle. It is strongly recommended that **small features** be with a reviewer (not necessarily a maintainer) **by the 3rd**. Most merge requests from the community do not have a specific release target. However, if one does and falls into either of the above categories, it's the reviewer's responsibility to manage the above communication and assignment on behalf of the community member. Every new feature or change should be shipped with its corresponding documentation in accordance with the [documentation process](https://docs.gitlab.com/development/documentation/feature-change-workflow/) and [structure](https://docs.gitlab.com/development/documentation/topic_types/) guides. Note that a technical writer will review all changes to documentation. This can occur in the same MR as the feature code, but [if there is not sufficient time or need, it can be planned via a follow-up issue for doc review](https://docs.gitlab.com/development/documentation/workflow/#post-merge-reviews), and another MR, if needed. Regardless, complete docs must be merged with code by the freeze. #### What happens if these deadlines are missed? If a small or large feature is _not_ with a maintainer or reviewer by the recommended date, this does _not_ mean that maintainers or reviewers will refuse to review or merge it, or that the feature will definitely not make it in before the feature freeze. However, with every day that passes without review, it will become more likely that the feature will slip, because maintainers and reviewers may not have enough time to do a thorough review, and developers may not have enough time to adequately address any feedback that may come back. 
A maintainer or reviewer may also determine that it will not be possible to finish the current scope of the feature in time, but that it is possible to reduce the scope so that something can still ship this month, with the remaining scope moving to the next release. The sooner this decision is made, in conversation with the Product Manager and developer, the more time there is to extract that which is now out of scope, and to finish that which remains in scope. For these reasons, it is strongly recommended to follow the guidelines above, to maximize the chances of your feature making it in before the feature freeze, and to prevent any last minute surprises. ### On the 7th Merge requests should still be complete, following the [definition of done](https://docs.gitlab.com/development/contributing/merge_request_workflow/#definition-of-done). If a merge request is not ready, but the developers and Product Manager responsible for the feature think it is essential that it is in the release, they can [ask for an exception](#asking-for-an-exception) in advance. This is preferable to merging something that we are not confident in, but should still be a rare case: most features can be allowed to slip a release. ### After the 7th Once the stable branch is frozen, the only MRs that can be cherry-picked into the stable branch are: - Fixes for [regressions](#regressions) where the affected version `xx.x` in `regression:xx.x` is the current release. See [Managing bugs](#managing-bugs) section. - Fixes for security issues. - Fixes or improvements to automated QA scenarios. - [Documentation improvements](https://docs.gitlab.com/development/documentation/workflow/) for feature changes made in the same release, though initial docs for these features should have already been merged by the freeze, as required. - New or updated translations (as long as they do not touch application code). - Changes that are behind a feature flag and have the ~"feature flag" label. 
During the feature freeze all merge requests that are meant to go into the upcoming release should have the correct milestone assigned _and_ the `Pick into X.Y` label where `X.Y` is equal to the milestone, so that release managers can find and pick them. Merge requests without this label will not be picked into the stable release. For example, if the upcoming release is `10.2.0` you will need to set the `Pick into 10.2` label. Fixes marked like this will be shipped in the next RC (before the 22nd), or the next patch release. If a merge request is to be picked into more than one release it will need one `Pick into X.Y` label per release where the merge request should be back-ported to. For example: - `Pick into 10.1` - `Pick into 10.0` - `Pick into 9.5` ### Asking for an exception If you think a merge request should go into an RC or patch even though it does not meet these requirements, you can ask for an exception to be made, by opening an isssue and tagging the Release Manager. To find out who the current Release Manager is find the latest release checklist inside the issue tracker with the ~release label. For example [this issues](https://gitlab.com/gitlab-org/gitlab-runner/issues/4333) specifies that `@tmaczukin` is the release manager for 12.0. ## Bugs A ~bug is a defect, error, failure which causes the system to behave incorrectly or prevents it from fulfilling the product requirements. The level of impact of a ~bug can vary from blocking a whole functionality or a feature usability bug. A bug should always be linked to a severity level. Refer to our [severity levels](https://docs.gitlab.com/development/labels/#severity-labels) Whether the bug is also a regression or not, the triage process should start as soon as possible. Ensure that the Engineering Manager and/or the Product Manager for the relative area is involved to prioritize the work as needed. ### Regressions A ~regression implies that a previously **verified working functionality** no longer works. 
Regressions are a subset of bugs. We use the ~regression label to imply that the defect caused the functionality to regress. The label tells us that something worked before and it needs extra attention from Engineering and Product Managers to schedule/reschedule. The regression label does not apply to ~bugs for new features for which functionality was **never verified as working**. These, by definition, are not regressions. A regression should always have the `regression:xx.x` label on it to designate when it was introduced. Regressions should be considered high priority issues that should be solved as soon as possible, especially if they have severe impact on users. ### Managing bugs **Prioritization:** We give higher priority to regressions on features that worked in the last recent monthly release and the current release candidates, for example: - A regression which worked in the **Last monthly release** - **Example:** In 11.0 we released a new `feature X` that is verified as working. Then in release 11.1 the feature no longer works, this is regression for 11.1. The issue should have the `regression:11.1` label. - *Note:* When we say `the last recent monthly release`, this can refer to either the version currently running on GitLab.com, or the most recent version available in the package repositories. - A regression which worked in the **Current release candidates** - **Example:** In 11.1-RC3 we shipped a new feature which has been verified as working. Then in 11.1-RC5 the feature no longer works, this is regression for 11.1. The issue should have the `regression:11.1` label. - *Note:* Because GitLab.com runs release candidates of new releases, a regression can be reported in a release before its 'official' release date on the 22nd of the month. When a bug is found: 1. Create an issue describing the problem in the most detailed way possible. 1. If possible, provide links to real examples and how to reproduce the problem. 1. 
Label the issue properly, using the [team label](https://docs.gitlab.com/development/labels/#team-labels), the [subject label](https://docs.gitlab.com/development/contributing/issue_workflow/#subject-labels) and any other label that may apply in the specific case 1. Notify the respective Engineering Manager to evaluate and apply the [Severity label](https://docs.gitlab.com/development/labels/#severity-labels) and [Priority label](https://docs.gitlab.com/development/labels/#priority-labels). The counterpart Product Manager is included to weigh-in on prioritization as needed. 1. If the ~bug is **NOT** a regression: 1. The Engineering Manager decides which milestone the bug will be fixed. The appropriate milestone is applied. 1. If the bug is a ~regression: 1. Determine the release that the regression affects and add the corresponding `regression:xx.x` label. 1. If the affected release version can't be determined, add the generic ~regression label for the time being. 1. If the affected version `xx.x` in `regression:xx.x` is the **current release**, it's recommended to schedule the fix for the current milestone. 1. This falls under regressions which worked in the last release and the current RCs. More detailed explanations in the **Prioritization** section above. 1. If the affected version `xx.x` in `regression:xx.x` is older than the **current release** 1. If the regression is an ~S1 severity, it's recommended to schedule the fix for the current milestone. We would like to fix the highest severity regression as soon as we can. 1. If the regression is an ~S2, ~S3 or ~S4 severity, the regression may be scheduled for later milestones at the discretion of the Engineering Manager and Product Manager. ## Supported releases The _last three releases_ are supported. Meaning if the latest version is `11.11`, the supported versions are `11.11`, `11.10`, `11.9` Each support requests for previous versions will be closed with a ~wontfix label. 
**What is supported?** By the _release support_ we understand: - fixes for security bugs - fixes for other bugs - requests for documentation - questions of type _"How can I ...?"_ related to a supported version Proposals for new features or improvements are welcome, but will be not prepared for supported releases. Instead - if we decide to implement them - they will be planned for one of the upcoming releases. ## Releasing GitLab Runner All the technical details of how the Runner is released can be found in the [Release Checklist](https://gitlab.com/gitlab-org/ci-cd/runner-release-helper/-/tree/main/templates/issues) which is split into multiple templates. ### Security release In addition to the Release Manager, the security process involves many other people and roles. We follow the GitLab Security process with the following exceptions. - [Overview](https://gitlab.com/gitlab-org/release/docs/-/blob/master/general/security/process.md) - To create the release task issue, we use a different command than `/chatops run release prepare --security`. - [Developer](https://gitlab.com/gitlab-org/release/docs/-/blob/master/general/security/developer.md) - For mentions of `gitlab-org/gitlab` assume `gitlab-org/gitlab-runner` and for `gitlab-org/security/gitlab` assume `gitlab-org/security/gitlab-runner`. - We have our own [Security Implementation Issue](https://gitlab.com/gitlab-org/release/docs/-/blob/master/general/security/developer.md#security-implementation-issue) that can be found [here](https://gitlab.com/gitlab-org/security/gitlab-runner/-/issues/new?issuable_template=Security+developer+workflow). 
- [Release Manager](https://gitlab.com/gitlab-org/release/docs/-/blob/master/general/security/release-manager.md) - To create the security release task, run this command: ```shell # Using rrhelper https://gitlab.com/gitlab-org/ci-cd/runner-release-helper # $LINK_TO_MAIN_RELEASE_ISSUE can found in the #releases slack channel rrhelper create-security-release-checklist --runner-tags 13.2.2,13.1.2,13.0.2 --helm-tags 0.19.2,0.18.2,0.17.2 --project-id 250833 --security-url $LINK_TO_MAIN_RELEASE_ISSUE` ``` - [Security Engineer](https://gitlab.com/gitlab-org/release/docs/-/blob/master/general/security/security-engineer.md) - The Runner Application Security Engineer part is listed [here](https://about.gitlab.com/handbook/product/product-categories/#runner-group). ## Renew expired GPG key We sign all of our packages with GPG, and this key is short-lived (1 year) so every year we have to renew it. For this, we have a tool called [Key expiration wrapper](https://gitlab.com/gitlab-org/ci-cd/runner-tools/key-expiration-wrapper) that documents and automates the process. ## Copy & paste responses ### Improperly formatted issue ``` Thank you for the issue report. Please reformat your issue to conform to the [contribution guidelines](https://docs.gitlab.com/development/contributing/issue_workflow/#issue-tracker-guidelines). ``` ### Issue report for old version ``` Thank you for the issue report. We only support issues for the latest stable version of GitLab. I'm closing this issue, however if you still experience this problem in the latest stable version, please open a new issue (and please reference the old issue(s)). Make sure to also include the necessary debugging information conforming to the issue tracker guidelines found in our [contribution guidelines](https://docs.gitlab.com/development/contributing/issue_workflow/#issue-tracker-guidelines). ``` ### Support requests and configuration questions ``` Thank you for your interest in GitLab. 
We don't use the issue tracker for support requests and configuration questions. Please check our [Support](https://about.gitlab.com/support/) page to see all of the available support options. Also, have a look at the [contribution guidelines](https://docs.gitlab.com/development/contributing/) for more information. You can read more about this policy in our [README.md](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/README.md#closing-issues) ``` ### Code format ``` Please enclose console output, logs, and code in backticks (`` ` ``), as it's very hard to read otherwise. For more information, read our [guide on code and codeblocks in markdown](https://docs.gitlab.com/development/documentation/styleguide/#code-blocks) ``` ### Issue fixed in newer version ``` Thank you for the issue report. This issue has already been fixed in newer versions of GitLab. Due to the size of this project and our limited resources we are only able to support the latest stable release as outlined in our [contribution guidelines](https://docs.gitlab.com/development/contributing/issue_workflow/). In order to get this bug fix and enjoy many new features please [upgrade](https://gitlab.com/gitlab-org/gitlab-ce/tree/master/doc/update). If you still experience issues at that time, please open a new issue following our issue tracker guidelines found in the [contribution guidelines](https://docs.gitlab.com/development/contributing/issue_workflow/#issue-tracker-guidelines). ``` ### Improperly formatted merge request ``` Thanks for your interest in improving the GitLab codebase! Please update your merge request according to the [contribution guidelines](https://gitlab.com/gitlab-org/gitlab-ce/blob/master/doc/development/contributing/merge_request_workflow.md#merge-request-guidelines). ``` ### Accepting merge requests ``` Is there an issue on the [issue tracker](https://gitlab.com/gitlab-org/gitlab-ce/issues) that is similar to this? Could you please link it here? 
Please be aware that new functionality that is not marked [`Accepting merge requests`](https://docs.gitlab.com/development/labels/#label-for-community-contributors) might not make it into GitLab. ``` ### Only accepting merge requests with green tests ``` We can only accept a merge request if all the tests are green. I've just restarted the build. If the tests are still not green after this restart and you're sure that is does not have anything to do with your code changes, please rebase with main to see if that solves the issue. ``` ================================================ FILE: PROVENANCE.md ================================================ # GitLab CI provenance This is an official [SLSA Provenance](https://slsa.dev/provenance/v1) `buildType` that describes the execution of a GitLab [CI/CD job](https://docs.gitlab.com/ci/jobs/). This definition is hosted and maintained by GitLab. When enabled with the `RUNNER_GENERATE_ARTIFACTS_METADATA` CI/CD variable, the runner produces [SLSA provenance v1.0](https://slsa.dev/spec/v1.0/provenance) statements. ## Description ```jsonc "buildType": "https://gitlab.com/gitlab-org/gitlab-runner/-/blob/{GITLAB_RUNNER_VERSION}/PROVENANCE.md" ``` This `buildType` describes the execution of a workflow that builds a software artifact. > [!note] > Consumers should ignore unrecognized external parameters. Any changes must > not change the semantics of existing external parameters. ## Build Definition ### Internal and external parameters Both internal and external parameters are documented in the [Configuring runners documentation](https://docs.gitlab.com/ci/runners/configure_runners/#provenance-metadata-format). An example provenance statement can also be found in that page. 
================================================ FILE: Procfile ================================================ web: gitlab-runner run-single -addr=":$PORT" -builds-dir="/tmp" ================================================ FILE: README.md ================================================ # GitLab Runner This is the repository of the official GitLab Runner written in Go. It executes tests and sends the results to GitLab. [GitLab CI](https://about.gitlab.com/gitlab-ci) is the open-source continuous integration service included with GitLab that coordinates the testing. The old name of this project was GitLab CI Multi Runner but please use "GitLab Runner" (without CI) from now on. [![Pipeline Status](https://gitlab.com/gitlab-org/gitlab-runner/badges/main/pipeline.svg)](https://gitlab.com/gitlab-org/gitlab-runner/commits/main) [![Go Report Card](https://goreportcard.com/badge/gitlab.com/gitlab-org/gitlab-runner)](https://goreportcard.com/report/gitlab.com/gitlab-org/gitlab-runner) ## Runner and GitLab CE/EE compatibility For a list of compatible versions between GitLab and GitLab Runner, consult the [compatibility section](https://docs.gitlab.com/runner/#gitlab-runner-versions). ## Release process The description of the release process for the GitLab Runner project can be found in [`PROCESS.md`](PROCESS.md). ## Contributing Contributions are welcome, see [`CONTRIBUTING.md`](CONTRIBUTING.md) for more details. ### Closing issues GitLab is growing very fast and we have limited resources to deal with issues opened by community volunteers. We appreciate all the contributions coming from our community, but we need to create some closing policy to help all of us with issue management. The issue tracker is not used for support or configuration questions. We have dedicated [channels](https://about.gitlab.com/support/) for these kinds of questions. 
The issue tracker should only be used for feature requests, bug reports, and other tasks that need to be done for the Runner project. It is up to a project maintainer to decide if an issue is actually a support/configuration question. Before closing the issue the maintainer should leave a reason why this is a support/configuration question, to make it clear to the issue author. They should also leave a comment using [our template](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/PROCESS.md#support-requests-and-configuration-questions) before closing the issue. The issue author has every right to disagree and reopen the issue for further discussion. ## Documentation The documentation source files can be found under the [docs/](docs/) directory. You can read the documentation online at . ## Features [Read about the features of GitLab Runner.](https://docs.gitlab.com/runner/#features) ## Executors compatibility chart [Read about what options each executor can offer.](https://docs.gitlab.com/runner/executors/#compatibility-chart) ## Install GitLab Runner Visit the [installation documentation](https://docs.gitlab.com/runner/install/). ## Use GitLab Runner See [https://docs.gitlab.com/runner/commands/](https://docs.gitlab.com/runner/commands/). ## Select executor See [https://docs.gitlab.com/runner/executors/#selecting-the-executor](https://docs.gitlab.com/runner/executors/#selecting-the-executor). ## Troubleshooting Read the [FAQ](https://docs.gitlab.com/runner/faq/). ## Advanced Configuration See [https://docs.gitlab.com/runner/configuration/advanced-configuration/](https://docs.gitlab.com/runner/configuration/advanced-configuration/). ## Building and development See [https://docs.gitlab.com/runner/development/](https://docs.gitlab.com/runner/development/). ## Changelog Visit the [Changelog](CHANGELOG.md) to view recent changes. ## The future - Please see the [GitLab Direction page](https://about.gitlab.com/direction/). 
# Returns the Vagrant box version pinned for every Windows VM defined below.
#
# The version is pinned because Docker releases newer than 19.03.05 are
# broken in these boxes
# (see https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27115).
def get_vm_box_version()
  pinned_box_version = '2020.04.15'
  pinned_box_version
end
// main is the entry point of the gitlab-runner-helper binary. It builds the
// urfave/cli application, registers the helper sub-commands and runs the
// command requested on the command line.
func main() {
	defer func() {
		if r := recover(); r != nil {
			// logrus Panic-level logging panics with a *logrus.Entry; the
			// message has already been logged at that point, so exit
			// non-zero instead of printing a stack trace.
			if _, ok := r.(*logrus.Entry); ok {
				os.Exit(1)
			}
			// Any other panic value is a genuine programming error: re-panic.
			panic(r)
		}
	}()

	app := cli.NewApp()
	// Use the on-disk binary name so renamed copies identify themselves correctly.
	app.Name = filepath.Base(os.Args[0])
	app.Usage = "a GitLab Runner Helper"
	app.Version = common.AppVersion.ShortLine()
	cli.VersionPrinter = common.AppVersion.Printer
	app.Authors = []cli.Author{
		{
			Name:  "GitLab Inc.",
			Email: "support@gitlab.com",
		},
	}
	app.Commands = newCommands()
	app.CommandNotFound = func(context *cli.Context, command string) {
		logrus.Fatalln("Command", command, "not found")
	}

	log.ConfigureLogging(app)

	if err := app.Run(os.Args); err != nil {
		logrus.Fatal(err)
	}
}
// PresignedURL is a ready-to-use URL for accessing a single cache object,
// together with any HTTP headers that must accompany the request.
type PresignedURL struct {
	URL     *url.URL
	Headers http.Header
}
Environment map[string]string
}

// Adapter produces access URLs (pre-signed or GoCloud) for a single cache object.
type Adapter interface {
	GetDownloadURL(context.Context) PresignedURL
	GetHeadURL(context.Context) PresignedURL
	WithMetadata(map[string]string)
	GetUploadURL(context.Context) PresignedURL
	GetGoCloudURL(ctx context.Context, upload bool) (GoCloudURL, error)
}

// Factory constructs an Adapter for the given cache config, operation timeout
// and object name.
type Factory func(config *cacheconfig.Config, timeout time.Duration, objectName string) (Adapter, error)

// FactoriesMap is a mutex-guarded registry of adapter factories keyed by
// cache type name (e.g. "azure", "s3").
type FactoriesMap struct {
	internal map[string]Factory
	lock     sync.Mutex
}

// Register adds factory under typeName; it fails if the name is already taken.
func (m *FactoriesMap) Register(typeName string, factory Factory) error {
	m.lock.Lock()
	defer m.lock.Unlock()

	// Lazily create the map so the zero value of FactoriesMap is usable.
	if len(m.internal) == 0 {
		m.internal = make(map[string]Factory)
	}

	_, ok := m.internal[typeName]
	if ok {
		return fmt.Errorf("adapter %q already registered", typeName)
	}

	m.internal[typeName] = factory

	return nil
}

// Find returns the factory registered under typeName, or an error if none exists.
func (m *FactoriesMap) Find(typeName string) (Factory, error) {
	m.lock.Lock()
	defer m.lock.Unlock()

	factory := m.internal[typeName]
	if factory == nil {
		return nil, fmt.Errorf("factory for cache adapter %q was not registered", typeName)
	}

	return factory, nil
}

var factories = &FactoriesMap{}

// Factories returns the process-wide adapter factory registry; adapter
// packages register themselves into it from init().
func Factories() *FactoriesMap {
	return factories
}

var (
	collectorsMu sync.Mutex
	collectors   []prometheus.Collector
)

// RegisterCollector registers a prometheus.Collector for a cache adapter.
// It is intended to be called from init() functions in cache adapter packages.
func RegisterCollector(c prometheus.Collector) {
	collectorsMu.Lock()
	defer collectorsMu.Unlock()
	collectors = append(collectors, c)
}

// Collectors returns all prometheus.Collectors registered by cache adapters.
func Collectors() []prometheus.Collector {
	collectorsMu.Lock()
	defer collectorsMu.Unlock()
	return collectors
}

// getCreateAdapter looks up the factory for cacheConfig.Type and uses it to
// build an adapter for the given object.
func getCreateAdapter(cacheConfig *cacheconfig.Config, timeout time.Duration, objectName string) (Adapter, error) {
	create, err := Factories().Find(cacheConfig.Type)
	if err != nil {
		return nil, fmt.Errorf("cache factory not found: %w", err)
	}

	adapter, err := create(cacheConfig, timeout, objectName)
	if err != nil {
		return nil, fmt.Errorf("cache adapter could not be initialized: %w", err)
	}

	return adapter, nil
}
================================================ FILE: cache/adapter_test.go ================================================
//go:build !integration

package cache

import (
	"errors"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

var defaultTimeout = 1 * time.Hour

type factorizeTestCase struct {
	adapter          Adapter
	errorOnFactorize error
	expectedError    string
	expectedAdapter  Adapter
}

// prepareMockedFactoriesMap swaps the global registry for a fresh one and
// returns a cleanup function that restores the original.
func prepareMockedFactoriesMap() func() {
	oldFactories := factories
	factories = &FactoriesMap{}

	return func() {
		factories = oldFactories
	}
}

// makeTestFactory builds a Factory that returns the test case's canned
// adapter or error.
func makeTestFactory(test factorizeTestCase) Factory {
	return func(config *cacheconfig.Config, timeout time.Duration, objectName string) (Adapter, error) {
		if test.errorOnFactorize != nil {
			return nil, test.errorOnFactorize
		}

		return test.adapter, nil
	}
}

func TestCreateAdapter(t *testing.T) {
	adapterMock := NewMockAdapter(t)

	tests := map[string]factorizeTestCase{
		"adapter doesn't exist": {
			adapter:          nil,
			errorOnFactorize: nil,
			expectedAdapter:  nil,
			expectedError:    `cache factory not found: factory for cache adapter \"test\" was not registered`,
		},
		"adapter exists": {
			adapter:          adapterMock,
			errorOnFactorize: nil,
			expectedAdapter:  adapterMock,
			expectedError:    "",
		},
		"adapter errors on factorize": {
			adapter:          adapterMock,
			errorOnFactorize: errors.New("test error"),
			expectedAdapter:  nil,
			expectedError:    `cache adapter could not be initialized: test error`,
		},
	}

	for name, test :=
range tests {
		t.Run(name, func(t *testing.T) {
			cleanupFactoriesMap := prepareMockedFactoriesMap()
			defer cleanupFactoriesMap()

			adapterTypeName := "test"

			if test.adapter != nil {
				err := factories.Register(adapterTypeName, makeTestFactory(test))
				assert.NoError(t, err)
			}

			// Register a second adapter to confirm lookup picks the right one.
			_ = factories.Register(
				"additional-adapter",
				func(config *cacheconfig.Config, timeout time.Duration, objectName string) (Adapter, error) {
					return NewMockAdapter(t), nil
				})

			config := &cacheconfig.Config{
				Type: adapterTypeName,
			}

			adapter, err := getCreateAdapter(config, defaultTimeout, "key")

			if test.expectedError == "" {
				assert.NoError(t, err)
			} else {
				assert.Error(t, err)
			}

			assert.Equal(t, test.expectedAdapter, adapter)
		})
	}
}

// TestDoubledRegistration verifies that registering the same type name twice
// fails and leaves the registry unchanged.
func TestDoubledRegistration(t *testing.T) {
	adapterTypeName := "test"
	fakeFactory := func(config *cacheconfig.Config, timeout time.Duration, objectName string) (Adapter, error) {
		return nil, nil
	}

	f := &FactoriesMap{}

	err := f.Register(adapterTypeName, fakeFactory)
	assert.NoError(t, err)
	assert.Len(t, f.internal, 1)

	err = f.Register(adapterTypeName, fakeFactory)
	assert.Error(t, err)
	assert.Len(t, f.internal, 1)
}
================================================ FILE: cache/azure/adapter.go ================================================
package azure

import (
	"context"
	"fmt"
	"net/http"
	"net/url"
	"strings"
	"time"

	"github.com/sirupsen/logrus"

	"gitlab.com/gitlab-org/gitlab-runner/cache"
	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

// signedURLGenerator and blobTokenGenerator are injection points so tests can
// stub out SAS generation.
type signedURLGenerator func(ctx context.Context, name string, options *signedURLOptions) (*url.URL, error)
type blobTokenGenerator func(ctx context.Context, name string, options *signedURLOptions) (string, error)

// azureAdapter implements cache.Adapter for Azure Blob Storage.
type azureAdapter struct {
	timeout             time.Duration
	config              *cacheconfig.CacheAzureConfig
	objectName          string
	generateSignedURL   signedURLGenerator
	blobTokenGenerator  blobTokenGenerator
	credentialsResolver credentialsResolver
}

// GetDownloadURL returns a blank value because we use GoCloud to handle the download.
func (a *azureAdapter) GetDownloadURL(ctx context.Context) cache.PresignedURL {
	return cache.PresignedURL{}
}

// GetHeadURL returns a blank value because we use GoCloud to handle existence checks.
func (a *azureAdapter) GetHeadURL(ctx context.Context) cache.PresignedURL {
	return cache.PresignedURL{}
}

// GetUploadURL returns a blank value because uploading via a pre-signed URL is
// limited to 5 MB (https://learn.microsoft.com/en-us/rest/api/storageservices/put-blob-from-url?tabs=microsoft-entra-id).
// We depend on GoCloud to handle the upload.
func (a *azureAdapter) GetUploadURL(ctx context.Context) cache.PresignedURL {
	return cache.PresignedURL{}
}

// WithMetadata for Azure is a no-op. We use GoCloud and metadata is directly managed at upload time in the
// cache-archiver.
func (a *azureAdapter) WithMetadata(metadata map[string]string) {}

// GetGoCloudURL builds an azblob:// URL for the object plus the environment
// variables (account, domain, SAS token) required to use it.
func (a *azureAdapter) GetGoCloudURL(ctx context.Context, upload bool) (cache.GoCloudURL, error) {
	goCloudURL := cache.GoCloudURL{}
	if a.config.ContainerName == "" {
		logrus.Error("ContainerName can't be empty")
		return goCloudURL, fmt.Errorf("ContainerName can't be empty")
	}

	// Go Cloud omits the object name from the URL. Since object storage
	// providers use the URL host for the bucket name, we attach the
	// object name to avoid having to pass another parameter.
	raw := fmt.Sprintf("azblob://%s/%s", a.config.ContainerName, a.objectName)
	u, err := url.Parse(raw)
	if err != nil {
		logrus.WithError(err).WithField("url", raw).Errorf("error parsing blob URL")
		return goCloudURL, fmt.Errorf("error parsing blob URL: %q: %w", raw, err)
	}

	env, err := a.getEnv(ctx, upload)
	if err != nil {
		logrus.WithError(err).Errorf("error retrieving upload headers for GoCloud URL")
		return goCloudURL, err
	}

	goCloudURL.URL = u
	goCloudURL.Environment = env

	return goCloudURL, nil
}

// getEnv assembles the environment the GoCloud azblob driver reads; the SAS
// token is included only when one could be generated.
func (a *azureAdapter) getEnv(ctx context.Context, upload bool) (map[string]string, error) {
	env := map[string]string{
		"AZURE_STORAGE_ACCOUNT": a.config.AccountName,
		"AZURE_STORAGE_DOMAIN":  a.config.StorageDomain,
	}

	token, err := a.generateSASToken(ctx, upload)
	// Return what we do have if the token is missing so the user
	// sees the right error message instead of "options.AccountName is required".
	if token != "" {
		env["AZURE_STORAGE_SAS_TOKEN"] = token
	}

	return env, err
}

// generateSASToken creates a SAS token scoped to GET (download) or PUT
// (upload) for this adapter's object; returns "" when no signer is available.
func (a *azureAdapter) generateSASToken(ctx context.Context, upload bool) (string, error) {
	method := http.MethodGet
	if upload {
		method = http.MethodPut
	}

	signer := a.getSigner()
	if signer == nil {
		return "", nil
	}

	t, err := a.blobTokenGenerator(ctx, a.objectName, &signedURLOptions{
		ContainerName: a.config.ContainerName,
		Signer:        signer,
		Method:        method,
		Timeout:       a.timeout,
	})
	if err != nil {
		logrus.WithError(err).Errorf("error generating Azure SAS token")
		return t, err
	}

	return t, nil
}

// getSigner resolves credentials and returns a SAS signer; failures are
// logged and reported as a nil signer (best-effort).
func (a *azureAdapter) getSigner() sasSigner {
	err := a.credentialsResolver.Resolve()
	if err != nil {
		logrus.WithError(err).Errorf("error resolving Azure credentials")
		return nil
	}

	signer, err := a.credentialsResolver.Signer()
	if err != nil {
		logrus.WithError(err).Errorf("error creating Azure SAS signer")
		return nil
	}

	return signer
}

// New is the cache.Factory for the Azure adapter.
func New(config *cacheconfig.Config, timeout time.Duration, objectName string) (cache.Adapter, error) {
	azure := config.Azure
	if azure == nil {
		return nil, fmt.Errorf("missing Azure configuration")
	}

	cr, err := credentialsResolverInitializer(azure)
	if err != nil {
		return nil, fmt.Errorf("error while initializing Azure credentials resolver: %w", err)
	}

	a := &azureAdapter{
		config:              azure,
		timeout:             timeout,
		objectName:          strings.TrimLeft(objectName, "/"),
		credentialsResolver: cr,
		generateSignedURL:   presignedURL,
		blobTokenGenerator:  getSASToken,
	}

	return a, nil
}

func init() {
	err := cache.Factories().Register("azure", New)
	if err != nil {
		panic(err)
	}
}
================================================ FILE: cache/azure/adapter_test.go ================================================
//go:build !integration

package azure

import (
	"context"
	"encoding/base64"
	"errors"
	"fmt"
	"net/url"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/cache"
	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

var (
	accountName   = "azuretest"
	accountKey    = base64.StdEncoding.EncodeToString([]byte("12345"))
	containerName = "test"
	objectName    = "key"
	storageDomain = "example.com"

	defaultTimeout = 1 * time.Hour
)

// defaultAzureCache returns a fully-populated Azure cache config for tests.
func defaultAzureCache() *cacheconfig.Config {
	return &cacheconfig.Config{
		Type: "azure",
		Azure: &cacheconfig.CacheAzureConfig{
			CacheAzureCredentials: cacheconfig.CacheAzureCredentials{
				AccountName: accountName,
				AccountKey:  accountKey,
			},
			ContainerName: containerName,
			StorageDomain: storageDomain,
		},
	}
}

type adapterOperationInvalidConfigTestCase struct {
	provideAzureConfig                       bool
	errorOnCredentialsResolverInitialization bool
	credentialsResolverResolveError          bool

	accountName      string
	accountKey       string
	containerName    string
	expectedErrorMsg string

	expectedGoCloudURL string
}

// prepareMockedCredentialsResolverInitializer swaps the package-level resolver
// initializer for one driven by the test case; returns a restore function.
func prepareMockedCredentialsResolverInitializer(tc adapterOperationInvalidConfigTestCase) func() {
	oldCredentialsResolverInitializer := credentialsResolverInitializer
	credentialsResolverInitializer = func(config *cacheconfig.CacheAzureConfig) (*defaultCredentialsResolver, error) {
		if tc.errorOnCredentialsResolverInitialization {
			return
nil, errors.New("test error")
		}

		return newDefaultCredentialsResolver(config)
	}

	return func() {
		credentialsResolverInitializer = oldCredentialsResolverInitializer
	}
}

// prepareMockedCredentialsResolverForInvalidConfig wires a mocked resolver onto
// the adapter using the test case's (possibly invalid) credentials.
func prepareMockedCredentialsResolverForInvalidConfig(t *testing.T, adapter *azureAdapter, tc adapterOperationInvalidConfigTestCase) {
	cr := newMockCredentialsResolver(t)

	resolveCall := cr.On("Resolve").Maybe()
	if tc.credentialsResolverResolveError {
		resolveCall.Return(fmt.Errorf("test error"))
	} else {
		resolveCall.Return(nil)
	}

	config := defaultAzureCache()
	config.Azure.CacheAzureCredentials.AccountName = tc.accountName
	config.Azure.CacheAzureCredentials.AccountKey = tc.accountKey
	config.Azure.ContainerName = tc.containerName

	// Always return an account key signer to avoid metadata lookups
	signer, err := newAccountKeySigner(config.Azure)
	cr.On("Signer").Return(signer, err).Maybe()

	adapter.credentialsResolver = cr
}

// testGoCloudURLWithInvalidConfig runs GetGoCloudURL as a subtest and checks
// both the error and the produced URL against the test case expectations.
func testGoCloudURLWithInvalidConfig(
	t *testing.T,
	name string,
	tc adapterOperationInvalidConfigTestCase,
	adapter *azureAdapter,
	operation func(ctx context.Context, upload bool) (cache.GoCloudURL, error),
	expectedErrorMessage string,
) {
	t.Run(name, func(t *testing.T) {
		prepareMockedCredentialsResolverForInvalidConfig(t, adapter, tc)

		u, err := operation(t.Context(), true)
		if expectedErrorMessage != "" {
			assert.ErrorContains(t, err, expectedErrorMessage)
		} else {
			assert.NoError(t, err)
		}
		if tc.expectedGoCloudURL != "" {
			assert.Equal(t, tc.expectedGoCloudURL, u.URL.String())
		} else {
			assert.Nil(t, u.URL)
		}
	})
}

// testUploadEnvWithInvalidConfig asserts the environment is populated with
// account/domain but never a SAS token for invalid credentials.
func testUploadEnvWithInvalidConfig(
	t *testing.T,
	name string,
	tc adapterOperationInvalidConfigTestCase,
	adapter *azureAdapter,
	operation func(context.Context) (map[string]string, error),
) {
	t.Run(name, func(t *testing.T) {
		prepareMockedCredentialsResolverForInvalidConfig(t, adapter, tc)

		u, err := operation(t.Context())
		assert.NoError(t, err)
		assert.Equal(t, accountName, u["AZURE_STORAGE_ACCOUNT"])
		assert.Equal(t, storageDomain, u["AZURE_STORAGE_DOMAIN"])
		assert.NotContains(t, u,
			"AZURE_SAS_TOKEN")
	})
}

func TestAdapterOperation_InvalidConfig(t *testing.T) {
	tests := map[string]adapterOperationInvalidConfigTestCase{
		"no-azure-config": {
			containerName:    containerName,
			expectedErrorMsg: "Missing Azure configuration",
		},
		"error-on-credentials-resolver-initialization": {
			provideAzureConfig:                       true,
			errorOnCredentialsResolverInitialization: true,
		},
		"credentials-resolver-resolve-error": {
			provideAzureConfig:              true,
			credentialsResolverResolveError: true,
			containerName:                   containerName,
			expectedGoCloudURL:              "azblob://test/key",
		},
		"no-credentials": {
			provideAzureConfig: true,
			containerName:      containerName,
			expectedGoCloudURL: "azblob://test/key",
		},
		"no-account-key": {
			provideAzureConfig: true,
			accountName:        accountName,
			containerName:      containerName,
			expectedGoCloudURL: "azblob://test/key",
		},
		"invalid-container-name": {
			provideAzureConfig: true,
			accountName:        accountName,
			containerName:      "\x00",
			expectedErrorMsg:   "error parsing blob URL",
		},
		"container-not-specified": {
			provideAzureConfig: true,
			accountName:        "access-id",
			accountKey:         accountKey,
			expectedErrorMsg:   "ContainerName can't be empty",
		},
	}

	for name, tc := range tests {
		t.Run(name, func(t *testing.T) {
			cleanupCredentialsResolverInitializerMock := prepareMockedCredentialsResolverInitializer(tc)
			defer cleanupCredentialsResolverInitializerMock()

			config := defaultAzureCache()
			config.Azure.ContainerName = tc.containerName
			if !tc.provideAzureConfig {
				config.Azure = nil
			}

			a, err := New(config, defaultTimeout, objectName)
			if !tc.provideAzureConfig {
				assert.Nil(t, a)
				assert.EqualError(t, err, "missing Azure configuration")
				return
			}
			if tc.errorOnCredentialsResolverInitialization {
				assert.Nil(t, a)
				assert.EqualError(t, err, "error while initializing Azure credentials resolver: test error")
				return
			}

			require.NotNil(t, a)
			assert.NoError(t, err)

			adapter, ok := a.(*azureAdapter)
			require.True(t, ok, "Adapter should be properly casted to *adapter type")

			ctx := t.Context()
			assert.Nil(t,
				adapter.GetDownloadURL(ctx).URL)
			assert.Nil(t, adapter.GetHeadURL(ctx).URL)
			assert.Nil(t, adapter.GetUploadURL(ctx).URL)

			testGoCloudURLWithInvalidConfig(t, "GetGoCloudURL", tc, adapter, a.GetGoCloudURL, tc.expectedErrorMsg)
		})
	}
}

type adapterOperationTestCase struct {
	objectName    string
	returnedURL   string
	returnedError error
	expectedError string
}

// prepareMockedSignedURLGenerator stubs the adapter's URL generator and
// verifies the container/method it is asked to sign for.
func prepareMockedSignedURLGenerator(
	t *testing.T,
	tc adapterOperationTestCase,
	expectedMethod string,
	adapter *azureAdapter,
) {
	adapter.generateSignedURL = func(ctx context.Context, name string, opts *signedURLOptions) (*url.URL, error) {
		assert.Equal(t, containerName, opts.ContainerName)
		assert.Equal(t, expectedMethod, opts.Method)

		u, err := url.Parse(tc.returnedURL)
		if err != nil {
			return nil, err
		}

		return u, tc.returnedError
	}
}

func TestAdapterOperation(t *testing.T) {
	tests := map[string]adapterOperationTestCase{
		"error-on-URL-signing": {
			objectName:    objectName,
			returnedURL:   "",
			returnedError: fmt.Errorf("test error"),
			expectedError: "error generating Azure pre-signed URL\" error=\"test error\"",
		},
		"invalid-URL-returned": {
			objectName:    objectName,
			returnedURL:   "://test",
			returnedError: nil,
			expectedError: "error generating Azure pre-signed URL\" error=\"parse",
		},
		"valid-configuration": {
			objectName:    objectName,
			returnedURL:   "https://myaccount.blob.core.windows.net/mycontainer/mydirectory/myfile.txt?sig=XYZ&sp=r",
			returnedError: nil,
			expectedError: "",
		},
		"valid-configuration-with-leading-slash": {
			objectName:    "/" + objectName,
			returnedURL:   "https://myaccount.blob.core.windows.net/mycontainer/mydirectory/myfile.txt?sig=XYZ&sp=r",
			returnedError: nil,
			expectedError: "",
		},
	}

	for name, tc := range tests {
		t.Run(name, func(t *testing.T) {
			config := defaultAzureCache()

			a, err := New(config, defaultTimeout, tc.objectName)
			require.NoError(t, err)

			adapter, ok := a.(*azureAdapter)
			require.True(t, ok, "Adapter should be properly casted to *adapter type")

			u, err := adapter.GetGoCloudURL(t.Context(), true)
			assert.NoError(t,
				err)
			assert.Equal(t, "azblob://test/key", u.URL.String())
			assert.Len(t, u.Environment, 3)
			assert.Equal(t, accountName, u.Environment["AZURE_STORAGE_ACCOUNT"])
			assert.NotEmpty(t, u.Environment["AZURE_STORAGE_SAS_TOKEN"])
			assert.Empty(t, u.Environment["AZURE_STORAGE_KEY"])
			assert.Equal(t, storageDomain, u.Environment["AZURE_STORAGE_DOMAIN"])

			du, err := adapter.GetGoCloudURL(t.Context(), false)
			assert.NoError(t, err)
			assert.Equal(t, "azblob://test/key", du.URL.String())
			assert.Len(t, du.Environment, 3)
			assert.Equal(t, accountName, du.Environment["AZURE_STORAGE_ACCOUNT"])
			assert.NotEmpty(t, du.Environment["AZURE_STORAGE_SAS_TOKEN"])
			assert.Empty(t, du.Environment["AZURE_STORAGE_KEY"])
			assert.Equal(t, storageDomain, du.Environment["AZURE_STORAGE_DOMAIN"])

			ctx := t.Context()
			assert.Nil(t, adapter.GetDownloadURL(ctx).URL)
			assert.Nil(t, adapter.GetHeadURL(ctx).URL)
			assert.Nil(t, adapter.GetUploadURL(ctx).URL)
		})
	}
}
================================================ FILE: cache/azure/azure.go ================================================
package azure

import (
	"context"
	"fmt"
	"net/http"
	"net/url"
	"time"

	"github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
	"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
	"github.com/Azure/azure-sdk-for-go/sdk/azidentity"
	"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob"
	"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/sas"
	"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/service"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

const DefaultAzureServer = "blob.core.windows.net"

// sasSigner abstracts the two ways a SAS can be produced: shared account key
// or a user delegation key obtained via Azure AD.
type sasSigner interface {
	ServiceURL() string
	Prepare(ctx context.Context, o *signedURLOptions) error
	Sign(values sas.BlobSignatureValues) (sas.QueryParameters, error)
}

// accountKeySigner signs SAS tokens with a shared storage account key.
type accountKeySigner struct {
	blobServiceURL string
	credential     *service.SharedKeyCredential
}

// userDelegationKeySigner signs SAS tokens with a user delegation key
// retrieved from the blob service using default Azure credentials.
type userDelegationKeySigner struct {
	blobServiceURL  string
	credTransporter policy.Transporter
	transport       *http.Transport
	userCredential  *service.UserDelegationCredential
credential      *azidentity.DefaultAzureCredential
}

type userDelegationKeyOption func(*userDelegationKeySigner)

// signedURLOptions carries everything needed to sign access to one blob.
type signedURLOptions struct {
	ContainerName string
	Signer        sasSigner
	Method        string
	Timeout       time.Duration
}

// withBlobServiceEndpoint allows the caller to override the default service
// URL. This should only be used in testing.
func withBlobServiceEndpoint(endpoint string) userDelegationKeyOption {
	return func(s *userDelegationKeySigner) {
		s.blobServiceURL = endpoint
	}
}

// withBlobServiceTransports allows the caller to override the underlying
// HTTP transport for the service URL. This should only be used in testing.
func withBlobServiceTransport(transport *http.Transport) userDelegationKeyOption {
	return func(s *userDelegationKeySigner) {
		s.transport = transport
	}
}

// withDefaultCredentialTransporter overrides the transporter used by the
// identity credential; intended for testing against a mock STS.
func withDefaultCredentialTransporter(transporter policy.Transporter) userDelegationKeyOption {
	return func(s *userDelegationKeySigner) {
		s.credTransporter = transporter
	}
}

// transportAdapter wraps http.Transport to implement service.Transporter
type transportAdapter struct {
	transport *http.Transport
}

func (t *transportAdapter) Do(req *http.Request) (*http.Response, error) {
	return t.transport.RoundTrip(req)
}

// presignedURL builds a full blob URL carrying the SAS query parameters.
func presignedURL(ctx context.Context, name string, o *signedURLOptions) (*url.URL, error) {
	sasQueryParams, err := getSASQueryParameters(ctx, name, o)
	if err != nil {
		return nil, err
	}

	endpoint := o.Signer.ServiceURL()
	parts, err := sas.ParseURL(endpoint)
	if err != nil {
		return nil, err
	}

	parts.ContainerName = o.ContainerName
	parts.BlobName = name
	parts.SAS = sasQueryParams

	u, err := url.Parse(parts.String())
	if err != nil {
		return nil, fmt.Errorf("failed to parse Azure URL '%s': %w", parts.String(), err)
	}

	return u, nil
}

// getSASToken returns only the encoded SAS query string for the blob.
func getSASToken(ctx context.Context, name string, o *signedURLOptions) (string, error) {
	sas, err := getSASQueryParameters(ctx, name, o)
	if err != nil {
		return "", err
	}

	return sas.Encode(), nil
}

// getBlobServiceURL derives the account's service URL, honoring a custom
// storage domain (e.g. sovereign clouds) when configured.
func getBlobServiceURL(config *cacheconfig.CacheAzureConfig) string
{
	domain := DefaultAzureServer
	if config.StorageDomain != "" {
		domain = config.StorageDomain
	}

	return fmt.Sprintf("https://%s.%s", config.CacheAzureCredentials.AccountName, domain)
}

// newAccountKeySigner validates the shared-key configuration and builds an
// accountKeySigner from it.
func newAccountKeySigner(config *cacheconfig.CacheAzureConfig) (sasSigner, error) {
	credentials := config.CacheAzureCredentials
	if credentials.AccountName == "" {
		return nil, fmt.Errorf("missing Azure storage account name")
	}
	if credentials.AccountKey == "" {
		return nil, fmt.Errorf("missing Azure storage account key")
	}
	if config.ContainerName == "" {
		return nil, fmt.Errorf("ContainerName can't be empty")
	}

	blobServiceURL := getBlobServiceURL(config)
	credential, err := azblob.NewSharedKeyCredential(credentials.AccountName, credentials.AccountKey)
	if err != nil {
		return nil, fmt.Errorf("creating Azure signature: %w", err)
	}

	return &accountKeySigner{blobServiceURL: blobServiceURL, credential: credential}, nil
}

// newUserDelegationKeySigner builds a signer backed by DefaultAzureCredential;
// options exist so tests can substitute endpoints and transports.
func newUserDelegationKeySigner(config *cacheconfig.CacheAzureConfig, options ...userDelegationKeyOption) (sasSigner, error) {
	if config.AccountName == "" {
		return nil, fmt.Errorf("no Azure storage account name provided")
	}

	blobServiceURL := getBlobServiceURL(config)
	signer := &userDelegationKeySigner{blobServiceURL: blobServiceURL}

	for _, opt := range options {
		opt(signer)
	}

	opts := &azidentity.DefaultAzureCredentialOptions{}
	if signer.credTransporter != nil {
		opts.ClientOptions = policy.ClientOptions{Transport: signer.credTransporter}
	}

	credential, err := azidentity.NewDefaultAzureCredential(opts)
	if err != nil {
		return nil, fmt.Errorf("failed to create Azure identity credentials: %w", err)
	}
	signer.credential = credential

	return signer, nil
}

// getSASQueryParameters prepares the signer (may hit the network for user
// delegation keys) and signs the blob signature values.
func getSASQueryParameters(ctx context.Context, name string, o *signedURLOptions) (sas.QueryParameters, error) {
	serviceSASValues := generateBlobSignatureValues(name, o)

	err := o.Signer.Prepare(ctx, o)
	if err != nil {
		return sas.QueryParameters{}, err
	}

	return o.Signer.Sign(serviceSASValues)
}

// generateBlobSignatureValues maps the HTTP method to read/write permissions
// and fills in the validity window for the SAS.
func generateBlobSignatureValues(name string,
	o *signedURLOptions) sas.BlobSignatureValues {
	permissions := sas.BlobPermissions{Read: true}
	if o.Method == http.MethodPut {
		permissions = sas.BlobPermissions{Write: true}
	}

	// Set the desired SAS signature values.
	// See https://docs.microsoft.com/en-us/rest/api/storageservices/create-service-sas
	return sas.BlobSignatureValues{
		Protocol:      sas.ProtocolHTTPS, // Users MUST use HTTPS (not HTTP)
		StartTime:     time.Now().Add(-1 * time.Hour).UTC(),
		ExpiryTime:    time.Now().Add(o.Timeout).UTC(),
		Permissions:   permissions.String(),
		ContainerName: o.ContainerName,
		BlobName:      name,
	}
}

func (s *accountKeySigner) ServiceURL() string {
	return s.blobServiceURL
}

// Prepare is a no-op: a shared key needs no per-request setup.
func (s *accountKeySigner) Prepare(ctx context.Context, o *signedURLOptions) error {
	return nil
}

func (s *accountKeySigner) Sign(values sas.BlobSignatureValues) (sas.QueryParameters, error) {
	empty := sas.QueryParameters{}

	sas, err := values.SignWithSharedKey(s.credential)
	if err != nil {
		return empty, fmt.Errorf("creating Azure SAS: %w", err)
	}

	return sas, nil
}

func (s *userDelegationKeySigner) ServiceURL() string {
	return s.blobServiceURL
}

// Prepare fetches and caches the user delegation key for the signing window.
func (s *userDelegationKeySigner) Prepare(ctx context.Context, o *signedURLOptions) error {
	userDelegationKey, err := s.retrieveUserCredentials(ctx, o)
	if err != nil {
		return fmt.Errorf("failed to get User Delegation Key: %w", err)
	}

	s.userCredential = userDelegationKey
	return nil
}

func (s *userDelegationKeySigner) Sign(values sas.BlobSignatureValues) (sas.QueryParameters, error) {
	empty := sas.QueryParameters{}

	sas, err := values.SignWithUserDelegation(s.userCredential)
	if err != nil {
		return empty, fmt.Errorf("creating Azure SAS: %w", err)
	}

	return sas, nil
}

// retrieveUserCredentials asks the blob service for a user delegation key
// valid for the configured timeout window.
func (s *userDelegationKeySigner) retrieveUserCredentials(ctx context.Context, o *signedURLOptions) (*service.UserDelegationCredential, error) {
	start := time.Now().UTC()
	expiry := start.Add(o.Timeout)
	info := service.KeyInfo{
		Start:  to.Ptr(start.UTC().Format(sas.TimeFormat)),
		Expiry: to.Ptr(expiry.UTC().Format(sas.TimeFormat)),
	}

	clientOptions := &service.ClientOptions{}
	if s.transport != nil {
		clientOptions.Transport = &transportAdapter{transport: s.transport}
	}

	blobServiceClient, err := service.NewClient(s.blobServiceURL, s.credential, clientOptions)
	if err != nil {
		return nil, fmt.Errorf("failed to create Azure Blob Service Client: %w", err)
	}

	return blobServiceClient.GetUserDelegationCredential(ctx, info, nil)
}
================================================ FILE: cache/azure/azure_test.go ================================================
//go:build !integration

package azure

import (
	"bytes"
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"net/url"
	"strings"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

type azureSigningTest struct {
	accountName   string
	accountKey    string
	storageDomain string
	containerName string
	method        string
	endpoint      string

	expectedErrorOnGeneration bool
	expectedServiceURL        string
}

const (
	mockClientInfo = "my-client"
	mockIDToken    = "my-idt"
)

// mockSTS fakes the Azure token endpoint used by DefaultAzureCredential.
type mockSTS struct{}

var accessTokenRespSuccess = []byte(fmt.Sprintf(`{"access_token": "%s", "expires_in": 3600}`, "tokenValue"))

func (m *mockSTS) Do(req *http.Request) (*http.Response, error) {
	res := &http.Response{StatusCode: http.StatusNotFound}
	s := strings.Split(req.URL.Path, "/")
	if s[len(s)-1] != "token" {
		return res, nil
	}

	if err := req.ParseForm(); err != nil {
		return nil, fmt.Errorf("mockSTS failed to parse a request body: %w", err)
	}

	if grant := req.FormValue("grant_type"); grant == "device_code" || grant == "password" {
		// include account info because we're authenticating a user
		res.Body = io.NopCloser(bytes.NewReader(
			[]byte(fmt.Sprintf(`{"access_token":"at","expires_in": 3600,"refresh_token":"rt","client_info":%q,"id_token":%q}`, mockClientInfo, mockIDToken)),
		))
	} else {
		res.Body = io.NopCloser(bytes.NewReader(accessTokenRespSuccess))
	}
	res.StatusCode =
http.StatusOK
	return res, nil
}

func TestAccountKeySigning(t *testing.T) {
	tests := map[string]azureSigningTest{
		"missing account name": {
			accountKey:                accountKey,
			containerName:             "test-container",
			method:                    http.MethodGet,
			expectedErrorOnGeneration: true,
		},
		"missing account key": {
			accountName:               accountName,
			containerName:             "test-container",
			method:                    http.MethodGet,
			expectedErrorOnGeneration: true,
		},
		"GET request": {
			accountName:        accountName,
			accountKey:         accountKey,
			containerName:      "test-container",
			method:             http.MethodGet,
			expectedServiceURL: "https://azuretest.blob.core.windows.net",
		},
		"GET request in custom storage domain": {
			accountName:        accountName,
			accountKey:         accountKey,
			storageDomain:      "blob.core.chinacloudapi.cn",
			containerName:      "test-container",
			method:             http.MethodGet,
			expectedServiceURL: "https://azuretest.blob.core.chinacloudapi.cn",
		},
		"PUT request": {
			accountName:        accountName,
			accountKey:         accountKey,
			containerName:      "test-container",
			method:             http.MethodPut,
			expectedServiceURL: "https://azuretest.blob.core.windows.net",
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			credentials := &cacheconfig.CacheAzureCredentials{
				AccountName: tt.accountName,
				AccountKey:  tt.accountKey,
			}
			config := &cacheconfig.CacheAzureConfig{
				CacheAzureCredentials: *credentials,
				ContainerName:         tt.containerName,
				StorageDomain:         tt.storageDomain,
			}
			opts := &signedURLOptions{
				ContainerName: containerName,
				Method:        tt.method,
				Timeout:       1 * time.Hour,
			}

			signer, err := newAccountKeySigner(config)
			if tt.expectedErrorOnGeneration {
				assert.Error(t, err)
				return
			}

			require.NoError(t, err)
			assert.Equal(t, tt.expectedServiceURL, signer.ServiceURL())
			opts.Signer = signer

			token, err := getSASToken(t.Context(), objectName, opts)
			require.NoError(t, err)

			q, err := url.ParseQuery(token)
			require.NoError(t, err)
			assert.Equal(t, q.Encode(), token)

			// Sanity check query parameters from
			// https://docs.microsoft.com/en-us/rest/api/storageservices/create-service-sas
			assert.NotNil(t, q["sv"])                    // SignedVersion
			assert.Equal(t, []string{"b"}, q["sr"])      // SignedResource (blob)
			assert.NotNil(t, q["st"])                    // SignedStart
			assert.NotNil(t, q["se"])                    // SignedExpiry
			assert.NotNil(t, q["sig"])                   // Signature
			assert.Equal(t, []string{"https"}, q["spr"]) // SignedProtocol

			// SignedPermission
			expectedPermissionValue := "w"
			if tt.method == http.MethodGet {
				expectedPermissionValue = "r"
			}
			assert.Equal(t, []string{expectedPermissionValue}, q["sp"])
		})
	}
}

func TestUserDelegationSigning(t *testing.T) {
	handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Simulate Azure API response
		// NOTE(review): the XML markup of this fixture appears to have been
		// stripped by the extraction; the string literal is preserved verbatim.
		w.Header().Set("Content-Type", "application/xml")
		responseBody := ` f81d4fae-7dec-11d0-a765-00a0c91e6bf6 72f988bf-86f1-41af-91ab-2d7cd011db47 2024-09-19T00:00:00Z 2024-09-26T00:00:00Z b 2020-02-10 UDELEGATIONKEYXYZ.... rL7...ABC `
		w.WriteHeader(http.StatusOK)
		w.Header().Set("Date", time.Now().UTC().Format(http.TimeFormat))
		_, _ = w.Write([]byte(responseBody))
	})

	server := httptest.NewTLSServer(handler)
	defer server.Close()

	// Azure requires HTTPS to be used. Since we are setting up our own
	// fake API server, skip TLS verification.
	customTransport := &http.Transport{
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}

	tests := map[string]azureSigningTest{
		"missing account name": {
			accountKey:                accountKey,
			containerName:             "test-container",
			method:                    http.MethodGet,
			expectedErrorOnGeneration: true,
		},
		"GET request": {
			accountName: accountName,
			accountKey:  accountKey,
			method:      http.MethodGet,
			endpoint:    server.URL,
		},
		"PUT request": {
			accountName: accountName,
			accountKey:  accountKey,
			method:      http.MethodPut,
			endpoint:    server.URL,
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			credentials := &cacheconfig.CacheAzureCredentials{
				AccountName: tt.accountName,
				AccountKey:  tt.accountKey,
			}
			config := &cacheconfig.CacheAzureConfig{
				CacheAzureCredentials: *credentials,
			}
			opts := &signedURLOptions{
				ContainerName: containerName,
				Method:        tt.method,
				Timeout:       1 * time.Hour,
			}

			signer, err := newUserDelegationKeySigner(config,
				withDefaultCredentialTransporter(&mockSTS{}),
				withBlobServiceEndpoint(tt.endpoint),
				withBlobServiceTransport(customTransport))
			if tt.expectedErrorOnGeneration {
				require.Error(t, err)
				return
			}

			require.NoError(t, err)
			assert.Equal(t, server.URL, signer.ServiceURL())
			opts.Signer = signer

			token, err := getSASToken(t.Context(), objectName, opts)
			require.NoError(t, err)

			q, err := url.ParseQuery(token)
			require.NoError(t, err)
			assert.Equal(t, q.Encode(), token)

			// Sanity check query parameters from
			// https://docs.microsoft.com/en-us/rest/api/storageservices/create-service-sas
			assert.NotNil(t, q["sv"])                    // SignedVersion
			assert.Equal(t, []string{"b"}, q["sr"])      // SignedResource (blob)
			assert.NotNil(t, q["st"])                    // SignedStart
			assert.NotNil(t, q["se"])                    // SignedExpiry
			assert.NotNil(t, q["sig"])                   // Signature
			assert.Equal(t, []string{"https"}, q["spr"]) // SignedProtocol

			// SignedPermission
			expectedPermissionValue := "w"
			if tt.method == http.MethodGet {
				expectedPermissionValue = "r"
			}
			assert.Equal(t, []string{expectedPermissionValue}, q["sp"])
		})
	}
}
================================================ FILE: cache/azure/credentials_resolver.go ================================================
package azure

import (
	"errors"
	"fmt"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

// credentialsResolver validates Azure credentials and yields a SAS signer.
type credentialsResolver interface {
	Resolve() error
	Signer() (sasSigner, error)
}

// defaultCredentialsResolver reads credentials straight from the cache config.
type defaultCredentialsResolver struct {
	config *cacheconfig.CacheAzureConfig
}

func (cr *defaultCredentialsResolver) Resolve() error {
	return cr.readCredentialsFromConfig()
}

func (cr *defaultCredentialsResolver) Credentials() *cacheconfig.CacheAzureCredentials {
	return &cr.config.CacheAzureCredentials
}

// Signer picks the account-key signer when a key is configured, otherwise
// falls back to a user-delegation-key signer (Azure AD).
func (cr *defaultCredentialsResolver) Signer() (sasSigner, error) {
	if cr.config.AccountName == "" {
		return nil, errors.New("missing Azure storage account name")
	}
	if cr.config.ContainerName == "" {
		return nil, errors.New("ContainerName can't be empty")
	}
	if cr.config.CacheAzureCredentials.AccountKey != "" {
		return newAccountKeySigner(cr.config)
	}

	return newUserDelegationKeySigner(cr.config)
}

func (cr *defaultCredentialsResolver) readCredentialsFromConfig() error {
	if cr.config.AccountName == "" {
		return fmt.Errorf("config for Azure present, but account name is not configured")
	}

	return nil
}

func newDefaultCredentialsResolver(config *cacheconfig.CacheAzureConfig) (*defaultCredentialsResolver, error) {
	if config == nil {
		return nil, fmt.Errorf("config can't be nil")
	}

	resolver := &defaultCredentialsResolver{
		config: config,
	}

	return resolver, nil
}

// credentialsResolverInitializer is a seam that tests override to inject mocks.
var credentialsResolverInitializer = newDefaultCredentialsResolver
================================================ FILE: cache/azure/credentials_resolver_test.go ================================================
//go:build !integration

package azure

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

type credentialsResolverTestCase struct {
	config                        *cacheconfig.CacheAzureConfig
	errorExpectedOnInitialization bool
	errorExpectedOnResolve        bool
	expectedCredentials           *cacheconfig.CacheAzureCredentials
}

type signerTestCase struct {
	config                *cacheconfig.CacheAzureConfig
	errorExpectedOnSigner bool
	expectedSignerType    string
}

func getCredentialsConfig(accountName string, accountKey string) *cacheconfig.CacheAzureConfig {
	return &cacheconfig.CacheAzureConfig{
		CacheAzureCredentials: cacheconfig.CacheAzureCredentials{
			AccountName: accountName,
			AccountKey:  accountKey,
		},
		ContainerName: "test-container",
	}
}

func getExpectedCredentials(accountName string, accountKey string) *cacheconfig.CacheAzureCredentials {
	return &cacheconfig.CacheAzureCredentials{
		AccountName: accountName,
		AccountKey:  accountKey,
	}
}

func TestDefaultCredentialsResolver(t *testing.T) {
	cases := map[string]credentialsResolverTestCase{
		"config is nil": {
			config:                        nil,
			errorExpectedOnInitialization: true,
		},
		"credentials not set": {
			config:                 &cacheconfig.CacheAzureConfig{},
			errorExpectedOnResolve: true,
		},
		"credentials direct in config": {
			config:                 getCredentialsConfig(accountName, accountKey),
			errorExpectedOnResolve: false,
			expectedCredentials:    getExpectedCredentials(accountName, accountKey),
		},
	}

	for tn, tt := range cases {
		t.Run(tn, func(t *testing.T) {
			cr, err := newDefaultCredentialsResolver(tt.config)
			if tt.errorExpectedOnInitialization {
				assert.Error(t, err)
				return
			}
			require.NoError(t, err, "Error on resolver initialization is not expected")

			err = cr.Resolve()
			if tt.errorExpectedOnResolve {
				assert.Error(t, err)
				return
			}
			require.NoError(t, err, "Error on credentials resolving is not expected")

			assert.Equal(t, tt.expectedCredentials, cr.Credentials())
		})
	}
}

func TestSigner(t *testing.T) {
	cases := map[string]signerTestCase{
		"account name not set": {
			config:                getCredentialsConfig("", accountKey),
			errorExpectedOnSigner: true,
		},
		"account key not set": {
			config:                getCredentialsConfig(accountName, ""),
			errorExpectedOnSigner: false,
			expectedSignerType:    "userDelegationKeySigner",
		},
		"account name and key set": {
			config:                getCredentialsConfig(accountName, accountKey),
			errorExpectedOnSigner: false,
			expectedSignerType:    "accountKeySigner",
		},
	}

	for tn, tt := range cases {
		t.Run(tn, func(t *testing.T) {
			cr, err := newDefaultCredentialsResolver(tt.config)
			require.NoError(t, err, "Error on resolver initialization is not expected")

			signer, err := cr.Signer()
			if tt.errorExpectedOnSigner {
				assert.Error(t, err)
				assert.Nil(t, signer)
				return
			}
			require.NoError(t, err, "Error on signer is not expected")

			if tt.expectedSignerType == "accountKeySigner" {
				_, ok := signer.(*accountKeySigner)
				assert.True(t, ok, "Signer is expected to be of accountKeySigner type")
			} else {
				_, ok := signer.(*userDelegationKeySigner)
				assert.True(t, ok, "Signer is expected to be of userDelegationKeySigner type")
			}
		})
	}
}
================================================ FILE: cache/azure/mocks.go ================================================
// Code generated by mockery; DO NOT EDIT.
// github.com/vektra/mockery
// template: testify

package azure

import (
	"context"

	"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/sas"
	mock "github.com/stretchr/testify/mock"
)

// newMockSasSigner creates a new instance of mockSasSigner. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func newMockSasSigner(t interface { mock.TestingT Cleanup(func()) }) *mockSasSigner { mock := &mockSasSigner{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // mockSasSigner is an autogenerated mock type for the sasSigner type type mockSasSigner struct { mock.Mock } type mockSasSigner_Expecter struct { mock *mock.Mock } func (_m *mockSasSigner) EXPECT() *mockSasSigner_Expecter { return &mockSasSigner_Expecter{mock: &_m.Mock} } // Prepare provides a mock function for the type mockSasSigner func (_mock *mockSasSigner) Prepare(ctx context.Context, o *signedURLOptions) error { ret := _mock.Called(ctx, o) if len(ret) == 0 { panic("no return value specified for Prepare") } var r0 error if returnFunc, ok := ret.Get(0).(func(context.Context, *signedURLOptions) error); ok { r0 = returnFunc(ctx, o) } else { r0 = ret.Error(0) } return r0 } // mockSasSigner_Prepare_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Prepare' type mockSasSigner_Prepare_Call struct { *mock.Call } // Prepare is a helper method to define mock.On call // - ctx context.Context // - o *signedURLOptions func (_e *mockSasSigner_Expecter) Prepare(ctx interface{}, o interface{}) *mockSasSigner_Prepare_Call { return &mockSasSigner_Prepare_Call{Call: _e.mock.On("Prepare", ctx, o)} } func (_c *mockSasSigner_Prepare_Call) Run(run func(ctx context.Context, o *signedURLOptions)) *mockSasSigner_Prepare_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } var arg1 *signedURLOptions if args[1] != nil { arg1 = args[1].(*signedURLOptions) } run( arg0, arg1, ) }) return _c } func (_c *mockSasSigner_Prepare_Call) Return(err error) *mockSasSigner_Prepare_Call { _c.Call.Return(err) return _c } func (_c *mockSasSigner_Prepare_Call) RunAndReturn(run func(ctx context.Context, o *signedURLOptions) error) *mockSasSigner_Prepare_Call { _c.Call.Return(run) return _c } // 
ServiceURL provides a mock function for the type mockSasSigner func (_mock *mockSasSigner) ServiceURL() string { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for ServiceURL") } var r0 string if returnFunc, ok := ret.Get(0).(func() string); ok { r0 = returnFunc() } else { r0 = ret.Get(0).(string) } return r0 } // mockSasSigner_ServiceURL_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ServiceURL' type mockSasSigner_ServiceURL_Call struct { *mock.Call } // ServiceURL is a helper method to define mock.On call func (_e *mockSasSigner_Expecter) ServiceURL() *mockSasSigner_ServiceURL_Call { return &mockSasSigner_ServiceURL_Call{Call: _e.mock.On("ServiceURL")} } func (_c *mockSasSigner_ServiceURL_Call) Run(run func()) *mockSasSigner_ServiceURL_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *mockSasSigner_ServiceURL_Call) Return(s string) *mockSasSigner_ServiceURL_Call { _c.Call.Return(s) return _c } func (_c *mockSasSigner_ServiceURL_Call) RunAndReturn(run func() string) *mockSasSigner_ServiceURL_Call { _c.Call.Return(run) return _c } // Sign provides a mock function for the type mockSasSigner func (_mock *mockSasSigner) Sign(values sas.BlobSignatureValues) (sas.QueryParameters, error) { ret := _mock.Called(values) if len(ret) == 0 { panic("no return value specified for Sign") } var r0 sas.QueryParameters var r1 error if returnFunc, ok := ret.Get(0).(func(sas.BlobSignatureValues) (sas.QueryParameters, error)); ok { return returnFunc(values) } if returnFunc, ok := ret.Get(0).(func(sas.BlobSignatureValues) sas.QueryParameters); ok { r0 = returnFunc(values) } else { r0 = ret.Get(0).(sas.QueryParameters) } if returnFunc, ok := ret.Get(1).(func(sas.BlobSignatureValues) error); ok { r1 = returnFunc(values) } else { r1 = ret.Error(1) } return r0, r1 } // mockSasSigner_Sign_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Sign' type 
mockSasSigner_Sign_Call struct { *mock.Call } // Sign is a helper method to define mock.On call // - values sas.BlobSignatureValues func (_e *mockSasSigner_Expecter) Sign(values interface{}) *mockSasSigner_Sign_Call { return &mockSasSigner_Sign_Call{Call: _e.mock.On("Sign", values)} } func (_c *mockSasSigner_Sign_Call) Run(run func(values sas.BlobSignatureValues)) *mockSasSigner_Sign_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 sas.BlobSignatureValues if args[0] != nil { arg0 = args[0].(sas.BlobSignatureValues) } run( arg0, ) }) return _c } func (_c *mockSasSigner_Sign_Call) Return(queryParameters sas.QueryParameters, err error) *mockSasSigner_Sign_Call { _c.Call.Return(queryParameters, err) return _c } func (_c *mockSasSigner_Sign_Call) RunAndReturn(run func(values sas.BlobSignatureValues) (sas.QueryParameters, error)) *mockSasSigner_Sign_Call { _c.Call.Return(run) return _c } // newMockCredentialsResolver creates a new instance of mockCredentialsResolver. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func newMockCredentialsResolver(t interface { mock.TestingT Cleanup(func()) }) *mockCredentialsResolver { mock := &mockCredentialsResolver{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // mockCredentialsResolver is an autogenerated mock type for the credentialsResolver type type mockCredentialsResolver struct { mock.Mock } type mockCredentialsResolver_Expecter struct { mock *mock.Mock } func (_m *mockCredentialsResolver) EXPECT() *mockCredentialsResolver_Expecter { return &mockCredentialsResolver_Expecter{mock: &_m.Mock} } // Resolve provides a mock function for the type mockCredentialsResolver func (_mock *mockCredentialsResolver) Resolve() error { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for Resolve") } var r0 error if returnFunc, ok := ret.Get(0).(func() error); ok { r0 = returnFunc() } else { r0 = ret.Error(0) } return r0 } // mockCredentialsResolver_Resolve_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Resolve' type mockCredentialsResolver_Resolve_Call struct { *mock.Call } // Resolve is a helper method to define mock.On call func (_e *mockCredentialsResolver_Expecter) Resolve() *mockCredentialsResolver_Resolve_Call { return &mockCredentialsResolver_Resolve_Call{Call: _e.mock.On("Resolve")} } func (_c *mockCredentialsResolver_Resolve_Call) Run(run func()) *mockCredentialsResolver_Resolve_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *mockCredentialsResolver_Resolve_Call) Return(err error) *mockCredentialsResolver_Resolve_Call { _c.Call.Return(err) return _c } func (_c *mockCredentialsResolver_Resolve_Call) RunAndReturn(run func() error) *mockCredentialsResolver_Resolve_Call { _c.Call.Return(run) return _c } // Signer provides a mock function for the type mockCredentialsResolver func (_mock *mockCredentialsResolver) Signer() (sasSigner, error) { ret := _mock.Called() if len(ret) == 0 { panic("no return value 
specified for Signer") } var r0 sasSigner var r1 error if returnFunc, ok := ret.Get(0).(func() (sasSigner, error)); ok { return returnFunc() } if returnFunc, ok := ret.Get(0).(func() sasSigner); ok { r0 = returnFunc() } else { if ret.Get(0) != nil { r0 = ret.Get(0).(sasSigner) } } if returnFunc, ok := ret.Get(1).(func() error); ok { r1 = returnFunc() } else { r1 = ret.Error(1) } return r0, r1 } // mockCredentialsResolver_Signer_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Signer' type mockCredentialsResolver_Signer_Call struct { *mock.Call } // Signer is a helper method to define mock.On call func (_e *mockCredentialsResolver_Expecter) Signer() *mockCredentialsResolver_Signer_Call { return &mockCredentialsResolver_Signer_Call{Call: _e.mock.On("Signer")} } func (_c *mockCredentialsResolver_Signer_Call) Run(run func()) *mockCredentialsResolver_Signer_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *mockCredentialsResolver_Signer_Call) Return(sasSignerMoqParam sasSigner, err error) *mockCredentialsResolver_Signer_Call { _c.Call.Return(sasSignerMoqParam, err) return _c } func (_c *mockCredentialsResolver_Signer_Call) RunAndReturn(run func() (sasSigner, error)) *mockCredentialsResolver_Signer_Call { _c.Call.Return(run) return _c } ================================================ FILE: cache/cache.go ================================================ package cache import ( "context" "fmt" "path" "strings" "time" "github.com/sirupsen/logrus" "gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig" ) type nopAdapter struct{} func (nopAdapter) GetDownloadURL(context.Context) PresignedURL { return PresignedURL{} } func (nopAdapter) GetHeadURL(context.Context) PresignedURL { return PresignedURL{} } func (nopAdapter) WithMetadata(map[string]string) {} func (nopAdapter) GetUploadURL(context.Context) PresignedURL { return PresignedURL{} } func (nopAdapter) GetGoCloudURL(ctx context.Context, upload bool) 
(GoCloudURL, error) {
	return GoCloudURL{}, nil
}

// createAdapter is an indirection point so tests can stub adapter creation.
var createAdapter = getCreateAdapter

// GetAdapter returns a cache Adapter for the given configuration, runner
// short token, project ID and cache key. It returns a no-op adapter (which
// yields empty presigned URLs) when the configuration is nil, the key is
// empty or would escape the project namespace, or the adapter cannot be
// created.
func GetAdapter(config *cacheconfig.Config, timeout time.Duration, shortToken, projectId, key string, sharded bool) Adapter {
	if config == nil {
		return nopAdapter{}
	}

	if key == "" {
		logrus.Warning("Empty cache key. Skipping adapter selection.")
		return nopAdapter{}
	}

	// generate object path
	// runners get their own namespace, unless they're shared, in which case the
	// namespace is empty.
	namespace := ""
	if !config.GetShared() {
		namespace = path.Join("runner", shortToken)
	}

	basePath := path.Join(config.GetPath(), namespace, "project", projectId)

	// When sharded (i.e. FF_HASH_CACHE_KEYS is enabled), insert the first two
	// hex characters of the key as an intermediate path component. This
	// distributes objects across 256 distinct S3 prefixes per project, avoiding
	// 503 Slow Down responses caused by all cache objects sharing the same
	// prefix and landing on the same partition.
	var fullPath string
	if sharded {
		if len(key) < 2 {
			// A key shorter than 2 characters cannot supply the shard prefix.
			logrus.WithError(fmt.Errorf("cache key too short to shard (length %d)", len(key))).Error("Error while generating cache bucket.")
			return nopAdapter{}
		}
		fullPath = path.Join(basePath, key[:2], key)
	} else {
		fullPath = path.Join(basePath, key)
	}

	// The typical concerns regarding the use of strings.HasPrefix to detect
	// path traversal do not apply here. The detection here is made easier
	// as we're dealing with URL paths, not filepaths and we're ensuring that
	// the basepath has a final separator (the key can not be empty).
	// TestGenerateObjectName contains path traversal tests.
	if !strings.HasPrefix(fullPath, basePath+"/") {
		logrus.WithError(fmt.Errorf("computed cache path outside of project bucket. Please remove `../` from cache key")).Error("Error while generating cache bucket.")
		return nopAdapter{}
	}

	adapter, err := createAdapter(config, timeout, fullPath)
	if err != nil {
		// NOTE(review): the error is only logged; if createAdapter returned a
		// non-nil adapter alongside the error, that adapter is still used.
		// The "adapter-error-on-factorization" test relies on this behavior.
		logrus.WithError(err).Error("Could not create cache adapter")
	}

	if adapter == nil {
		return nopAdapter{}
	}

	return adapter
}
================================================ FILE: cache/cache_test.go ================================================
//go:build !integration

package cache

import (
	"context"
	"fmt"
	"net/url"
	"testing"
	"time"

	"github.com/sirupsen/logrus/hooks/test"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

// cacheOperationTest describes one table-driven scenario for the presigned
// URL operations exercised by testCacheOperation.
type cacheOperationTest struct {
	key                string
	configExists       bool
	adapterExists      bool
	adapterCreateError error
	adapterURL         PresignedURL
	metadata           map[string]string
	expectedURL        *url.URL
	expectedOutput     []string
}

// prepareFakeCreateAdapter stubs the package-level createAdapter hook for the
// lifetime of the test and, when requested, wires up a mock Adapter with the
// expected call for operationName.
func prepareFakeCreateAdapter(t *testing.T, operationName string, tc cacheOperationTest) {
	var adapter Adapter

	oldCreateAdapter := createAdapter
	createAdapter = func(_ *cacheconfig.Config, _ time.Duration, _ string) (Adapter, error) {
		return adapter, tc.adapterCreateError
	}
	t.Cleanup(func() {
		createAdapter = oldCreateAdapter
	})

	if !tc.adapterExists {
		return
	}

	madapter := NewMockAdapter(t)
	adapter = madapter

	if tc.adapterURL.URL == nil {
		return
	}

	if operationName == "GetGoCloudURL" {
		madapter.On(operationName, mock.Anything, true).Return(GoCloudURL{URL: tc.adapterURL.URL}, nil).Once()
	} else {
		madapter.On(operationName, mock.Anything).Return(tc.adapterURL).Once()
	}

	if operationName == "GetUploadURL" {
		madapter.On("WithMetadata", tc.metadata).Once()
	}
}

// prepareFakeConfig returns a minimal cache config matching the scenario, or
// nil when the scenario simulates a missing configuration.
func prepareFakeConfig(tc cacheOperationTest) *cacheconfig.Config {
	if !tc.configExists {
		return nil
	}

	config := &cacheconfig.Config{}
	if tc.adapterExists {
		config.Type = "test"
	}

	return config
}

func testCacheOperation(
	t *testing.T,
	operationName string,
	operation func(ctx context.Context, adaptor Adapter)
PresignedURL, tc cacheOperationTest, ) { t.Run(operationName, func(t *testing.T) { ctx := t.Context() hook := test.NewGlobal() prepareFakeCreateAdapter(t, operationName, tc) config := prepareFakeConfig(tc) adaptor := GetAdapter(config, 3600*time.Second, "shorttoken", "10", tc.key, false) generatedURL := operation(ctx, adaptor) assert.Equal(t, tc.expectedURL, generatedURL.URL) if len(tc.expectedOutput) == 0 { assert.Len(t, hook.AllEntries(), 0) } else { for _, expectedOutput := range tc.expectedOutput { message, err := hook.LastEntry().String() require.NoError(t, err) assert.Contains(t, message, expectedOutput) } } }) } func TestCacheOperations(t *testing.T) { exampleURL, err := url.Parse("example.com") require.NoError(t, err) tests := map[string]cacheOperationTest{ "no-config": { key: "key", adapterExists: true, adapterURL: PresignedURL{}, expectedURL: nil, expectedOutput: nil, }, "key-not-specified": { configExists: true, adapterExists: true, adapterURL: PresignedURL{}, expectedURL: nil, expectedOutput: []string{"Empty cache key. 
Skipping adapter selection."}, }, "adapter-doesnt-exists": { key: "key", configExists: true, adapterExists: false, adapterURL: PresignedURL{URL: exampleURL}, expectedURL: nil, }, "adapter-error-on-factorization": { key: "key", configExists: true, adapterExists: true, adapterCreateError: fmt.Errorf("some creation error"), adapterURL: PresignedURL{URL: exampleURL}, expectedURL: exampleURL, expectedOutput: []string{`error="some creation error"`}, }, "adapter-exists": { key: "key", configExists: true, adapterExists: true, adapterURL: PresignedURL{URL: exampleURL}, expectedURL: exampleURL, }, "adapter-exists-with-metadata": { key: "key", configExists: true, adapterExists: true, metadata: map[string]string{"foo": "some foo"}, adapterURL: PresignedURL{URL: exampleURL}, expectedURL: exampleURL, }, } for name, tc := range tests { t.Run(name, func(t *testing.T) { testCacheOperation(t, "GetDownloadURL", func(ctx context.Context, adaptor Adapter) PresignedURL { return adaptor.GetDownloadURL(ctx) }, tc) testCacheOperation(t, "GetUploadURL", func(ctx context.Context, adaptor Adapter) PresignedURL { adaptor.WithMetadata(tc.metadata) return adaptor.GetUploadURL(ctx) }, tc) testCacheOperation(t, "GetGoCloudURL", func(ctx context.Context, adaptor Adapter) PresignedURL { u, _ := adaptor.GetGoCloudURL(ctx, true) return PresignedURL{URL: u.URL} }, tc) }) } } func defaultCacheConfig() *cacheconfig.Config { return &cacheconfig.Config{ Type: "test", } } type generateObjectNameTestCase struct { key string path string shared bool sharded bool expectedObjectName string expectedError string } func TestGenerateObjectName(t *testing.T) { tests := map[string]generateObjectNameTestCase{ "default usage": { key: "key", expectedObjectName: "runner/longtoken/project/10/key", }, "empty key": { key: "", expectedObjectName: "", expectedError: "Empty cache key", }, "short path is set": { key: "key", path: "whatever", expectedObjectName: "whatever/runner/longtoken/project/10/key", }, "multiple segment 
path is set": { key: "key", path: "some/other/path/goes/here", expectedObjectName: "some/other/path/goes/here/runner/longtoken/project/10/key", }, "path is empty": { key: "key", path: "", expectedObjectName: "runner/longtoken/project/10/key", }, "shared flag is set to true": { key: "key", shared: true, expectedObjectName: "project/10/key", }, "shared flag is set to false": { key: "key", shared: false, expectedObjectName: "runner/longtoken/project/10/key", }, "path traversal but within base path": { key: "../10/key", expectedObjectName: "runner/longtoken/project/10/key", }, "path traversal resolves to empty key": { key: "../10", expectedError: "computed cache path outside of project bucket", }, "path traversal escapes project namespace": { key: "../10-outside", expectedError: "computed cache path outside of project bucket", }, "sharded key uses first two chars as prefix": { key: "d03a852ba491ba611e907b1ef60ad5c4516a05b8f3aae6abb77f42bc60325aed", sharded: true, expectedObjectName: "runner/longtoken/project/10/d0/d03a852ba491ba611e907b1ef60ad5c4516a05b8f3aae6abb77f42bc60325aed", }, "sharded key with path prefix": { key: "d03a852ba491ba611e907b1ef60ad5c4516a05b8f3aae6abb77f42bc60325aed", path: "builds", sharded: true, expectedObjectName: "builds/runner/longtoken/project/10/d0/d03a852ba491ba611e907b1ef60ad5c4516a05b8f3aae6abb77f42bc60325aed", }, "sharded key with shared runner": { key: "d03a852ba491ba611e907b1ef60ad5c4516a05b8f3aae6abb77f42bc60325aed", shared: true, sharded: true, expectedObjectName: "project/10/d0/d03a852ba491ba611e907b1ef60ad5c4516a05b8f3aae6abb77f42bc60325aed", }, } for name, tc := range tests { t.Run(name, func(t *testing.T) { hook := test.NewGlobal() cache := defaultCacheConfig() cache.Path = tc.path cache.Shared = tc.shared var capturedObjectName string oldCreateAdapter := createAdapter createAdapter = func(_ *cacheconfig.Config, _ time.Duration, objectName string) (Adapter, error) { capturedObjectName = objectName return NewMockAdapter(t), nil } 
t.Cleanup(func() { createAdapter = oldCreateAdapter }) adapter := GetAdapter(cache, 3600*time.Second, "longtoken", "10", tc.key, tc.sharded) if tc.expectedError != "" { // The error/warning cases return a nopAdaptor and log instead of returning an error assert.IsType(t, nopAdapter{}, adapter) require.NotEmpty(t, hook.AllEntries()) message, err := hook.LastEntry().String() require.NoError(t, err) assert.Contains(t, message, tc.expectedError) } else { assert.Equal(t, tc.expectedObjectName, capturedObjectName) assert.NotEqual(t, nopAdapter{}, adapter) } }) } } ================================================ FILE: cache/cacheconfig/cacheconfig.go ================================================ package cacheconfig import ( "fmt" "net" "net/url" "strings" "github.com/minio/minio-go/v7/pkg/s3utils" "github.com/sirupsen/logrus" ) type Config struct { Type string `toml:"Type,omitempty" long:"type" env:"CACHE_TYPE" description:"Select caching method"` Path string `toml:"Path,omitempty" long:"path" env:"CACHE_PATH" description:"Name of the path to prepend to the cache URL"` Shared bool `toml:"Shared,omitempty" long:"shared" env:"CACHE_SHARED" description:"Enable cache sharing between runners."` MaxUploadedArchiveSize int64 `toml:"MaxUploadedArchiveSize,omitempty" long:"max_uploaded_archive_size" env:"CACHE_MAXIMUM_UPLOADED_ARCHIVE_SIZE" description:"Limit the size of the cache archive being uploaded to cloud storage, in bytes."` S3 *CacheS3Config `toml:"s3,omitempty" json:"s3,omitempty" namespace:"s3"` GCS *CacheGCSConfig `toml:"gcs,omitempty" json:"gcs,omitempty" namespace:"gcs"` Azure *CacheAzureConfig `toml:"azure,omitempty" json:"azure,omitempty" namespace:"azure"` } func (c *Config) GetPath() string { return c.Path } func (c *Config) GetShared() bool { return c.Shared } type CacheS3Config struct { ServerAddress string `toml:"ServerAddress,omitempty" long:"server-address" env:"CACHE_S3_SERVER_ADDRESS" description:"A host:port to the used S3-compatible server"` AccessKey 
string `toml:"AccessKey,omitempty" long:"access-key" env:"CACHE_S3_ACCESS_KEY" description:"S3 Access Key"` SecretKey string `toml:"SecretKey,omitempty" long:"secret-key" env:"CACHE_S3_SECRET_KEY" description:"S3 Secret Key"` SessionToken string `toml:"SessionToken,omitempty" long:"session-token" env:"CACHE_S3_SESSION_TOKEN" description:"S3 Session Token"` BucketName string `toml:"BucketName,omitempty" long:"bucket-name" env:"CACHE_S3_BUCKET_NAME" description:"Name of the bucket where cache will be stored"` BucketLocation string `toml:"BucketLocation,omitempty" long:"bucket-location" env:"CACHE_S3_BUCKET_LOCATION" description:"Name of S3 region"` Insecure bool `toml:"Insecure,omitempty" long:"insecure" env:"CACHE_S3_INSECURE" description:"Use insecure mode (without https)"` AuthenticationType S3AuthType `toml:"AuthenticationType,omitempty" long:"authentication_type" env:"CACHE_S3_AUTHENTICATION_TYPE" description:"IAM or credentials"` ServerSideEncryption string `toml:"ServerSideEncryption,omitempty" long:"server-side-encryption" env:"CACHE_S3_SERVER_SIDE_ENCRYPTION" description:"Server side encryption type (S3, or KMS)"` ServerSideEncryptionKeyID string `toml:"ServerSideEncryptionKeyID,omitempty" long:"server-side-encryption-key-id" env:"CACHE_S3_SERVER_SIDE_ENCRYPTION_KEY_ID" description:"Server side encryption key ID (alias or Key ID or Key ARN)"` DualStack *bool `toml:"DualStack,omitempty" long:"dual-stack" env:"CACHE_S3_DUAL_STACK" description:"Enable dual-stack (IPv4 and IPv6) endpoints (default: true)" jsonschema:"oneof_type=boolean;null"` PathStyle *bool `toml:"PathStyle,omitempty" long:"path-style" env:"CACHE_S3_PATH_STYLE" description:"Use path style access (default: false)" jsonschema:"oneof_type=boolean;null"` Accelerate bool `toml:"Accelerate,omitempty" long:"accelerate" env:"CACHE_S3_ACCELERATE" description:"Enable S3 Transfer Acceleration"` RoleARN string `toml:"RoleARN,omitempty" long:"role-arn" env:"CACHE_S3_ROLE_ARN" description:"Role ARN for 
transferring cache to S3"` UploadRoleARN string `toml:"UploadRoleARN,omitempty" long:"upload-role-arn" env:"CACHE_S3_UPLOAD_ROLE_ARN" description:"Role ARN for uploading cache to S3"` AssumeRoleMaxConcurrency int `toml:"AssumeRoleMaxConcurrency,omitempty" long:"assume-role-max-concurrency" env:"CACHE_S3_ASSUME_ROLE_MAX_CONCURRENCY" description:"Maximum concurrent AssumeRole requests to AWS STS (default: 5, -1 to disable limit)"` DisableAssumeRoleCredentialsCaching bool `toml:"DisableAssumeRoleCredentialsCaching,omitempty" long:"disable-assume-role-credentials-caching" env:"CACHE_S3_DISABLE_ASSUME_ROLE_CREDENTIALS_CACHING" description:"Disable in-process caching of AssumeRole credentials"` } type S3AuthType string const ( S3AuthTypeAccessKey S3AuthType = "access-key" S3AuthTypeIAM S3AuthType = "iam" ) type S3EncryptionType string const ( S3EncryptionTypeNone S3EncryptionType = "" S3EncryptionTypeAes256 S3EncryptionType = "S3" S3EncryptionTypeKms S3EncryptionType = "KMS" S3EncryptionTypeDsseKms S3EncryptionType = "DSSE-KMS" ) func (c *CacheS3Config) AuthType() S3AuthType { authType := S3AuthType(strings.ToLower(string(c.AuthenticationType))) switch authType { case S3AuthTypeAccessKey, S3AuthTypeIAM: return authType } if authType != "" { return "" } if c.ServerAddress == "" || c.AccessKey == "" || c.SecretKey == "" { return S3AuthTypeIAM } return S3AuthTypeAccessKey } func (c *CacheS3Config) EncryptionType() S3EncryptionType { encryptionType := S3EncryptionType(strings.ToUpper(c.ServerSideEncryption)) switch encryptionType { case "": return S3EncryptionTypeNone case "S3", "AES256": return S3EncryptionTypeAes256 case "KMS", "AWS:KMS": return S3EncryptionTypeKms case "DSSE-KMS", "AWS:KMS:DSSE": return S3EncryptionTypeDsseKms } logrus.Warnf("unknown ServerSideEncryption value: %s", encryptionType) return S3EncryptionTypeNone } func (c *CacheS3Config) GetEndpoint() string { if c.ServerAddress == "" { return "" } scheme := "https" if c.Insecure { scheme = "http" } host, 
port, err := net.SplitHostPort(c.ServerAddress)
	if err != nil {
		// If SplitHostPort fails, it means there's no port specified
		// so we can use the ServerAddress as-is.
		return fmt.Sprintf("%s://%s", scheme, c.ServerAddress)
	}

	// Omit canonical ports
	if (scheme == "https" && port == "443") || (scheme == "http" && port == "80") {
		return fmt.Sprintf("%s://%s", scheme, host)
	}

	return fmt.Sprintf("%s://%s:%s", scheme, host, port)
}

// GetEndpointURL parses the configured endpoint into a *url.URL. It returns
// nil when no server address is configured or the endpoint does not parse
// (the parse error is logged).
func (c *CacheS3Config) GetEndpointURL() *url.URL {
	endpoint := c.GetEndpoint()
	if endpoint == "" {
		return nil
	}

	u, err := url.Parse(endpoint)
	if err != nil {
		logrus.Errorf("error parsing endpoint URL: %v", err)
		return nil
	}

	return u
}

// PathStyleEnabled reports whether requests need to use the legacy,
// path-style access to S3. If the value is not specified, it will
// auto-detect: it returns false when the server address supports
// virtual-host-style buckets (e.g. AWS or Google endpoints) and true
// otherwise. When no endpoint URL can be resolved, it returns false.
func (c *CacheS3Config) PathStyleEnabled() bool {
	// Preserve the previous behavior of auto-detection by default
	if c.PathStyle == nil {
		u := c.GetEndpointURL()
		if u == nil {
			return false
		}
		return !s3utils.IsVirtualHostSupported(*u, c.BucketName)
	}

	return *c.PathStyle
}

// DualStackEnabled reports whether dual-stack (IPv4+IPv6) endpoints should
// be used; it defaults to true when the option is not set.
func (c *CacheS3Config) DualStackEnabled() bool {
	if c.DualStack == nil {
		return true
	}
	return *c.DualStack
}

// CacheGCSCredentials holds the service-account identity used to sign GCS
// cache requests.
type CacheGCSCredentials struct {
	AccessID   string `toml:"AccessID,omitempty" long:"access-id" env:"CACHE_GCS_ACCESS_ID" description:"ID of GCP Service Account used to access the storage"`
	PrivateKey string `toml:"PrivateKey,omitempty" long:"private-key" env:"CACHE_GCS_PRIVATE_KEY" description:"Private key used to sign GCS requests"`
}

// CacheGCSConfig configures the GCS cache backend.
type CacheGCSConfig struct {
	CacheGCSCredentials
	CredentialsFile string `toml:"CredentialsFile,omitempty" long:"credentials-file" env:"GOOGLE_APPLICATION_CREDENTIALS" description:"File with GCP credentials, containing AccessID and PrivateKey"`
	BucketName      string `toml:"BucketName,omitempty" long:"bucket-name"
env:"CACHE_GCS_BUCKET_NAME" description:"Name of the bucket where cache will be stored"` UniverseDomain string `toml:"UniverseDomain,omitempty" long:"universe-domain" env:"CACHE_GCS_UNIVERSE_DOMAIN" description:"Universe Domain for GCS requests (e.g., googleapis.com for public cloud, or a custom universe domain)"` } type CacheAzureCredentials struct { AccountName string `toml:"AccountName,omitempty" long:"account-name" env:"CACHE_AZURE_ACCOUNT_NAME" description:"Account name for Azure Blob Storage"` AccountKey string `toml:"AccountKey,omitempty" long:"account-key" env:"CACHE_AZURE_ACCOUNT_KEY" description:"Access key for Azure Blob Storage"` } type CacheAzureConfig struct { CacheAzureCredentials ContainerName string `toml:"ContainerName,omitempty" long:"container-name" env:"CACHE_AZURE_CONTAINER_NAME" description:"Name of the Azure container where cache will be stored"` StorageDomain string `toml:"StorageDomain,omitempty" long:"storage-domain" env:"CACHE_AZURE_STORAGE_DOMAIN" description:"Domain name of the Azure storage (e.g. 
blob.core.windows.net)"`
}

================================================
FILE: cache/cacheconfig/cacheconfig_test.go
================================================

//go:build !integration

package cacheconfig_test

import (
	"testing"

	"github.com/BurntSushi/toml"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
	"gitlab.com/gitlab-org/gitlab-runner/common"
)

// TestCacheGCSConfig_UniverseDomain verifies that the optional GCS
// UniverseDomain setting round-trips through TOML decoding of a full
// runner configuration (absent, public-cloud, and custom-universe values).
func TestCacheGCSConfig_UniverseDomain(t *testing.T) {
	tests := map[string]struct {
		config         string
		expectedDomain string
		// NOTE(review): validateConfig is never invoked by this test — dead field.
		validateConfig func(t *testing.T, config *common.Config)
	}{
		"universe domain not set": {
			config: `
[[runners]]
  [runners.cache.gcs]
    BucketName = "test-bucket"
`,
			expectedDomain: "",
		},
		"universe domain set to googleapis.com": {
			config: `
[[runners]]
  [runners.cache.gcs]
    BucketName = "test-bucket"
    UniverseDomain = "googleapis.com"
`,
			expectedDomain: "googleapis.com",
		},
		"universe domain set to custom universe": {
			config: `
[[runners]]
  [runners.cache.gcs]
    BucketName = "test-bucket"
    UniverseDomain = "custom.universe.com"
`,
			expectedDomain: "custom.universe.com",
		},
	}

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			cfg := common.NewConfig()
			_, err := toml.Decode(tt.config, cfg)
			assert.NoError(t, err)

			require.Len(t, cfg.Runners, 1)
			require.NotNil(t, cfg.Runners[0].Cache)
			require.NotNil(t, cfg.Runners[0].Cache.GCS)
			assert.Equal(t, tt.expectedDomain, cfg.Runners[0].Cache.GCS.UniverseDomain)
		})
	}
}

// TestCacheS3Config_AuthType covers the auth-type inference rules:
// access-key auth requires ServerAddress, AccessKey, and SecretKey all set;
// otherwise the adapter falls back to IAM, unless an explicit
// AuthenticationType overrides the inference (invalid values yield "").
func TestCacheS3Config_AuthType(t *testing.T) {
	tests := map[string]struct {
		s3       cacheconfig.CacheS3Config
		authType cacheconfig.S3AuthType
	}{
		"Everything is empty": {
			s3: cacheconfig.CacheS3Config{
				ServerAddress:  "",
				AccessKey:      "",
				SecretKey:      "",
				BucketName:     "name",
				BucketLocation: "us-east-1a",
			},
			authType: cacheconfig.S3AuthTypeIAM,
		},
		"Both AccessKey & SecretKey are empty": {
			s3: cacheconfig.CacheS3Config{
				ServerAddress:  "s3.amazonaws.com",
				AccessKey:      "",
				SecretKey:      "",
				BucketName:     "name",
				BucketLocation: "us-east-1a",
			},
			authType: cacheconfig.S3AuthTypeIAM,
		},
		"SecretKey is empty": {
			s3: cacheconfig.CacheS3Config{
				ServerAddress:  "s3.amazonaws.com",
				AccessKey:      "TOKEN",
				SecretKey:      "",
				BucketName:     "name",
				BucketLocation: "us-east-1a",
			},
			authType: cacheconfig.S3AuthTypeIAM,
		},
		"AccessKey is empty": {
			s3: cacheconfig.CacheS3Config{
				ServerAddress:  "s3.amazonaws.com",
				AccessKey:      "",
				SecretKey:      "TOKEN",
				BucketName:     "name",
				BucketLocation: "us-east-1a",
			},
			authType: cacheconfig.S3AuthTypeIAM,
		},
		"ServerAddress is empty": {
			s3: cacheconfig.CacheS3Config{
				ServerAddress:  "",
				AccessKey:      "TOKEN",
				SecretKey:      "TOKEN",
				BucketName:     "name",
				BucketLocation: "us-east-1a",
			},
			authType: cacheconfig.S3AuthTypeIAM,
		},
		"ServerAddress & AccessKey are empty": {
			s3: cacheconfig.CacheS3Config{
				ServerAddress:  "",
				AccessKey:      "",
				SecretKey:      "TOKEN",
				BucketName:     "name",
				BucketLocation: "us-east-1a",
			},
			authType: cacheconfig.S3AuthTypeIAM,
		},
		"ServerAddress & SecretKey are empty": {
			s3: cacheconfig.CacheS3Config{
				ServerAddress:  "",
				AccessKey:      "TOKEN",
				SecretKey:      "",
				BucketName:     "name",
				BucketLocation: "us-east-1a",
			},
			authType: cacheconfig.S3AuthTypeIAM,
		},
		"Nothing is empty": {
			s3: cacheconfig.CacheS3Config{
				ServerAddress:  "s3.amazonaws.com",
				AccessKey:      "TOKEN",
				SecretKey:      "TOKEN",
				BucketName:     "name",
				BucketLocation: "us-east-1a",
			},
			authType: cacheconfig.S3AuthTypeAccessKey,
		},
		"IAM set as auth type": {
			s3: cacheconfig.CacheS3Config{
				ServerAddress:      "s3.amazonaws.com",
				AccessKey:          "TOKEN",
				SecretKey:          "TOKEN",
				AuthenticationType: cacheconfig.S3AuthTypeIAM,
				BucketName:         "name",
				BucketLocation:     "us-east-1a",
			},
			authType: cacheconfig.S3AuthTypeIAM,
		},
		"Root credentials set as auth type": {
			s3: cacheconfig.CacheS3Config{
				AccessKey:          "TOKEN",
				SecretKey:          "TOKEN",
				AuthenticationType: cacheconfig.S3AuthTypeAccessKey,
				BucketName:         "name",
				BucketLocation:     "us-east-1a",
			},
			authType: cacheconfig.S3AuthTypeAccessKey,
		},
		"Explicitly set but lowercase auth type": {
			s3: cacheconfig.CacheS3Config{
				AccessKey:          "TOKEN",
				SecretKey:          "TOKEN",
				AuthenticationType: "access-key",
				BucketName:         "name",
				BucketLocation:     "us-east-1a",
			},
			authType: cacheconfig.S3AuthTypeAccessKey,
		},
		"Explicitly set invalid auth type": {
			s3: cacheconfig.CacheS3Config{
				AccessKey:          "TOKEN",
				SecretKey:          "TOKEN",
				AuthenticationType: "invalid",
				BucketName:         "name",
				BucketLocation:     "us-east-1a",
			},
			authType: "",
		},
	}

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			// NOTE(review): arguments are swapped relative to testify's
			// (t, expected, actual) convention — failure messages will
			// label values backwards. Behaviour of the check is unaffected.
			assert.Equal(t, tt.s3.AuthType(), tt.authType)
		})
	}
}

// TestCacheS3Config_DualStack checks that dual-stack endpoints default to
// enabled and that an explicit pointer value overrides the default.
func TestCacheS3Config_DualStack(t *testing.T) {
	useDualStack := true
	disableDualStack := false

	tests := map[string]struct {
		s3       cacheconfig.CacheS3Config
		expected bool
	}{
		"Dual Stack omitted": {
			s3:       cacheconfig.CacheS3Config{},
			expected: true,
		},
		"Dual Stack set to true": {
			s3:       cacheconfig.CacheS3Config{DualStack: &useDualStack},
			expected: true,
		},
		"Dual Stack set to false": {
			s3:       cacheconfig.CacheS3Config{DualStack: &disableDualStack},
			expected: false,
		},
	}

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			assert.Equal(t, tt.expected, tt.s3.DualStackEnabled())
		})
	}
}

// TestCacheS3Config_Encryption maps the case-insensitive
// ServerSideEncryption aliases ("S3"/"aes256", "kms"/"aws:kms",
// "DSSE-KMS"/"aws:kms:dsse") onto the internal encryption type, with
// unknown values falling back to no encryption.
func TestCacheS3Config_Encryption(t *testing.T) {
	testARN := "aws:arn:::1234"

	tests := map[string]struct {
		s3                     cacheconfig.CacheS3Config
		expectedEncryptionType cacheconfig.S3EncryptionType
		expectedKeyID          string
	}{
		"no encryption": {
			s3:                     cacheconfig.CacheS3Config{},
			expectedEncryptionType: cacheconfig.S3EncryptionTypeNone,
		},
		"S3 encryption": {
			s3:                     cacheconfig.CacheS3Config{ServerSideEncryption: "S3"},
			expectedEncryptionType: cacheconfig.S3EncryptionTypeAes256,
		},
		"unknown encryption": {
			s3:                     cacheconfig.CacheS3Config{ServerSideEncryption: "BLAH"},
			expectedEncryptionType: cacheconfig.S3EncryptionTypeNone,
		},
		"AES256 encryption": {
			s3:                     cacheconfig.CacheS3Config{ServerSideEncryption: "aes256"},
			expectedEncryptionType: cacheconfig.S3EncryptionTypeAes256,
		},
		"KMS encryption": {
			s3:                     cacheconfig.CacheS3Config{ServerSideEncryption: "kms", ServerSideEncryptionKeyID: testARN},
			expectedEncryptionType: cacheconfig.S3EncryptionTypeKms,
			expectedKeyID:          testARN,
		},
		"AWS:KMS encryption": {
			s3:                     cacheconfig.CacheS3Config{ServerSideEncryption: "aws:kms", ServerSideEncryptionKeyID: testARN},
			expectedEncryptionType: cacheconfig.S3EncryptionTypeKms,
			expectedKeyID:          testARN,
		},
		"DSSE-KMS encryption": {
			s3:                     cacheconfig.CacheS3Config{ServerSideEncryption: "DSSE-KMS", ServerSideEncryptionKeyID: testARN},
			expectedEncryptionType: cacheconfig.S3EncryptionTypeDsseKms,
			expectedKeyID:          testARN,
		},
		"aws:kms:dsse encryption": {
			s3:                     cacheconfig.CacheS3Config{ServerSideEncryption: "aws:kms:dsse", ServerSideEncryptionKeyID: testARN},
			expectedEncryptionType: cacheconfig.S3EncryptionTypeDsseKms,
			expectedKeyID:          testARN,
		},
	}

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			assert.Equal(t, tt.expectedEncryptionType, tt.s3.EncryptionType())
			assert.Equal(t, tt.expectedKeyID, tt.s3.ServerSideEncryptionKeyID)
		})
	}
}

// TestCacheS3Config_Endpoint exercises endpoint URL construction: scheme
// selection (Insecure), default-port stripping, and path-style vs.
// virtual-host-style addressing for AWS, GCS, and custom endpoints.
func TestCacheS3Config_Endpoint(t *testing.T) {
	disabled := false

	tests := map[string]struct {
		s3                cacheconfig.CacheS3Config
		expected          string
		expectedPathStyle bool
	}{
		"no server address": {
			s3:                cacheconfig.CacheS3Config{},
			expected:          "",
			expectedPathStyle: false,
		},
		"bad hostname": {
			s3:                cacheconfig.CacheS3Config{ServerAddress: "local\x00host:8080"},
			expected:          "",
			expectedPathStyle: false,
		},
		"HTTPS server address": {
			s3:                cacheconfig.CacheS3Config{ServerAddress: "minio.example.com:8080"},
			expected:          "https://minio.example.com:8080",
			expectedPathStyle: true,
		},
		"HTTP server address": {
			s3:                cacheconfig.CacheS3Config{ServerAddress: "minio.example.com:8080", Insecure: true},
			expected:          "http://minio.example.com:8080",
			expectedPathStyle: true,
		},
		"AWS us-east-2 endpoint": {
			s3:                cacheconfig.CacheS3Config{ServerAddress: "s3.us-east-2.amazonaws.com"},
			expected:          "https://s3.us-east-2.amazonaws.com",
			expectedPathStyle: false,
		},
		"AWS us-east-2 endpoint with bucket": {
			s3:                cacheconfig.CacheS3Config{ServerAddress: "my-bucket.s3.us-east-2.amazonaws.com", BucketName: "my-bucket", BucketLocation: "us-east-2"},
			expected:          "https://my-bucket.s3.us-east-2.amazonaws.com",
			expectedPathStyle: true,
		},
		"AWS FIPS endpoint": {
			s3:                cacheconfig.CacheS3Config{ServerAddress: "s3-fips.us-west-1.amazonaws.com"},
			expected:          "https://s3-fips.us-west-1.amazonaws.com",
			expectedPathStyle: false,
		},
		"Google endpoint": {
			s3:                cacheconfig.CacheS3Config{ServerAddress: "storage.googleapis.com"},
			expected:          "https://storage.googleapis.com",
			expectedPathStyle: false,
		},
		"Custom HTTPS server on standard port": {
			s3:                cacheconfig.CacheS3Config{ServerAddress: "minio.example.com:443", PathStyle: &disabled},
			expected:          "https://minio.example.com",
			expectedPathStyle: false,
		},
		"Custom HTTP server on standard port": {
			s3:                cacheconfig.CacheS3Config{ServerAddress: "minio.example.com:80", Insecure: true, PathStyle: &disabled},
			expected:          "http://minio.example.com",
			expectedPathStyle: false,
		},
		"Custom HTTPS server on HTTP port": {
			s3:                cacheconfig.CacheS3Config{ServerAddress: "minio.example.com:80", PathStyle: &disabled},
			expected:          "https://minio.example.com:80",
			expectedPathStyle: false,
		},
		"Custom HTTPS server with path style disabled": {
			s3:                cacheconfig.CacheS3Config{ServerAddress: "minio.example.com:8080", PathStyle: &disabled},
			expected:          "https://minio.example.com:8080",
			expectedPathStyle: false,
		},
	}

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			assert.Equal(t, tt.expectedPathStyle, tt.s3.PathStyleEnabled())
			if tt.expected != "" {
				assert.Equal(t, tt.expected, tt.s3.GetEndpoint())
				assert.Equal(t, tt.expected, tt.s3.GetEndpointURL().String())
			} else {
				assert.Nil(t, tt.s3.GetEndpointURL())
			}
		})
	}
}

================================================
FILE: cache/cachekey/cachekey.go
================================================

package cachekey

import (
	"fmt"
	"path"
	"strings"
	"unicode"
)

// normaliser decodes URL-encoded slashes and dots, and converts backslashes to
// forward slashes in a single pass.
var normaliser = strings.NewReplacer(
	"%2f", "/",
	"%2F", "/",
	"%2e", ".",
	"%2E", ".",
	`\`, "/",
)

// Sanitize validates and normalises a cache key.
// Cache keys may contain path separators. The function:
//   - decodes URL-encoded '/' (%2f) and '.' (%2e) characters
//   - replaces all '\' with '/'
//   - resolves path traversals (., ..) within a virtual root
//   - strips trailing whitespace from the rightmost path segments,
//     removing any that become empty after trimming
//
// An empty input is returned unchanged with no error; a key that collapses
// to nothing after cleaning is reported as an error.
func Sanitize(cacheKey string) (string, error) {
	if cacheKey == "" {
		return "", nil
	}

	// Decode percent-encoded chars and normalise separators, then
	// resolve traversals against a virtual root so ".." can never
	// escape beyond the root.
	cleaned := path.Clean("/" + normaliser.Replace(cacheKey))

	// Strip the leading "/" we added, split into segments, then walk
	// backwards trimming trailing whitespace from the rightmost
	// segments—dropping any that become empty.
	parts := strings.Split(cleaned[1:], "/")
	n := len(parts)
	for n > 0 {
		parts[n-1] = strings.TrimRightFunc(parts[n-1], unicode.IsSpace)
		if parts[n-1] != "" {
			break
		}
		n--
	}

	key := strings.Join(parts[:n], "/")
	if key == "" {
		return "", fmt.Errorf("cache key %q could not be sanitized", cacheKey)
	}
	return key, nil
}

================================================
FILE: cache/cachekey/cachekey_test.go
================================================

//go:build !integration

package cachekey

import (
	"fmt"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestSanitize is a table test over raw cache keys: each case either maps
// to an exact sanitised key or must be rejected with an error.
func TestSanitize(t *testing.T) {
	tests := []struct {
		rawKey      string
		expectedKey string
		wantErr     bool
	}{
		// ── Empty / identity ────────────────────────────────────────
		{rawKey: ""},
		{rawKey: "fallback_key", expectedKey: "fallback_key"},
		{rawKey: "some-job/some-ref", expectedKey: "some-job/some-ref"},
		{rawKey: ".../....", expectedKey: ".../...."},
		{rawKey: "...", expectedKey: "..."},

		// ── Trailing whitespace / slashes / backslashes ─────────────
		{rawKey: "fallback_key/", expectedKey: "fallback_key"},
		{rawKey: "fallback_key ", expectedKey: "fallback_key"},
		{rawKey: "fallback_key\\", expectedKey: "fallback_key"},
		{rawKey: "fallback_key/ \\", expectedKey: "fallback_key"},
		{rawKey: "fallback_key/ / \\ \\", expectedKey: "fallback_key"},
		{rawKey: "fallback_key/o", expectedKey: "fallback_key/o"},
		{rawKey: "fallback_key / \\o", expectedKey: "fallback_key / /o"},
		{rawKey: "\t foo bar \t\r", expectedKey: "\t foo bar"},
		{rawKey: " foo / bar ", expectedKey: " foo / bar"},
		{rawKey: "foo\r", expectedKey: "foo"},
		{rawKey: "foo\t", expectedKey: "foo"},
		{rawKey: "foo \t \r ", expectedKey: "foo"},

		// ── Completely unsanitisable ────────────────────────────────
		{rawKey: "\\", wantErr: true},
		{rawKey: "\\.", wantErr: true},
		{rawKey: "/", wantErr: true},
		{rawKey: " ", wantErr: true},
		{rawKey: ".", wantErr: true},
		{rawKey: "..", wantErr: true},
		{rawKey: " / ", wantErr: true},
		{rawKey: "//", wantErr: true},
		{rawKey: `//\`, wantErr: true},
		{rawKey: "../.", wantErr: true},
		{rawKey: "foo\\bar\\..\\..", wantErr: true},
		{rawKey: "foo/bar/../..", wantErr: true},
		{rawKey: " \t\r\n", wantErr: true},

		// ── URL-encoded slashes (%2f / %2F) ────────────────────────
		{rawKey: "something %2F something", expectedKey: "something / something"},
		{rawKey: "something %2f something", expectedKey: "something / something"},
		{rawKey: "some%2f../job/some/ref/.", expectedKey: "job/some/ref"},

		// ── URL-encoded dots (%2e / %2E) ───────────────────────────
		{rawKey: "%2E", wantErr: true},
		{rawKey: "%2E%2E", wantErr: true},
		{rawKey: "%2E%2E%2E", expectedKey: "..."},
		{rawKey: "%2e", wantErr: true},
		{rawKey: "%2e%2E", wantErr: true},
		{rawKey: ".%2E", wantErr: true},
		{rawKey: "%2e.", wantErr: true},
		{rawKey: "%2E%2e%2E", expectedKey: "..."},

		// %5C is left as-is (literal percent-encoded backslash is fine).
		{rawKey: "%5C", expectedKey: "%5C"},
		{rawKey: "%5c", expectedKey: "%5c"},

		// ── Forward-slash path traversal ────────────────────────────
		{rawKey: "foo/./bar", expectedKey: "foo/bar"},
		{rawKey: "foo/blipp/../bar", expectedKey: "foo/bar"},
		{rawKey: "/foo/bar", expectedKey: "foo/bar"},
		{rawKey: "//foo/bar", expectedKey: "foo/bar"},
		{rawKey: "./foo/bar", expectedKey: "foo/bar"},
		{rawKey: "../foo/bar", expectedKey: "foo/bar"},
		{rawKey: ".../foo/bar", expectedKey: ".../foo/bar"},
		{rawKey: "foo/bar/..", expectedKey: "foo"},
		{rawKey: "foo/bar/../../../.././blerp", expectedKey: "blerp"},
		{rawKey: "a/b/c/../../d", expectedKey: "a/d"},

		// ── Backslash path traversal ────────────────────────────────
		{rawKey: `job\name/git\ref`, expectedKey: "job/name/git/ref"},
		{rawKey: "foo\\.\\bar", expectedKey: "foo/bar"},
		{rawKey: "foo\\blipp\\..\\bar", expectedKey: "foo/bar"},
		{rawKey: "\\foo\\bar", expectedKey: "foo/bar"},
		{rawKey: "\\\\foo\\bar", expectedKey: "foo/bar"},
		{rawKey: ".\\foo\\bar", expectedKey: "foo/bar"},
		{rawKey: "..\\foo\\bar", expectedKey: "foo/bar"},
		{rawKey: "...\\foo\\bar", expectedKey: ".../foo/bar"},
		{rawKey: "foo\\bar\\..", expectedKey: "foo"},
		{rawKey: "foo\\bar\\..\\..\\..\\..\\.\\blerp", expectedKey: "blerp"},

		// ── Space-only segments & misc ──────────────────────────────
		{rawKey: "foo/ /bar", expectedKey: "foo/ /bar"},
		{rawKey: "foo/ /", expectedKey: "foo"},
		{rawKey: "foo/ / /", expectedKey: "foo"},
	}

	for i, tt := range tests {
		name := fmt.Sprintf("%d:%q", i, tt.rawKey)
		t.Run(name, func(t *testing.T) {
			actual, err := Sanitize(tt.rawKey)
			if tt.wantErr {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
			}
			assert.Equal(t, tt.expectedKey, actual)
		})
	}
}

// TestSanitizeInvariants checks properties that must hold for every sanitised
// key, regardless of input.
func TestSanitizeInvariants(t *testing.T) { cases := []string{ "a", "a/b", "../a", "a/../b", "a/./b", "a\\b", `a\..\\b`, "/a/b/", " a ", "...", "%2e%2e/%2f", "a/b/c/../../d/e", } for _, raw := range cases { t.Run(raw, func(t *testing.T) { key, _ := Sanitize(raw) if key == "" { return // unsanitisable, nothing to check } assert.False(t, strings.HasPrefix(key, "/"), "must not start with /") assert.False(t, key == ".." || strings.HasPrefix(key, "../"), "must not start with .. segment") assert.False(t, strings.Contains(key, `\`), "must not contain backslash") assert.False(t, strings.HasSuffix(key, " "), "must not end with space") assert.False(t, strings.HasSuffix(key, "/"), "must not end with /") // No segment should be "." or ".." for _, seg := range strings.Split(key, "/") { assert.NotEqual(t, ".", seg, "must not contain '.' segment") assert.NotEqual(t, "..", seg, "must not contain '..' segment") } }) } } // TestSanitizeIdempotent verifies that sanitising an already-clean key // returns it unchanged with no error. 
func TestSanitizeIdempotent(t *testing.T) {
	inputs := []string{
		"fallback_key",
		"some-job/some-ref",
		"a/b/c",
		"...",
		".../foo/bar",
	}

	for _, raw := range inputs {
		t.Run(raw, func(t *testing.T) {
			first, err1 := Sanitize(raw)
			assert.NoError(t, err1)
			second, err2 := Sanitize(first)
			assert.NoError(t, err2)
			assert.Equal(t, first, second, "sanitise should be idempotent")
		})
	}
}

================================================
FILE: cache/credentials_adapter.go
================================================

package cache

import (
	"fmt"
	"sync"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

// CredentialsAdapter exposes provider-specific cache credentials as a flat
// string map.
type CredentialsAdapter interface {
	GetCredentials() map[string]string
}

// credentialsFactories is the process-wide registry; adapters register
// themselves via CredentialsFactories().Register (typically from init()).
var credentialsFactories = &CredentialsFactoriesMap{}

// CredentialsFactories returns the global credentials-factory registry.
func CredentialsFactories() *CredentialsFactoriesMap {
	return credentialsFactories
}

// CredentialsFactory builds a CredentialsAdapter from a cache configuration.
type CredentialsFactory func(config *cacheconfig.Config) (CredentialsAdapter, error)

// CredentialsFactoriesMap is a mutex-guarded map of cache-type name to
// credentials factory.
type CredentialsFactoriesMap struct {
	internal map[string]CredentialsFactory
	lock     sync.Mutex
}

// Register adds a factory under typeName. It returns an error if a factory
// for that name is already present.
func (m *CredentialsFactoriesMap) Register(typeName string, factory CredentialsFactory) error {
	m.lock.Lock()
	defer m.lock.Unlock()

	// Lazily initialize the map on first registration.
	if len(m.internal) == 0 {
		m.internal = make(map[string]CredentialsFactory)
	}

	_, ok := m.internal[typeName]
	if ok {
		return fmt.Errorf("credentials adapter %q already registered", typeName)
	}
	m.internal[typeName] = factory

	return nil
}

// Find returns the factory registered under typeName, or an error if none
// was registered.
func (m *CredentialsFactoriesMap) Find(typeName string) (CredentialsFactory, error) {
	m.lock.Lock()
	defer m.lock.Unlock()

	factory := m.internal[typeName]
	if factory == nil {
		return nil, fmt.Errorf("factory for credentials adapter %q not registered", typeName)
	}

	return factory, nil
}

// CreateCredentialsAdapter looks up the factory for cacheConfig.Type and
// invokes it, wrapping both lookup and construction failures.
func CreateCredentialsAdapter(cacheConfig *cacheconfig.Config) (CredentialsAdapter, error) {
	create, err := CredentialsFactories().Find(cacheConfig.Type)
	if err != nil {
		return nil, fmt.Errorf("credentials adapter factory not found: %w", err)
	}

	adapter, err := create(cacheConfig)
	if err != nil {
		return nil, fmt.Errorf("credentials adapter could not be initialized: %w", err)
	}

	return adapter, nil
}

================================================
FILE: cache/credentials_adapter_test.go
================================================

//go:build !integration

package cache

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/assert"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

type credentialsFactoryTestCase struct {
	adapter          CredentialsAdapter
	errorOnFactorize error
	expectedError    string
	expectedAdapter  CredentialsAdapter
}

// prepareMockedCredentialsFactoriesMap swaps the global registry for an
// empty one and returns a restore func for deferred cleanup.
func prepareMockedCredentialsFactoriesMap() func() {
	oldFactories := credentialsFactories
	credentialsFactories = &CredentialsFactoriesMap{}

	return func() {
		credentialsFactories = oldFactories
	}
}

// makeTestCredentialsFactory returns a factory that yields the test case's
// adapter or its configured factorize error.
func makeTestCredentialsFactory(test credentialsFactoryTestCase) CredentialsFactory {
	return func(config *cacheconfig.Config) (CredentialsAdapter, error) {
		if test.errorOnFactorize != nil {
			return nil, test.errorOnFactorize
		}
		return test.adapter, nil
	}
}

func TestCreateCredentialsAdapter(t *testing.T) {
	adapterMock := NewMockCredentialsAdapter(t)

	tests := map[string]credentialsFactoryTestCase{
		"adapter doesn't exist": {
			adapter:          nil,
			errorOnFactorize: nil,
			expectedAdapter:  nil,
			expectedError:    `credentials adapter factory not found: factory for credentials adapter "test" not registered`,
		},
		"adapter exists": {
			adapter:          adapterMock,
			errorOnFactorize: nil,
			expectedAdapter:  adapterMock,
			expectedError:    "",
		},
		"adapter errors on factorize": {
			adapter:          adapterMock,
			errorOnFactorize: errors.New("test error"),
			expectedAdapter:  nil,
			expectedError:    `credentials adapter could not be initialized: test error`,
		},
	}

	for tn, tc := range tests {
		t.Run(tn, func(t *testing.T) {
			cleanupFactoriesMap := prepareMockedCredentialsFactoriesMap()
			defer cleanupFactoriesMap()

			adapterTypeName := "test"
			if tc.adapter != nil {
				err := credentialsFactories.Register(adapterTypeName, makeTestCredentialsFactory(tc))
				assert.NoError(t, err)
			}

			// A second registration ensures lookup selects by name rather
			// than returning an arbitrary entry.
			_ = credentialsFactories.Register(
				"additional-adapter",
				func(config *cacheconfig.Config) (CredentialsAdapter, error) {
					return NewMockCredentialsAdapter(t), nil
				})

			config := &cacheconfig.Config{
				Type: adapterTypeName,
			}

			adapter, err := CreateCredentialsAdapter(config)
			if tc.expectedError == "" {
				assert.NoError(t, err)
			} else {
				assert.EqualError(t, err, tc.expectedError)
			}
			assert.Equal(t, tc.expectedAdapter, adapter)
		})
	}
}

func TestCredentialsFactoryDoubledRegistration(t *testing.T) {
	adapterTypeName := "test"
	fakeFactory := func(config *cacheconfig.Config) (CredentialsAdapter, error) {
		return nil, nil
	}

	f := &CredentialsFactoriesMap{}
	err := f.Register(adapterTypeName, fakeFactory)
	assert.NoError(t, err)
	assert.Len(t, f.internal, 1)

	// Registering the same name twice must fail and leave the map unchanged.
	err = f.Register(adapterTypeName, fakeFactory)
	assert.Error(t, err)
	assert.Len(t, f.internal, 1)
}

================================================
FILE: cache/gcs/adapter.go
================================================

package gcs

import (
	"context"
	"fmt"
	"net/http"
	"net/url"
	"strings"
	"time"

	"cloud.google.com/go/storage"
	"github.com/sirupsen/logrus"

	"gitlab.com/gitlab-org/gitlab-runner/cache"
	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

// signedURLGenerator matches storage.SignedURL's signature; indirected so
// tests can substitute a fake generator.
type signedURLGenerator func(bucket string, name string, opts *storage.SignedURLOptions) (string, error)

// gcsAdapter implements cache.Adapter for Google Cloud Storage using
// pre-signed URLs.
type gcsAdapter struct {
	timeout                time.Duration
	config                 *cacheconfig.CacheGCSConfig
	objectName             string
	maxUploadedArchiveSize int64
	metadata               map[string]string

	generateSignedURL   signedURLGenerator
	credentialsResolver credentialsResolver
}

// GetDownloadURL returns a pre-signed GET URL for the cache object.
func (a *gcsAdapter) GetDownloadURL(ctx context.Context) cache.PresignedURL {
	return cache.PresignedURL{URL: a.presignURL(ctx, http.MethodGet, "")}
}

// GetHeadURL returns a pre-signed HEAD URL for the cache object.
func (a *gcsAdapter) GetHeadURL(ctx context.Context) cache.PresignedURL {
	return cache.PresignedURL{URL: a.presignURL(ctx, http.MethodHead, "")}
}

// GetUploadURL returns a pre-signed PUT URL plus the headers the uploader
// must send (size limit, metadata).
func (a *gcsAdapter) GetUploadURL(ctx context.Context) cache.PresignedURL {
	return cache.PresignedURL{
		URL:     a.presignURL(ctx, http.MethodPut, "application/octet-stream"),
		Headers: a.GetUploadHeaders(),
	}
}

func (a *gcsAdapter)
GetUploadHeaders() http.Header {
	headers := http.Header{}
	// Enforce the archive size limit via GCS's content-length-range
	// signed-header mechanism.
	if a.maxUploadedArchiveSize > 0 {
		headers.Set("X-Goog-Content-Length-Range", fmt.Sprintf("0,%d", a.maxUploadedArchiveSize))
	}
	// Custom object metadata is carried via x-goog-meta-* headers.
	for k, v := range a.metadata {
		headers.Set("x-goog-meta-"+k, v)
	}
	return headers
}

// GetGoCloudURL is not supported by this adapter; it always returns a zero
// value with no error.
func (a *gcsAdapter) GetGoCloudURL(_ context.Context, _ bool) (cache.GoCloudURL, error) {
	return cache.GoCloudURL{}, nil
}

// WithMetadata sets the object metadata to attach on upload.
func (a *gcsAdapter) WithMetadata(metadata map[string]string) {
	a.metadata = metadata
}

// presignURL resolves credentials and generates a signed URL for the given
// HTTP method. Failures are logged and reported as a nil URL.
func (a *gcsAdapter) presignURL(ctx context.Context, method string, contentType string) *url.URL {
	if a.config.BucketName == "" {
		logrus.Error("BucketName can't be empty")
		return nil
	}

	err := a.credentialsResolver.Resolve()
	if err != nil {
		logrus.Errorf("error while resolving GCS credentials: %v", err)
		return nil
	}

	credentials := a.credentialsResolver.Credentials()

	suo := storage.SignedURLOptions{
		GoogleAccessID: credentials.AccessID,
		Method:         method,
		Expires:        time.Now().Add(a.timeout),
		ContentType:    contentType,
	}

	// Upload headers must be part of the signature so GCS rejects requests
	// that omit or alter them.
	if method == http.MethodPut {
		suo.Headers = []string{}
		for key, values := range a.GetUploadHeaders() {
			suo.Headers = append(suo.Headers, fmt.Sprintf("%s:%s", key, strings.Join(values, ";")))
		}
	}

	if credentials.PrivateKey != "" {
		suo.PrivateKey = []byte(credentials.PrivateKey)
	} else {
		// Without a key, fall back to the IAM SignBlob API (e.g. on GCE).
		logrus.Debug("No private key was provided for GCS cache. Attempting to use instance credentials.")
		suo.SignBytes = a.credentialsResolver.SignBytesFunc(ctx)
	}

	rawURL, err := a.generateSignedURL(a.config.BucketName, a.objectName, &suo)
	if err != nil {
		logrus.Errorf("error while generating GCS pre-signed URL: %v", err)
		return nil
	}

	URL, err := url.Parse(rawURL)
	if err != nil {
		logrus.Errorf("error while parsing generated URL: %v", err)
		return nil
	}

	return URL
}

// New builds a GCS cache adapter for the given configuration, timeout, and
// object name. It fails if the GCS section is missing or the credentials
// resolver cannot be initialized.
func New(config *cacheconfig.Config, timeout time.Duration, objectName string) (cache.Adapter, error) {
	gcs := config.GCS
	if gcs == nil {
		return nil, fmt.Errorf("missing GCS configuration")
	}

	cr, err := credentialsResolverInitializer(gcs)
	if err != nil {
		return nil, fmt.Errorf("error while initializing GCS credentials resolver: %w", err)
	}

	a := &gcsAdapter{
		config:                 gcs,
		timeout:                timeout,
		objectName:             objectName,
		maxUploadedArchiveSize: config.MaxUploadedArchiveSize,
		generateSignedURL:      storage.SignedURL,
		credentialsResolver:    cr,
	}

	return a, nil
}

func init() {
	err := cache.Factories().Register("gcs", New)
	if err != nil {
		panic(err)
	}
}

================================================
FILE: cache/gcs/adapter_test.go
================================================

//go:build !integration

package gcs

import (
	"context"
	"errors"
	"fmt"
	"net/http"
	"testing"
	"time"

	"cloud.google.com/go/storage"
	"github.com/sirupsen/logrus/hooks/test"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/cache"
	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

// Test fixtures; the private key below is a throwaway test-only key.
var (
	accessID   = "test-access-id@X.iam.gserviceaccount.com"
	privateKey = `-----BEGIN RSA PRIVATE KEY-----
MIIEpQIBAAKCAQEAzIrvApxNX3VxH5eYe4vI2kLTqOA9uFTV4clGy8uzQsGQvMjl
frTWCffayxaSvoKxPlvUYbecYpqqqaByLTE+kSDU/D44yrCiLAyWHWXYGZqfEMEG
uHBg4fJK6KcIXlJ3Hp3EGTPw92sCKKzLXyoY7mNN9iP8mnshc39wjdrqm2YgKvQU
ZWDxIL/MTtLcWyK07zJ2RamilcjpKtQL5GFgvHCsV1CvQHuKtmZF5kfHlD2E/e+I
uEg+fntGkKJpDYtSn1fbLcg/ctFJKQBLfAaJ59Hgyewd8fKveJ6Vn1C7gCXagMPb
q54RS8J0dolPaxUtRbzGMJ5Amag8m3dm6U3FbwIDAQABAoIBAQCxC+U8Vjymzwoe
9WIYNnOhcMyy1X63Cj+j00wDZQuCUffNYPs8xJysPizVM3HLk2aF+oiIGJ01wHjO
oMGTmpd0mX2h5N3VnDSTekWJprj52Jusrdf6V9OUX9w1KzeUJT9Ucezmf84o6ygQ
OxlCAzdXSP+XeajRspjO11V+hCokXSICAMMnUYyqT+Yr34YldjpVJ3VWFHipByww
1BCHBveJuH4wgVW4QICDKBzzYyFCqi8kFFv8ijQ9QOAD2xkVYiP8sOR1K6h/FuHN
KV+axHtQjkYgOlyYN7/oe9L0XroCa4h7XibcWLuLQ56G3oBzTFur0la3A1SuKLGm
LwBfeVpxAoGBAPCKUiqan24h8RgscEXtbACVa3WmEmOe4qqjnEChof8U5xP4YdfZ
cg+k7eBqXBgVtmxozJOQxcPwkZrHIRP59d2h8vjcjOBrMeI3D9BCjTKGYySv0iRT
FI0akA0c0Ec7utN4t7AfY7sUpx+wvX/klYy5bsIzOceU/9rYYoudXLnZAoGBANmw
VWykOgJZLv8aSTLCDEl2WV6nsl1jRYONVzlthcgQ1wpdgAJvLoTJMuXuSzOQQbUa
08Zm2LhbDErX7YA8MslaiQERSfedV/EXjZn86CBw6wB4IPv8uWh9zSK7E4IH4Den
Ow2RE5XjEDiyMA2PUCAGqVEmF/V4nRCFvEfS52SHAoGBAI56MA9CRTsz6Z3a/Km+
5yE1YFBwjSXq//H5NV1nIBB6riE7F6GGEDTKCYjLFz/A5Kw0KzEhKLNV9LkMSECP
551fBw93fA6WEBchbEF8miwaQ/GAH2Yau+qUmEzcC1aWP6RxNcSh4y32HsP7qVNu
71JKqBtpwkjArghP8ZcnH7yJAoGBAJnHDxFoEfKGvcRH9V195uAeUpOjM0T1U63S
ssNGszLZco9H7Z3KnLoAx4vWAhmy1jfxc5i8HmxdJRnZ31SvMdE7u3ydkfrxk6Yk
VUtqdTA1lE0Ij4Ryyycdd0QJk4ZPufyWjgjPa15+wH7MoVVy388/5WwF1Pb69Tku
wAqc2gkRAoGAcj8a+peaNKa1d5EPE0CtTBUypupZh/R1ewTC9y7OyBPczYhxN5NQ
vvm6J1WGbnxmuhzzvGNNExeZx9dfGLmcvSAvrweiFbi2yHAc1cBLBkc5/CqfS6QW
336Qe2lgsM61/jrYYYqu7W8l6W2juCz0SPqml6rugsP8r6IMJxfziO8=
-----END RSA PRIVATE KEY-----`
	bucketName             = "test"
	objectName             = "key"
	defaultTimeout         = 1 * time.Hour
	maxUploadedArchiveSize = int64(100)
)

// defaultGCSCache returns a minimal GCS cache configuration used as the
// baseline for each test case.
func defaultGCSCache() *cacheconfig.Config {
	return &cacheconfig.Config{
		Type: "gcs",
		GCS: &cacheconfig.CacheGCSConfig{
			BucketName: bucketName,
		},
	}
}

type adapterOperationInvalidConfigTestCase struct {
	noGCSConfig                              bool
	errorOnCredentialsResolverInitialization bool
	credentialsResolverResolveError          bool

	accessID      string
	privateKey    string
	bucketName    string
	expectedError string
}

// prepareMockedCredentialsResolverInitializer swaps the package-level
// resolver initializer for one driven by the test case, restoring the
// original via t.Cleanup.
func prepareMockedCredentialsResolverInitializer(t *testing.T, tc adapterOperationInvalidConfigTestCase) {
	oldCredentialsResolverInitializer := credentialsResolverInitializer
	credentialsResolverInitializer = func(config *cacheconfig.CacheGCSConfig) (*defaultCredentialsResolver, error) {
		if tc.errorOnCredentialsResolverInitialization {
			return nil, errors.New("test error")
		}
		return newDefaultCredentialsResolver(config)
	}
	t.Cleanup(func() {
		credentialsResolverInitializer = oldCredentialsResolverInitializer
	})
}

// prepareMockedCredentialsResolverForInvalidConfig installs a mock resolver
// whose Resolve/Credentials/SignBytesFunc behaviour follows the test case.
func prepareMockedCredentialsResolverForInvalidConfig(t *testing.T, adapter *gcsAdapter, tc adapterOperationInvalidConfigTestCase) {
	cr := newMockCredentialsResolver(t)
	resolveCall := cr.On("Resolve").Maybe()
	if tc.credentialsResolverResolveError {
		resolveCall.Return(fmt.Errorf("test error"))
	} else {
		resolveCall.Return(nil)
	}

	cr.On("Credentials").Return(&cacheconfig.CacheGCSCredentials{
		AccessID:   tc.accessID,
		PrivateKey: tc.privateKey,
	}).Maybe()

	cr.On("SignBytesFunc", mock.Anything).Return(func(payload []byte) ([]byte, error) {
		return []byte("output"), nil
	}).Maybe()

	adapter.credentialsResolver = cr
}

// testAdapterOperationWithInvalidConfig runs one presign operation and
// asserts it yields no URL while logging the expected error.
func testAdapterOperationWithInvalidConfig(
	t *testing.T,
	name string,
	tc adapterOperationInvalidConfigTestCase,
	adapter *gcsAdapter,
	operation func(context.Context) cache.PresignedURL,
) {
	t.Run(name, func(t *testing.T) {
		prepareMockedCredentialsResolverForInvalidConfig(t, adapter, tc)

		hook := test.NewGlobal()
		u := operation(t.Context())
		assert.Nil(t, u.URL)

		message, err := hook.LastEntry().String()
		require.NoError(t, err)
		assert.Contains(t, message, tc.expectedError)
	})
}

func TestAdapterOperation_InvalidConfig(t *testing.T) {
	tests := map[string]adapterOperationInvalidConfigTestCase{
		"no-gcs-config": {
			noGCSConfig: true,
			bucketName:  bucketName,
			// NOTE(review): this expectedError value is unused — the
			// noGCSConfig branch below asserts the error directly.
			expectedError: "Missing GCS configuration",
		},
		"error-on-credentials-resolver-initialization": {
			errorOnCredentialsResolverInitialization: true,
		},
		"credentials-resolver-resolve-error": {
			credentialsResolverResolveError: true,
			bucketName:                      bucketName,
			expectedError:                   "error while resolving GCS credentials: test error",
		},
		"no-credentials": {
			bucketName:    bucketName,
			expectedError: "storage: missing required GoogleAccessID",
		},
		"no-access-id": {
			privateKey:    privateKey,
			bucketName:    bucketName,
			expectedError: "storage: missing required GoogleAccessID",
		},
		"bucket-not-specified": {
			accessID:      "access-id",
			privateKey:    privateKey,
			expectedError: "BucketName can't be empty",
		},
	}

	for name, tc := range tests {
		t.Run(name, func(t *testing.T) {
			prepareMockedCredentialsResolverInitializer(t, tc)

			config := defaultGCSCache()
			if tc.noGCSConfig {
				config.GCS = nil
			} else {
				config.GCS.BucketName = tc.bucketName
			}

			a, err := New(config, defaultTimeout, objectName)
			if tc.noGCSConfig {
				assert.Nil(t, a)
				assert.EqualError(t, err, "missing GCS configuration")
				return
			}
			if tc.errorOnCredentialsResolverInitialization {
				assert.Nil(t, a)
				assert.EqualError(t, err, "error while initializing GCS credentials resolver: test error")
				return
			}

			require.NotNil(t, a)
			require.NoError(t, err)

			adapter, ok := a.(*gcsAdapter)
			require.True(t, ok, "Adapter should be properly casted to *adapter type")

			testAdapterOperationWithInvalidConfig(t, "GetDownloadURL", tc, adapter, a.GetDownloadURL)
			testAdapterOperationWithInvalidConfig(t, "GetUploadURL", tc, adapter, a.GetUploadURL)
		})
	}
}

type adapterOperationTestCase struct {
	returnedURL            string
	returnedError          error
	assertErrorMessage     func(t *testing.T, message string)
	signBlobAPITest        bool
	maxUploadedArchiveSize int64
	metadata               map[string]string
	expectedHeaders        http.Header
}

// mockSignBytesFunc stands in for the IAM SignBlob signing callback.
func mockSignBytesFunc(_ context.Context) func([]byte) ([]byte, error) {
	return func(payload []byte) ([]byte, error) {
		return []byte("output"), nil
	}
}

// prepareMockedCredentialsResolver installs a mock resolver for the
// happy-path tests, switching to the SignBlob flow when signBlobAPITest
// is set (empty private key).
func prepareMockedCredentialsResolver(t *testing.T, adapter *gcsAdapter, tc adapterOperationTestCase) {
	cr := newMockCredentialsResolver(t)
	cr.On("Resolve").Return(nil).Once()

	pk := privateKey
	if tc.signBlobAPITest {
		pk = ""
		cr.On("SignBytesFunc", mock.Anything).Return(mockSignBytesFunc).Once()
	}

	cr.On("Credentials").Return(&cacheconfig.CacheGCSCredentials{
		AccessID:   accessID,
		PrivateKey: pk,
	}).Once()

	adapter.credentialsResolver = cr
}
// prepareMockedSignedURLGenerator replaces the adapter's URL signer with a
// stub that verifies the SignedURLOptions it receives (access ID, signing
// mechanism, HTTP method, content type) and returns the canned URL/error
// from the test case.
func prepareMockedSignedURLGenerator(
	t *testing.T,
	tc adapterOperationTestCase,
	expectedMethod string,
	expectedContentType string,
	adapter *gcsAdapter,
) {
	adapter.generateSignedURL = func(bucket string, name string, opts *storage.SignedURLOptions) (string, error) {
		require.Equal(t, accessID, opts.GoogleAccessID)
		if tc.signBlobAPITest {
			// Sign-blob API path: a SignBytes callback is used instead of a key.
			require.NotNil(t, opts.SignBytes)
			require.Nil(t, opts.PrivateKey)
		} else {
			require.Equal(t, privateKey, string(opts.PrivateKey))
			require.Nil(t, opts.SignBytes)
		}
		require.Equal(t, expectedMethod, opts.Method)
		require.Equal(t, expectedContentType, opts.ContentType)

		return tc.returnedURL, tc.returnedError
	}
}

// testAdapterOperation executes one pre-sign operation and either checks the
// logged error message (when assertErrorMessage is set) or asserts that the
// canned URL came back with nothing logged.
func testAdapterOperation(
	t *testing.T,
	tc adapterOperationTestCase,
	name string,
	expectedMethod string,
	expectedContentType string,
	adapter *gcsAdapter,
	operation func(context.Context) cache.PresignedURL,
) {
	t.Run(name, func(t *testing.T) {
		prepareMockedCredentialsResolver(t, adapter, tc)
		prepareMockedSignedURLGenerator(t, tc, expectedMethod, expectedContentType, adapter)

		hook := test.NewGlobal()
		u := operation(t.Context())

		if tc.assertErrorMessage != nil {
			message, err := hook.LastEntry().String()
			require.NoError(t, err)
			tc.assertErrorMessage(t, message)
			return
		}

		require.Len(t, hook.AllEntries(), 0)
		assert.Equal(t, tc.returnedURL, u.URL.String())
	})
}

// TestAdapterOperation covers the happy paths of signed-URL generation for
// download, head and upload, including the sign-blob API fallback, metadata
// headers and the max-upload-size header.
func TestAdapterOperation(t *testing.T) {
	tests := map[string]adapterOperationTestCase{
		"error-on-URL-signing": {
			returnedURL:   "",
			returnedError: fmt.Errorf("test error"),
			assertErrorMessage: func(t *testing.T, message string) {
				assert.Contains(t, message, "error while generating GCS pre-signed URL: test error")
			},
			signBlobAPITest: false,
		},
		"invalid-URL-returned": {
			returnedURL:   "://test",
			returnedError: nil,
			assertErrorMessage: func(t *testing.T, message string) {
				assert.Contains(t, message, "error while parsing generated URL: parse")
				assert.Contains(t, message, "://test")
				assert.Contains(t, message, "missing protocol scheme")
			},
			signBlobAPITest: false,
		},
		"valid-configuration": {
			returnedURL:        "https://storage.googleapis.com/test/key?Expires=123456789&GoogleAccessId=test-access-id%40X.iam.gserviceaccount.com&Signature=XYZ",
			returnedError:      nil,
			assertErrorMessage: nil,
			signBlobAPITest:    false,
		},
		"valid-configuration-with-metadata": {
			returnedURL:     "https://storage.googleapis.com/test/key?Expires=123456789&GoogleAccessId=test-access-id%40X.iam.gserviceaccount.com&Signature=XYZ",
			metadata:        map[string]string{"foo": "some foo"},
			expectedHeaders: http.Header{"X-Goog-Meta-Foo": []string{"some foo"}},
		},
		"sign-blob-api-valid-configuration": {
			returnedURL:        "https://storage.googleapis.com/test/key?Expires=123456789&GoogleAccessId=test-access-id%40X.iam.gserviceaccount.com&Signature=XYZ",
			returnedError:      nil,
			assertErrorMessage: nil,
			signBlobAPITest:    true,
		},
		"max-cache-archive-size": {
			returnedURL:            "https://storage.googleapis.com/test/key?Expires=123456789&GoogleAccessId=test-access-id%40X.iam.gserviceaccount.com&Signature=XYZ",
			returnedError:          nil,
			assertErrorMessage:     nil,
			signBlobAPITest:        false,
			maxUploadedArchiveSize: maxUploadedArchiveSize,
			expectedHeaders:        http.Header{"X-Goog-Content-Length-Range": []string{"0,100"}},
		},
	}

	for name, tc := range tests {
		t.Run(name, func(t *testing.T) {
			config := defaultGCSCache()
			config.MaxUploadedArchiveSize = tc.maxUploadedArchiveSize

			a, err := New(config, defaultTimeout, objectName)
			require.NoError(t, err)

			a.WithMetadata(tc.metadata)

			adapter, ok := a.(*gcsAdapter)
			require.True(t, ok, "Adapter should be properly casted to *adapter type")

			testAdapterOperation(
				t,
				tc,
				"GetDownloadURL",
				http.MethodGet,
				"",
				adapter,
				a.GetDownloadURL,
			)
			testAdapterOperation(
				t,
				tc,
				"GetHeadURL",
				http.MethodHead,
				"",
				adapter,
				a.GetHeadURL,
			)
			testAdapterOperation(
				t,
				tc,
				"GetUploadURL",
				http.MethodPut,
				"application/octet-stream",
				adapter,
				a.GetUploadURL,
			)

			headers := adapter.GetUploadHeaders()
			if len(tc.expectedHeaders) < 1 {
				assert.Empty(t, headers, "expected headers to be empty")
			} else {
				assert.Equal(t,
tc.expectedHeaders, headers, "headers do not match")
			}

			// GetGoCloudURL is not supported by this adapter: it must return
			// an empty URL/environment and no error.
			goCloudURL, err := adapter.GetGoCloudURL(t.Context(), true)
			assert.Nil(t, goCloudURL.URL)
			assert.NoError(t, err)
			assert.Empty(t, goCloudURL.Environment)
		})
	}
}


================================================
FILE: cache/gcs/credentials_resolver.go
================================================
package gcs

import (
	"context"
	"encoding/json"
	"fmt"
	"os"

	"cloud.google.com/go/compute/metadata"
	credentialsapiv1 "cloud.google.com/go/iam/credentials/apiv1"
	"cloud.google.com/go/iam/credentials/apiv1/credentialspb"
	gax "github.com/googleapis/gax-go/v2"
	"github.com/sirupsen/logrus"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

// credentialsResolver yields the GCS credentials used for signing cache URLs.
type credentialsResolver interface {
	// Credentials returns the resolved credentials; call Resolve first.
	Credentials() *cacheconfig.CacheGCSCredentials
	// Resolve loads credentials from file, config or the metadata server.
	Resolve() error
	// SignBytesFunc returns a signer backed by the IAM sign-blob API, used
	// when no private key is available locally.
	SignBytesFunc(context.Context) func([]byte) ([]byte, error)
}

// IamCredentialsClient is the subset of the IAM credentials API client used
// for blob signing (extracted as an interface for mocking).
type IamCredentialsClient interface {
	SignBlob(
		context.Context,
		*credentialspb.SignBlobRequest,
		...gax.CallOption,
	) (*credentialspb.SignBlobResponse, error)
}

// MetadataClient is the subset of the GCE metadata client used to discover
// the default service-account email (extracted as an interface for mocking).
type MetadataClient interface {
	Email(serviceAccount string) (string, error)
}

// TypeServiceAccount is the only credentials-file "type" value accepted.
const TypeServiceAccount = "service_account"

// credentialsFile mirrors the fields read from a GCP service-account JSON
// key file.
type credentialsFile struct {
	Type        string `json:"type"`
	ClientEmail string `json:"client_email"`
	PrivateKey  string `json:"private_key"`
}

// defaultCredentialsResolver resolves GCS credentials from, in order of
// precedence: a credentials file, the Runner configuration, or the GCE
// metadata server (access ID only).
type defaultCredentialsResolver struct {
	config      *cacheconfig.CacheGCSConfig
	credentials *cacheconfig.CacheGCSCredentials

	metadataClient    MetadataClient
	credentialsClient IamCredentialsClient
}

// Credentials returns the credentials populated by the last Resolve call.
func (cr *defaultCredentialsResolver) Credentials() *cacheconfig.CacheGCSCredentials {
	return cr.credentials
}

// Resolve populates cr.credentials. A configured credentials file wins over
// inline config values; with neither present, the access ID is read from the
// metadata server (leaving the private key empty for sign-blob signing).
func (cr *defaultCredentialsResolver) Resolve() error {
	if cr.config.CredentialsFile != "" {
		return cr.readCredentialsFromFile()
	}

	if cr.config.AccessID == "" && cr.config.PrivateKey == "" {
		return cr.readAccessIDFromMetadataServer()
	}

	return cr.readCredentialsFromConfig()
}

// SignBytesFunc returns a closure that signs a payload through the IAM
// sign-blob API on behalf of the resolved access ID.
func (cr *defaultCredentialsResolver) SignBytesFunc(ctx context.Context) func([]byte) ([]byte, error) {
	return func(payload []byte) ([]byte, error) {
		// NOTE(review): Name is the bare service-account email; the IAM API
		// documents "projects/-/serviceAccounts/<email>" — presumably accepted
		// or normalized downstream, confirm before changing.
		req := &credentialspb.SignBlobRequest{
			Name:    cr.credentials.AccessID,
			Payload: payload,
		}

		client, err := cr.iamCredentialsClient(ctx)
		if err != nil {
			return nil, err
		}

		res, err := client.SignBlob(ctx, req)
		if err != nil {
			return nil, fmt.Errorf("signing blob: %w", err)
		}

		return res.SignedBlob, nil
	}
}

// readCredentialsFromFile loads a service-account JSON key file and copies
// its client_email/private_key into the resolved credentials.
func (cr *defaultCredentialsResolver) readCredentialsFromFile() error {
	data, err := os.ReadFile(cr.config.CredentialsFile)
	if err != nil {
		return fmt.Errorf("error while reading credentials file: %w", err)
	}

	var credentialsFileContent credentialsFile
	err = json.Unmarshal(data, &credentialsFileContent)
	if err != nil {
		return fmt.Errorf("error while parsing credentials file: %w", err)
	}

	if credentialsFileContent.Type != TypeServiceAccount {
		return fmt.Errorf("unsupported credentials file type: %s", credentialsFileContent.Type)
	}

	logrus.Debugln("Credentials loaded from file. Skipping direct settings from Runner configuration file")

	cr.credentials.AccessID = credentialsFileContent.ClientEmail
	cr.credentials.PrivateKey = credentialsFileContent.PrivateKey

	return nil
}

// readCredentialsFromConfig copies AccessID/PrivateKey from the Runner
// configuration; both must be present.
func (cr *defaultCredentialsResolver) readCredentialsFromConfig() error {
	if cr.config.AccessID == "" || cr.config.PrivateKey == "" {
		return fmt.Errorf("GCS config present, but credentials are not configured")
	}

	cr.credentials.AccessID = cr.config.AccessID
	cr.credentials.PrivateKey = cr.config.PrivateKey

	return nil
}

// readAccessIDFromMetadataServer asks the GCE metadata server for the default
// service-account email; the private key stays empty so the sign-blob API is
// used for signing.
func (cr *defaultCredentialsResolver) readAccessIDFromMetadataServer() error {
	email, err := cr.metadataClient.Email("")
	if err != nil {
		return fmt.Errorf("getting email from metadata server: %w", err)
	}

	cr.credentials.AccessID = email

	return nil
}

// iamCredentialsClient lazily creates (and then caches) the IAM credentials
// API client.
func (cr *defaultCredentialsResolver) iamCredentialsClient(ctx context.Context) (IamCredentialsClient, error) {
	if cr.credentialsClient == nil {
		var err error
		cr.credentialsClient, err = credentialsapiv1.NewIamCredentialsClient(ctx)
		if err != nil {
			return nil, fmt.Errorf("creating iam credentials client: %w", err)
		}
	}

	return cr.credentialsClient, nil
}

// newDefaultCredentialsResolver builds a resolver for the given (non-nil)
// GCS cache configuration, wired to the real GCE metadata client.
func newDefaultCredentialsResolver(config *cacheconfig.CacheGCSConfig) (*defaultCredentialsResolver, error) {
	if config == nil {
		return nil, fmt.Errorf("config can't be nil")
	}

	credentials := &defaultCredentialsResolver{
		config:         config,
		credentials:    &cacheconfig.CacheGCSCredentials{},
		metadataClient: metadata.NewClient(nil),
	}

	return credentials, nil
}

// credentialsResolverInitializer is swappable in tests.
var credentialsResolverInitializer = newDefaultCredentialsResolver


================================================
FILE: cache/gcs/credentials_resolver_test.go
================================================
//go:build !integration

package gcs

import (
	"encoding/json"
	"errors"
	"fmt"
	"os"
	"path/filepath"
	"testing"

	"cloud.google.com/go/iam/credentials/apiv1/credentialspb"
	"github.com/stretchr/testify/assert"
	mock "github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

var accessID2 = "test-access-id-2@X.iam.gserviceaccount.com"

// credentialsResolverTestCase drives TestDefaultCredentialsResolver below.
type credentialsResolverTestCase struct {
	config                         *cacheconfig.CacheGCSConfig
	credentialsFileContent         *credentialsFile
	credentialsFileDoesNotExist    bool
	credentialsFileWithInvalidJSON bool
	metadataServerError            bool
	errorExpectedOnInitialization  bool
	errorExpectedOnResolve         bool
	expectedCredentials            *cacheconfig.CacheGCSCredentials
}

// getCredentialsConfig builds a GCS config with inline credentials.
func getCredentialsConfig(accessID string, privateKey string) *cacheconfig.CacheGCSConfig {
	return &cacheconfig.CacheGCSConfig{
		CacheGCSCredentials: cacheconfig.CacheGCSCredentials{
			AccessID:   accessID,
			PrivateKey: privateKey,
		},
	}
}

// getCredentialsFileContent builds the JSON key-file payload used in tests.
func getCredentialsFileContent(fileType string, clientEmail string, privateKey string) *credentialsFile {
	return &credentialsFile{
		Type:        fileType,
		ClientEmail: clientEmail,
		PrivateKey:  privateKey,
	}
}

// getExpectedCredentials builds the expected resolver output.
func getExpectedCredentials(accessID string, privateKey string) *cacheconfig.CacheGCSCredentials {
	return &cacheconfig.CacheGCSCredentials{
		AccessID:   accessID,
		PrivateKey: privateKey,
	}
}

func TestDefaultCredentialsResolver(t *testing.T) {
	cases := map[string]credentialsResolverTestCase{
		"config is nil": {
			config:                        nil,
			credentialsFileContent:        nil,
			errorExpectedOnInitialization: true,
		},
		"credentials not set": {
			config:                 &cacheconfig.CacheGCSConfig{},
			errorExpectedOnResolve: false,
			expectedCredentials:    getExpectedCredentials(accessID, ""),
		},
		"credentials not set - metadata server error": {
			config:                 &cacheconfig.CacheGCSConfig{},
			metadataServerError:    true,
			errorExpectedOnResolve: true,
		},
		"credentials direct in config": {
			config:                 getCredentialsConfig(accessID, privateKey),
			errorExpectedOnResolve: false,
			expectedCredentials:    getExpectedCredentials(accessID, privateKey),
		},
		"credentials direct in config - only accessID": {
			config:                 getCredentialsConfig(accessID, ""),
			errorExpectedOnResolve: true,
		},
		"credentials direct in config - only privatekey": {
			config:                 getCredentialsConfig("", privateKey),
			errorExpectedOnResolve: true,
		},
		"credentials in credentials file - service account file": {
			config:                 &cacheconfig.CacheGCSConfig{},
			credentialsFileContent: getCredentialsFileContent(TypeServiceAccount, accessID, privateKey),
			errorExpectedOnResolve: false,
			expectedCredentials:    getExpectedCredentials(accessID, privateKey),
		},
		"credentials in credentials file - unsupported type credentials file": {
			config:                 &cacheconfig.CacheGCSConfig{},
			credentialsFileContent: getCredentialsFileContent("unknown_type", "", ""),
			errorExpectedOnResolve: true,
		},
		"credentials in both places - credentials file takes precedence": {
			config:                 getCredentialsConfig(accessID, privateKey),
			credentialsFileContent: getCredentialsFileContent(TypeServiceAccount, accessID2, privateKey),
			errorExpectedOnResolve: false,
			expectedCredentials:    getExpectedCredentials(accessID2, privateKey),
		},
		"credentials in non-existing credentials file": {
			config:                      &cacheconfig.CacheGCSConfig{},
			credentialsFileContent:      getCredentialsFileContent(TypeServiceAccount, accessID, privateKey),
			credentialsFileDoesNotExist: true,
			errorExpectedOnResolve:      true,
		},
		"credentials in credentials file - invalid JSON": {
			config:                         &cacheconfig.CacheGCSConfig{},
			credentialsFileContent:         getCredentialsFileContent(TypeServiceAccount, accessID, privateKey),
			credentialsFileWithInvalidJSON: true,
			errorExpectedOnResolve:         true,
		},
	}

	for name, testCase := range cases {
		t.Run(name, func(t *testing.T) {
			if testCase.credentialsFileContent != nil {
				// Materialize the credentials file (valid JSON, broken JSON,
				// or deliberately absent) in a per-test temp dir.
				pathname := filepath.Join(t.TempDir(), "gcp-credentials-file")
				testCase.config.CredentialsFile = pathname

				switch {
				case testCase.credentialsFileDoesNotExist:
					// no-op
				case testCase.credentialsFileWithInvalidJSON:
					require.NoError(t, os.WriteFile(pathname, []byte("a"), 0o600))
				default:
					data, err := json.Marshal(testCase.credentialsFileContent)
					require.NoError(t, err)
					require.NoError(t, os.WriteFile(pathname, data, 0o600))
				}
			}

			mc := NewMockMetadataClient(t)
			metadataCall := mc.On("Email", mock.Anything).Maybe()
			if testCase.metadataServerError {
				metadataCall.Return("", fmt.Errorf("test error"))
			} else {
				metadataCall.Return(accessID, nil)
			}

			cr, err := newDefaultCredentialsResolver(testCase.config)
			if testCase.errorExpectedOnInitialization {
				assert.Error(t, err)
				return
			}
			require.NoError(t, err, "Error on resolver initialization is not expected")

			cr.metadataClient = mc

			err = cr.Resolve()
			if testCase.errorExpectedOnResolve {
				assert.Error(t, err)
				return
			}
			require.NoError(t, err, "Error on credentials resolving is not expected")

			assert.Equal(t, testCase.expectedCredentials, cr.Credentials())
		})
	}
}

// signBytesOperationTestCase drives TestSignBytesOperation.
type signBytesOperationTestCase struct {
	returnError error
	output      []byte
}

// TestSignBytesOperation checks SignBytesFunc against a mocked IAM
// credentials client for both success and error responses.
func TestSignBytesOperation(t *testing.T) {
	tests := map[string]signBytesOperationTestCase{
		"valid-sign": {
			returnError: nil,
			output:      []byte("output"),
		},
		"error": {
			returnError: errors.New("error"),
			output:      nil,
		},
	}

	for name, tc := range tests {
		t.Run(name, func(t *testing.T) {
			config := getCredentialsConfig(accessID, "")
			sbr := credentialspb.SignBlobResponse{SignedBlob: tc.output}

			icc := NewMockIamCredentialsClient(t)
			signBlobCall := icc.On("SignBlob", mock.Anything, mock.Anything).Maybe()

			cr, _ := newDefaultCredentialsResolver(config)
			if tc.returnError == nil {
				// Pre-populate the cached client so a real one is never built.
				// NOTE(review): in the error branch the mock is configured but
				// never installed on cr — confirm where the error actually
				// originates before relying on this case.
				cr.credentialsClient = icc
				signBlobCall.Return(&sbr, nil)
			} else {
				signBlobCall.Return(nil, tc.returnError)
			}

			signed, err := cr.SignBytesFunc(t.Context())([]byte("input"))
			if tc.returnError == nil {
				assert.Nil(t, err)
				assert.Equal(t, signed, tc.output)
			} else {
				assert.ErrorAs(t, err, &tc.returnError)
				assert.Nil(t, signed)
			}
		})
	}
}


================================================
FILE: cache/gcs/mocks.go
================================================
// Code generated by mockery; DO NOT EDIT.
// github.com/vektra/mockery
// template: testify

package gcs

import (
	"context"

	"cloud.google.com/go/iam/credentials/apiv1/credentialspb"
	"github.com/googleapis/gax-go/v2"
	mock "github.com/stretchr/testify/mock"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

// newMockCredentialsResolver creates a new instance of mockCredentialsResolver. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func newMockCredentialsResolver(t interface { mock.TestingT Cleanup(func()) }) *mockCredentialsResolver { mock := &mockCredentialsResolver{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // mockCredentialsResolver is an autogenerated mock type for the credentialsResolver type type mockCredentialsResolver struct { mock.Mock } type mockCredentialsResolver_Expecter struct { mock *mock.Mock } func (_m *mockCredentialsResolver) EXPECT() *mockCredentialsResolver_Expecter { return &mockCredentialsResolver_Expecter{mock: &_m.Mock} } // Credentials provides a mock function for the type mockCredentialsResolver func (_mock *mockCredentialsResolver) Credentials() *cacheconfig.CacheGCSCredentials { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for Credentials") } var r0 *cacheconfig.CacheGCSCredentials if returnFunc, ok := ret.Get(0).(func() *cacheconfig.CacheGCSCredentials); ok { r0 = returnFunc() } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*cacheconfig.CacheGCSCredentials) } } return r0 } // mockCredentialsResolver_Credentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Credentials' type mockCredentialsResolver_Credentials_Call struct { *mock.Call } // Credentials is a helper method to define mock.On call func (_e *mockCredentialsResolver_Expecter) Credentials() *mockCredentialsResolver_Credentials_Call { return &mockCredentialsResolver_Credentials_Call{Call: _e.mock.On("Credentials")} } func (_c *mockCredentialsResolver_Credentials_Call) Run(run func()) *mockCredentialsResolver_Credentials_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *mockCredentialsResolver_Credentials_Call) Return(cacheGCSCredentials *cacheconfig.CacheGCSCredentials) *mockCredentialsResolver_Credentials_Call { _c.Call.Return(cacheGCSCredentials) return _c } func (_c *mockCredentialsResolver_Credentials_Call) RunAndReturn(run func() 
*cacheconfig.CacheGCSCredentials) *mockCredentialsResolver_Credentials_Call { _c.Call.Return(run) return _c } // Resolve provides a mock function for the type mockCredentialsResolver func (_mock *mockCredentialsResolver) Resolve() error { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for Resolve") } var r0 error if returnFunc, ok := ret.Get(0).(func() error); ok { r0 = returnFunc() } else { r0 = ret.Error(0) } return r0 } // mockCredentialsResolver_Resolve_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Resolve' type mockCredentialsResolver_Resolve_Call struct { *mock.Call } // Resolve is a helper method to define mock.On call func (_e *mockCredentialsResolver_Expecter) Resolve() *mockCredentialsResolver_Resolve_Call { return &mockCredentialsResolver_Resolve_Call{Call: _e.mock.On("Resolve")} } func (_c *mockCredentialsResolver_Resolve_Call) Run(run func()) *mockCredentialsResolver_Resolve_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *mockCredentialsResolver_Resolve_Call) Return(err error) *mockCredentialsResolver_Resolve_Call { _c.Call.Return(err) return _c } func (_c *mockCredentialsResolver_Resolve_Call) RunAndReturn(run func() error) *mockCredentialsResolver_Resolve_Call { _c.Call.Return(run) return _c } // SignBytesFunc provides a mock function for the type mockCredentialsResolver func (_mock *mockCredentialsResolver) SignBytesFunc(context1 context.Context) func([]byte) ([]byte, error) { ret := _mock.Called(context1) if len(ret) == 0 { panic("no return value specified for SignBytesFunc") } var r0 func([]byte) ([]byte, error) if returnFunc, ok := ret.Get(0).(func(context.Context) func([]byte) ([]byte, error)); ok { r0 = returnFunc(context1) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(func([]byte) ([]byte, error)) } } return r0 } // mockCredentialsResolver_SignBytesFunc_Call is a *mock.Call that shadows Run/Return methods with type explicit version for 
method 'SignBytesFunc' type mockCredentialsResolver_SignBytesFunc_Call struct { *mock.Call } // SignBytesFunc is a helper method to define mock.On call // - context1 context.Context func (_e *mockCredentialsResolver_Expecter) SignBytesFunc(context1 interface{}) *mockCredentialsResolver_SignBytesFunc_Call { return &mockCredentialsResolver_SignBytesFunc_Call{Call: _e.mock.On("SignBytesFunc", context1)} } func (_c *mockCredentialsResolver_SignBytesFunc_Call) Run(run func(context1 context.Context)) *mockCredentialsResolver_SignBytesFunc_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } run( arg0, ) }) return _c } func (_c *mockCredentialsResolver_SignBytesFunc_Call) Return(fn func([]byte) ([]byte, error)) *mockCredentialsResolver_SignBytesFunc_Call { _c.Call.Return(fn) return _c } func (_c *mockCredentialsResolver_SignBytesFunc_Call) RunAndReturn(run func(context1 context.Context) func([]byte) ([]byte, error)) *mockCredentialsResolver_SignBytesFunc_Call { _c.Call.Return(run) return _c } // NewMockIamCredentialsClient creates a new instance of MockIamCredentialsClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func NewMockIamCredentialsClient(t interface { mock.TestingT Cleanup(func()) }) *MockIamCredentialsClient { mock := &MockIamCredentialsClient{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockIamCredentialsClient is an autogenerated mock type for the IamCredentialsClient type type MockIamCredentialsClient struct { mock.Mock } type MockIamCredentialsClient_Expecter struct { mock *mock.Mock } func (_m *MockIamCredentialsClient) EXPECT() *MockIamCredentialsClient_Expecter { return &MockIamCredentialsClient_Expecter{mock: &_m.Mock} } // SignBlob provides a mock function for the type MockIamCredentialsClient func (_mock *MockIamCredentialsClient) SignBlob(context1 context.Context, signBlobRequest *credentialspb.SignBlobRequest, callOptions ...gax.CallOption) (*credentialspb.SignBlobResponse, error) { // gax.CallOption _va := make([]interface{}, len(callOptions)) for _i := range callOptions { _va[_i] = callOptions[_i] } var _ca []interface{} _ca = append(_ca, context1, signBlobRequest) _ca = append(_ca, _va...) ret := _mock.Called(_ca...) if len(ret) == 0 { panic("no return value specified for SignBlob") } var r0 *credentialspb.SignBlobResponse var r1 error if returnFunc, ok := ret.Get(0).(func(context.Context, *credentialspb.SignBlobRequest, ...gax.CallOption) (*credentialspb.SignBlobResponse, error)); ok { return returnFunc(context1, signBlobRequest, callOptions...) } if returnFunc, ok := ret.Get(0).(func(context.Context, *credentialspb.SignBlobRequest, ...gax.CallOption) *credentialspb.SignBlobResponse); ok { r0 = returnFunc(context1, signBlobRequest, callOptions...) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*credentialspb.SignBlobResponse) } } if returnFunc, ok := ret.Get(1).(func(context.Context, *credentialspb.SignBlobRequest, ...gax.CallOption) error); ok { r1 = returnFunc(context1, signBlobRequest, callOptions...) 
} else { r1 = ret.Error(1) } return r0, r1 } // MockIamCredentialsClient_SignBlob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SignBlob' type MockIamCredentialsClient_SignBlob_Call struct { *mock.Call } // SignBlob is a helper method to define mock.On call // - context1 context.Context // - signBlobRequest *credentialspb.SignBlobRequest // - callOptions ...gax.CallOption func (_e *MockIamCredentialsClient_Expecter) SignBlob(context1 interface{}, signBlobRequest interface{}, callOptions ...interface{}) *MockIamCredentialsClient_SignBlob_Call { return &MockIamCredentialsClient_SignBlob_Call{Call: _e.mock.On("SignBlob", append([]interface{}{context1, signBlobRequest}, callOptions...)...)} } func (_c *MockIamCredentialsClient_SignBlob_Call) Run(run func(context1 context.Context, signBlobRequest *credentialspb.SignBlobRequest, callOptions ...gax.CallOption)) *MockIamCredentialsClient_SignBlob_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } var arg1 *credentialspb.SignBlobRequest if args[1] != nil { arg1 = args[1].(*credentialspb.SignBlobRequest) } var arg2 []gax.CallOption variadicArgs := make([]gax.CallOption, len(args)-2) for i, a := range args[2:] { if a != nil { variadicArgs[i] = a.(gax.CallOption) } } arg2 = variadicArgs run( arg0, arg1, arg2..., ) }) return _c } func (_c *MockIamCredentialsClient_SignBlob_Call) Return(signBlobResponse *credentialspb.SignBlobResponse, err error) *MockIamCredentialsClient_SignBlob_Call { _c.Call.Return(signBlobResponse, err) return _c } func (_c *MockIamCredentialsClient_SignBlob_Call) RunAndReturn(run func(context1 context.Context, signBlobRequest *credentialspb.SignBlobRequest, callOptions ...gax.CallOption) (*credentialspb.SignBlobResponse, error)) *MockIamCredentialsClient_SignBlob_Call { _c.Call.Return(run) return _c } // NewMockMetadataClient creates a new instance of MockMetadataClient. 
It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockMetadataClient(t interface { mock.TestingT Cleanup(func()) }) *MockMetadataClient { mock := &MockMetadataClient{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockMetadataClient is an autogenerated mock type for the MetadataClient type type MockMetadataClient struct { mock.Mock } type MockMetadataClient_Expecter struct { mock *mock.Mock } func (_m *MockMetadataClient) EXPECT() *MockMetadataClient_Expecter { return &MockMetadataClient_Expecter{mock: &_m.Mock} } // Email provides a mock function for the type MockMetadataClient func (_mock *MockMetadataClient) Email(serviceAccount string) (string, error) { ret := _mock.Called(serviceAccount) if len(ret) == 0 { panic("no return value specified for Email") } var r0 string var r1 error if returnFunc, ok := ret.Get(0).(func(string) (string, error)); ok { return returnFunc(serviceAccount) } if returnFunc, ok := ret.Get(0).(func(string) string); ok { r0 = returnFunc(serviceAccount) } else { r0 = ret.Get(0).(string) } if returnFunc, ok := ret.Get(1).(func(string) error); ok { r1 = returnFunc(serviceAccount) } else { r1 = ret.Error(1) } return r0, r1 } // MockMetadataClient_Email_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Email' type MockMetadataClient_Email_Call struct { *mock.Call } // Email is a helper method to define mock.On call // - serviceAccount string func (_e *MockMetadataClient_Expecter) Email(serviceAccount interface{}) *MockMetadataClient_Email_Call { return &MockMetadataClient_Email_Call{Call: _e.mock.On("Email", serviceAccount)} } func (_c *MockMetadataClient_Email_Call) Run(run func(serviceAccount string)) *MockMetadataClient_Email_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 string if args[0] != nil { arg0 = args[0].(string) } run( arg0, ) 
	})
	return _c
}

func (_c *MockMetadataClient_Email_Call) Return(s string, err error) *MockMetadataClient_Email_Call {
	_c.Call.Return(s, err)
	return _c
}

func (_c *MockMetadataClient_Email_Call) RunAndReturn(run func(serviceAccount string) (string, error)) *MockMetadataClient_Email_Call {
	_c.Call.Return(run)
	return _c
}


================================================
FILE: cache/gcsv2/adapter.go
================================================
package gcsv2

import (
	"context"
	"fmt"
	"net/http"
	"net/url"
	"strings"
	"time"

	"cloud.google.com/go/storage"
	"github.com/sirupsen/logrus"
	"google.golang.org/api/option"

	"gitlab.com/gitlab-org/gitlab-runner/cache"
	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

// gcsAdapter implements cache.Adapter for GCS, delegating URL signing to the
// cloud.google.com/go/storage client (the "gcsv2" adapter).
type gcsAdapter struct {
	timeout                time.Duration
	config                 *cacheconfig.CacheGCSConfig
	objectName             string
	maxUploadedArchiveSize int64
	metadata               map[string]string
}

// GetDownloadURL returns a pre-signed GET URL for the cache object; on
// signing failure the error is logged and an empty URL is returned.
func (a *gcsAdapter) GetDownloadURL(ctx context.Context) cache.PresignedURL {
	u, err := a.presignURL(ctx, http.MethodGet, "")
	if err != nil {
		logrus.Error(err)
	}

	return cache.PresignedURL{URL: u}
}

// GetHeadURL returns a pre-signed HEAD URL (existence/metadata check).
func (a *gcsAdapter) GetHeadURL(ctx context.Context) cache.PresignedURL {
	u, err := a.presignURL(ctx, http.MethodHead, "")
	if err != nil {
		logrus.Error(err)
	}

	return cache.PresignedURL{URL: u}
}

// GetUploadURL returns a pre-signed PUT URL together with the headers the
// uploader must send (size limit and custom metadata).
func (a *gcsAdapter) GetUploadURL(ctx context.Context) cache.PresignedURL {
	u, err := a.presignURL(ctx, http.MethodPut, "application/octet-stream")
	if err != nil {
		logrus.Error(err)
	}

	return cache.PresignedURL{URL: u, Headers: a.GetUploadHeaders()}
}

// GetUploadHeaders builds the headers that must accompany the upload:
// X-Goog-Content-Length-Range enforcing the archive size cap, plus one
// x-goog-meta-* header per metadata entry.
func (a *gcsAdapter) GetUploadHeaders() http.Header {
	headers := http.Header{}
	if a.maxUploadedArchiveSize > 0 {
		headers.Set("X-Goog-Content-Length-Range", fmt.Sprintf("0,%d", a.maxUploadedArchiveSize))
	}

	for k, v := range a.metadata {
		headers.Set("x-goog-meta-"+k, v)
	}

	return headers
}

// GetGoCloudURL is not supported by this adapter; it always returns an empty
// URL and no error.
func (a *gcsAdapter) GetGoCloudURL(_ context.Context, _ bool) (cache.GoCloudURL, error) {
	return cache.GoCloudURL{}, nil
}

// WithMetadata stores custom object metadata to be sent as upload headers.
func (a *gcsAdapter) WithMetadata(metadata map[string]string) {
	a.metadata = metadata
}

// presignURL generates a signed URL for the configured bucket/object using
// the given HTTP method and content type.
func (a *gcsAdapter) presignURL(ctx context.Context, method string, contentType string) (*url.URL, error) {
	if a.config.BucketName == "" {
		return nil, fmt.Errorf("config BucketName cannot be empty")
	}

	var options []option.ClientOption
	switch {
	case a.config.CredentialsFile != "":
		options = append(options, option.WithCredentialsFile(a.config.CredentialsFile)) // nolint:staticcheck
	case a.config.AccessID != "" || a.config.PrivateKey != "":
		// if providing accessID / privateKey for signing, then we don't need the
		// storage client to authenticate
		options = append(options, option.WithoutAuthentication())
	}

	if a.config.UniverseDomain != "" {
		options = append(options, option.WithUniverseDomain(a.config.UniverseDomain))
	}

	client, err := storage.NewClient(ctx, options...)
	if err != nil {
		return nil, fmt.Errorf("creating storage client: %w", err)
	}
	defer client.Close()

	// if accessID/private key is not provided, then the storage client's
	// authentication will be used.
	suo := &storage.SignedURLOptions{
		GoogleAccessID: a.config.AccessID,
		Method:         method,
		Expires:        time.Now().Add(a.timeout),
		ContentType:    contentType,
	}

	if a.config.PrivateKey != "" {
		suo.PrivateKey = []byte(a.config.PrivateKey)
	}

	if method == http.MethodPut {
		// Upload headers must be included in the signature.
		suo.Headers = []string{}
		for key, values := range a.GetUploadHeaders() {
			suo.Headers = append(suo.Headers, fmt.Sprintf("%s:%s", key, strings.Join(values, ";")))
		}
	}

	rawURL, err := client.Bucket(a.config.BucketName).SignedURL(a.objectName, suo)
	if err != nil {
		return nil, fmt.Errorf("generating signed URL: %w", err)
	}

	u, err := url.Parse(rawURL)
	if err != nil {
		return nil, fmt.Errorf("parsing signed URL: %w", err)
	}

	return u, nil
}

// New creates the gcsv2 cache adapter; config.GCS must be present.
func New(config *cacheconfig.Config, timeout time.Duration, objectName string) (cache.Adapter, error) {
	gcs := config.GCS
	if gcs == nil {
		return nil, fmt.Errorf("missing GCS configuration")
	}

	return &gcsAdapter{
		config:     gcs,
		timeout:    timeout,
		objectName: objectName,
		maxUploadedArchiveSize:
config.MaxUploadedArchiveSize, }, nil } func init() { err := cache.Factories().Register("gcsv2", New) if err != nil { panic(err) } } ================================================ FILE: cache/gcsv2/adapter_test.go ================================================ //go:build !integration package gcsv2 import ( "net/http" "testing" "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig" ) var ( accessID = "test-access-id@X.iam.gserviceaccount.com" privateKey = `-----BEGIN RSA PRIVATE KEY----- MIIEpQIBAAKCAQEAzIrvApxNX3VxH5eYe4vI2kLTqOA9uFTV4clGy8uzQsGQvMjl frTWCffayxaSvoKxPlvUYbecYpqqqaByLTE+kSDU/D44yrCiLAyWHWXYGZqfEMEG uHBg4fJK6KcIXlJ3Hp3EGTPw92sCKKzLXyoY7mNN9iP8mnshc39wjdrqm2YgKvQU ZWDxIL/MTtLcWyK07zJ2RamilcjpKtQL5GFgvHCsV1CvQHuKtmZF5kfHlD2E/e+I uEg+fntGkKJpDYtSn1fbLcg/ctFJKQBLfAaJ59Hgyewd8fKveJ6Vn1C7gCXagMPb q54RS8J0dolPaxUtRbzGMJ5Amag8m3dm6U3FbwIDAQABAoIBAQCxC+U8Vjymzwoe 9WIYNnOhcMyy1X63Cj+j00wDZQuCUffNYPs8xJysPizVM3HLk2aF+oiIGJ01wHjO oMGTmpd0mX2h5N3VnDSTekWJprj52Jusrdf6V9OUX9w1KzeUJT9Ucezmf84o6ygQ OxlCAzdXSP+XeajRspjO11V+hCokXSICAMMnUYyqT+Yr34YldjpVJ3VWFHipByww 1BCHBveJuH4wgVW4QICDKBzzYyFCqi8kFFv8ijQ9QOAD2xkVYiP8sOR1K6h/FuHN KV+axHtQjkYgOlyYN7/oe9L0XroCa4h7XibcWLuLQ56G3oBzTFur0la3A1SuKLGm LwBfeVpxAoGBAPCKUiqan24h8RgscEXtbACVa3WmEmOe4qqjnEChof8U5xP4YdfZ cg+k7eBqXBgVtmxozJOQxcPwkZrHIRP59d2h8vjcjOBrMeI3D9BCjTKGYySv0iRT FI0akA0c0Ec7utN4t7AfY7sUpx+wvX/klYy5bsIzOceU/9rYYoudXLnZAoGBANmw VWykOgJZLv8aSTLCDEl2WV6nsl1jRYONVzlthcgQ1wpdgAJvLoTJMuXuSzOQQbUa 08Zm2LhbDErX7YA8MslaiQERSfedV/EXjZn86CBw6wB4IPv8uWh9zSK7E4IH4Den Ow2RE5XjEDiyMA2PUCAGqVEmF/V4nRCFvEfS52SHAoGBAI56MA9CRTsz6Z3a/Km+ 5yE1YFBwjSXq//H5NV1nIBB6riE7F6GGEDTKCYjLFz/A5Kw0KzEhKLNV9LkMSECP 551fBw93fA6WEBchbEF8miwaQ/GAH2Yau+qUmEzcC1aWP6RxNcSh4y32HsP7qVNu 71JKqBtpwkjArghP8ZcnH7yJAoGBAJnHDxFoEfKGvcRH9V195uAeUpOjM0T1U63S ssNGszLZco9H7Z3KnLoAx4vWAhmy1jfxc5i8HmxdJRnZ31SvMdE7u3ydkfrxk6Yk 
VUtqdTA1lE0Ij4Ryyycdd0QJk4ZPufyWjgjPa15+wH7MoVVy388/5WwF1Pb69Tku wAqc2gkRAoGAcj8a+peaNKa1d5EPE0CtTBUypupZh/R1ewTC9y7OyBPczYhxN5NQ vvm6J1WGbnxmuhzzvGNNExeZx9dfGLmcvSAvrweiFbi2yHAc1cBLBkc5/CqfS6QW 336Qe2lgsM61/jrYYYqu7W8l6W2juCz0SPqml6rugsP8r6IMJxfziO8= -----END RSA PRIVATE KEY-----` ) func TestNew(t *testing.T) { t.Run("no config", func(t *testing.T) { adapter, err := New(&cacheconfig.Config{}, time.Second, "bucket") require.ErrorContains(t, err, "missing GCS configuration") require.Nil(t, adapter) }) t.Run("valid", func(t *testing.T) { adapter, err := New(&cacheconfig.Config{GCS: &cacheconfig.CacheGCSConfig{}}, time.Second, "bucket") require.NoError(t, err) require.NotNil(t, adapter) }) } func TestAdapter(t *testing.T) { tests := map[string]struct { config *cacheconfig.Config timeout time.Duration objectName string metadata map[string]string newExpectedErr string getExpectedErr string putExpectedErr string expectedUploadHeaders http.Header }{ "missing config": { config: &cacheconfig.Config{}, objectName: "object-key", newExpectedErr: "missing GCS configuration", }, "no bucket name": { config: &cacheconfig.Config{GCS: &cacheconfig.CacheGCSConfig{}}, objectName: "object-key", getExpectedErr: "config BucketName cannot be empty", putExpectedErr: "config BucketName cannot be empty", }, "valid": { config: &cacheconfig.Config{GCS: &cacheconfig.CacheGCSConfig{BucketName: "test", CacheGCSCredentials: cacheconfig.CacheGCSCredentials{AccessID: accessID, PrivateKey: privateKey}}}, objectName: "object-key", }, "valid with max upload size": { config: &cacheconfig.Config{MaxUploadedArchiveSize: 100, GCS: &cacheconfig.CacheGCSConfig{BucketName: "test", CacheGCSCredentials: cacheconfig.CacheGCSCredentials{AccessID: accessID, PrivateKey: privateKey}}}, objectName: "object-key", expectedUploadHeaders: http.Header{"X-Goog-Content-Length-Range": []string{"0,100"}}, }, "with metadata": { config: &cacheconfig.Config{GCS: &cacheconfig.CacheGCSConfig{BucketName: "test", 
CacheGCSCredentials: cacheconfig.CacheGCSCredentials{AccessID: accessID, PrivateKey: privateKey}}}, objectName: "object-key", metadata: map[string]string{"foo": "some foo"}, expectedUploadHeaders: http.Header{"X-Goog-Meta-Foo": []string{"some foo"}}, }, } const expectedURL = "https://storage.googleapis.com/test/object-key" for tn, tc := range tests { t.Run(tn, func(t *testing.T) { adapter, err := New(tc.config, tc.timeout, tc.objectName) if tc.newExpectedErr != "" { require.EqualError(t, err, tc.newExpectedErr) require.Nil(t, adapter) return } require.NoError(t, err) require.NotNil(t, adapter) adapter.WithMetadata(tc.metadata) getURL, err := adapter.(*gcsAdapter).presignURL(t.Context(), http.MethodGet, "") if tc.getExpectedErr != "" { assert.EqualError(t, err, tc.getExpectedErr) } else { assert.NoError(t, err) } putURL, err := adapter.(*gcsAdapter).presignURL(t.Context(), http.MethodPut, "application/octet-stream") if tc.putExpectedErr != "" { assert.EqualError(t, err, tc.putExpectedErr) } else { assert.NoError(t, err) } if getURL != nil { assert.Contains(t, getURL.String(), expectedURL) u := adapter.GetDownloadURL(t.Context()) require.NotNil(t, u) assert.Contains(t, u.URL.String(), expectedURL) headURL, err := adapter.(*gcsAdapter).presignURL(t.Context(), http.MethodHead, "") require.NoError(t, err) assert.Contains(t, headURL.String(), expectedURL) hu := adapter.GetHeadURL(t.Context()) require.NotNil(t, hu) assert.Contains(t, hu.URL.String(), expectedURL) } if putURL != nil { assert.Contains(t, putURL.String(), expectedURL) u := adapter.GetUploadURL(t.Context()) require.NotNil(t, u) assert.Contains(t, u.URL.String(), expectedURL) headers := u.Headers if len(tc.expectedUploadHeaders) < 1 { assert.Empty(t, headers, "expected upload header to be empty") } else { assert.Equal(t, tc.expectedUploadHeaders, headers, "upload headers mismatch") } } u, err := adapter.GetGoCloudURL(t.Context(), false) assert.NoError(t, err) assert.Nil(t, u.URL) u, err = 
adapter.GetGoCloudURL(t.Context(), true) assert.NoError(t, err) assert.Nil(t, u.URL) }) } } ================================================ FILE: cache/mocks.go ================================================ // Code generated by mockery; DO NOT EDIT. // github.com/vektra/mockery // template: testify package cache import ( "context" mock "github.com/stretchr/testify/mock" ) // NewMockAdapter creates a new instance of MockAdapter. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockAdapter(t interface { mock.TestingT Cleanup(func()) }) *MockAdapter { mock := &MockAdapter{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockAdapter is an autogenerated mock type for the Adapter type type MockAdapter struct { mock.Mock } type MockAdapter_Expecter struct { mock *mock.Mock } func (_m *MockAdapter) EXPECT() *MockAdapter_Expecter { return &MockAdapter_Expecter{mock: &_m.Mock} } // GetDownloadURL provides a mock function for the type MockAdapter func (_mock *MockAdapter) GetDownloadURL(context1 context.Context) PresignedURL { ret := _mock.Called(context1) if len(ret) == 0 { panic("no return value specified for GetDownloadURL") } var r0 PresignedURL if returnFunc, ok := ret.Get(0).(func(context.Context) PresignedURL); ok { r0 = returnFunc(context1) } else { r0 = ret.Get(0).(PresignedURL) } return r0 } // MockAdapter_GetDownloadURL_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetDownloadURL' type MockAdapter_GetDownloadURL_Call struct { *mock.Call } // GetDownloadURL is a helper method to define mock.On call // - context1 context.Context func (_e *MockAdapter_Expecter) GetDownloadURL(context1 interface{}) *MockAdapter_GetDownloadURL_Call { return &MockAdapter_GetDownloadURL_Call{Call: _e.mock.On("GetDownloadURL", context1)} } func (_c *MockAdapter_GetDownloadURL_Call) 
Run(run func(context1 context.Context)) *MockAdapter_GetDownloadURL_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } run( arg0, ) }) return _c } func (_c *MockAdapter_GetDownloadURL_Call) Return(presignedURL PresignedURL) *MockAdapter_GetDownloadURL_Call { _c.Call.Return(presignedURL) return _c } func (_c *MockAdapter_GetDownloadURL_Call) RunAndReturn(run func(context1 context.Context) PresignedURL) *MockAdapter_GetDownloadURL_Call { _c.Call.Return(run) return _c } // GetGoCloudURL provides a mock function for the type MockAdapter func (_mock *MockAdapter) GetGoCloudURL(ctx context.Context, upload bool) (GoCloudURL, error) { ret := _mock.Called(ctx, upload) if len(ret) == 0 { panic("no return value specified for GetGoCloudURL") } var r0 GoCloudURL var r1 error if returnFunc, ok := ret.Get(0).(func(context.Context, bool) (GoCloudURL, error)); ok { return returnFunc(ctx, upload) } if returnFunc, ok := ret.Get(0).(func(context.Context, bool) GoCloudURL); ok { r0 = returnFunc(ctx, upload) } else { r0 = ret.Get(0).(GoCloudURL) } if returnFunc, ok := ret.Get(1).(func(context.Context, bool) error); ok { r1 = returnFunc(ctx, upload) } else { r1 = ret.Error(1) } return r0, r1 } // MockAdapter_GetGoCloudURL_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetGoCloudURL' type MockAdapter_GetGoCloudURL_Call struct { *mock.Call } // GetGoCloudURL is a helper method to define mock.On call // - ctx context.Context // - upload bool func (_e *MockAdapter_Expecter) GetGoCloudURL(ctx interface{}, upload interface{}) *MockAdapter_GetGoCloudURL_Call { return &MockAdapter_GetGoCloudURL_Call{Call: _e.mock.On("GetGoCloudURL", ctx, upload)} } func (_c *MockAdapter_GetGoCloudURL_Call) Run(run func(ctx context.Context, upload bool)) *MockAdapter_GetGoCloudURL_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = 
args[0].(context.Context) } var arg1 bool if args[1] != nil { arg1 = args[1].(bool) } run( arg0, arg1, ) }) return _c } func (_c *MockAdapter_GetGoCloudURL_Call) Return(goCloudURL GoCloudURL, err error) *MockAdapter_GetGoCloudURL_Call { _c.Call.Return(goCloudURL, err) return _c } func (_c *MockAdapter_GetGoCloudURL_Call) RunAndReturn(run func(ctx context.Context, upload bool) (GoCloudURL, error)) *MockAdapter_GetGoCloudURL_Call { _c.Call.Return(run) return _c } // GetHeadURL provides a mock function for the type MockAdapter func (_mock *MockAdapter) GetHeadURL(context1 context.Context) PresignedURL { ret := _mock.Called(context1) if len(ret) == 0 { panic("no return value specified for GetHeadURL") } var r0 PresignedURL if returnFunc, ok := ret.Get(0).(func(context.Context) PresignedURL); ok { r0 = returnFunc(context1) } else { r0 = ret.Get(0).(PresignedURL) } return r0 } // MockAdapter_GetHeadURL_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetHeadURL' type MockAdapter_GetHeadURL_Call struct { *mock.Call } // GetHeadURL is a helper method to define mock.On call // - context1 context.Context func (_e *MockAdapter_Expecter) GetHeadURL(context1 interface{}) *MockAdapter_GetHeadURL_Call { return &MockAdapter_GetHeadURL_Call{Call: _e.mock.On("GetHeadURL", context1)} } func (_c *MockAdapter_GetHeadURL_Call) Run(run func(context1 context.Context)) *MockAdapter_GetHeadURL_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } run( arg0, ) }) return _c } func (_c *MockAdapter_GetHeadURL_Call) Return(presignedURL PresignedURL) *MockAdapter_GetHeadURL_Call { _c.Call.Return(presignedURL) return _c } func (_c *MockAdapter_GetHeadURL_Call) RunAndReturn(run func(context1 context.Context) PresignedURL) *MockAdapter_GetHeadURL_Call { _c.Call.Return(run) return _c } // GetUploadURL provides a mock function for the type MockAdapter func (_mock *MockAdapter) 
GetUploadURL(context1 context.Context) PresignedURL { ret := _mock.Called(context1) if len(ret) == 0 { panic("no return value specified for GetUploadURL") } var r0 PresignedURL if returnFunc, ok := ret.Get(0).(func(context.Context) PresignedURL); ok { r0 = returnFunc(context1) } else { r0 = ret.Get(0).(PresignedURL) } return r0 } // MockAdapter_GetUploadURL_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetUploadURL' type MockAdapter_GetUploadURL_Call struct { *mock.Call } // GetUploadURL is a helper method to define mock.On call // - context1 context.Context func (_e *MockAdapter_Expecter) GetUploadURL(context1 interface{}) *MockAdapter_GetUploadURL_Call { return &MockAdapter_GetUploadURL_Call{Call: _e.mock.On("GetUploadURL", context1)} } func (_c *MockAdapter_GetUploadURL_Call) Run(run func(context1 context.Context)) *MockAdapter_GetUploadURL_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } run( arg0, ) }) return _c } func (_c *MockAdapter_GetUploadURL_Call) Return(presignedURL PresignedURL) *MockAdapter_GetUploadURL_Call { _c.Call.Return(presignedURL) return _c } func (_c *MockAdapter_GetUploadURL_Call) RunAndReturn(run func(context1 context.Context) PresignedURL) *MockAdapter_GetUploadURL_Call { _c.Call.Return(run) return _c } // WithMetadata provides a mock function for the type MockAdapter func (_mock *MockAdapter) WithMetadata(stringToString map[string]string) { _mock.Called(stringToString) return } // MockAdapter_WithMetadata_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WithMetadata' type MockAdapter_WithMetadata_Call struct { *mock.Call } // WithMetadata is a helper method to define mock.On call // - stringToString map[string]string func (_e *MockAdapter_Expecter) WithMetadata(stringToString interface{}) *MockAdapter_WithMetadata_Call { return &MockAdapter_WithMetadata_Call{Call: 
_e.mock.On("WithMetadata", stringToString)} } func (_c *MockAdapter_WithMetadata_Call) Run(run func(stringToString map[string]string)) *MockAdapter_WithMetadata_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 map[string]string if args[0] != nil { arg0 = args[0].(map[string]string) } run( arg0, ) }) return _c } func (_c *MockAdapter_WithMetadata_Call) Return() *MockAdapter_WithMetadata_Call { _c.Call.Return() return _c } func (_c *MockAdapter_WithMetadata_Call) RunAndReturn(run func(stringToString map[string]string)) *MockAdapter_WithMetadata_Call { _c.Run(run) return _c } // NewMockCredentialsAdapter creates a new instance of MockCredentialsAdapter. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockCredentialsAdapter(t interface { mock.TestingT Cleanup(func()) }) *MockCredentialsAdapter { mock := &MockCredentialsAdapter{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockCredentialsAdapter is an autogenerated mock type for the CredentialsAdapter type type MockCredentialsAdapter struct { mock.Mock } type MockCredentialsAdapter_Expecter struct { mock *mock.Mock } func (_m *MockCredentialsAdapter) EXPECT() *MockCredentialsAdapter_Expecter { return &MockCredentialsAdapter_Expecter{mock: &_m.Mock} } // GetCredentials provides a mock function for the type MockCredentialsAdapter func (_mock *MockCredentialsAdapter) GetCredentials() map[string]string { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for GetCredentials") } var r0 map[string]string if returnFunc, ok := ret.Get(0).(func() map[string]string); ok { r0 = returnFunc() } else { if ret.Get(0) != nil { r0 = ret.Get(0).(map[string]string) } } return r0 } // MockCredentialsAdapter_GetCredentials_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetCredentials' type 
MockCredentialsAdapter_GetCredentials_Call struct { *mock.Call } // GetCredentials is a helper method to define mock.On call func (_e *MockCredentialsAdapter_Expecter) GetCredentials() *MockCredentialsAdapter_GetCredentials_Call { return &MockCredentialsAdapter_GetCredentials_Call{Call: _e.mock.On("GetCredentials")} } func (_c *MockCredentialsAdapter_GetCredentials_Call) Run(run func()) *MockCredentialsAdapter_GetCredentials_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockCredentialsAdapter_GetCredentials_Call) Return(stringToString map[string]string) *MockCredentialsAdapter_GetCredentials_Call { _c.Call.Return(stringToString) return _c } func (_c *MockCredentialsAdapter_GetCredentials_Call) RunAndReturn(run func() map[string]string) *MockCredentialsAdapter_GetCredentials_Call { _c.Call.Return(run) return _c } ================================================ FILE: cache/s3/adapter.go ================================================ package s3 import ( "context" "fmt" "net/http" "strings" "time" "github.com/minio/minio-go/v7/pkg/encrypt" "github.com/sirupsen/logrus" "gitlab.com/gitlab-org/gitlab-runner/cache" "gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig" ) type s3Adapter struct { timeout time.Duration config *cacheconfig.CacheS3Config objectName string client minioClient metadata map[string]string } func (a *s3Adapter) GetDownloadURL(ctx context.Context) cache.PresignedURL { URL, err := a.client.PresignHeader( ctx, http.MethodGet, a.config.BucketName, a.objectName, a.timeout, nil, nil, ) if err != nil { logrus.WithError(err).Error("error while generating S3 pre-signed URL") return cache.PresignedURL{} } return cache.PresignedURL{URL: URL} } func (a *s3Adapter) GetHeadURL(ctx context.Context) cache.PresignedURL { URL, err := a.client.PresignHeader( ctx, http.MethodHead, a.config.BucketName, a.objectName, a.timeout, nil, nil, ) if err != nil { logrus.WithError(err).Error("error while generating S3 pre-signed URL") return 
cache.PresignedURL{}
	}

	return cache.PresignedURL{URL: URL}
}

// GetUploadURL presigns an HTTP PUT for the cache object. The returned
// Headers must be sent verbatim with the upload, because they are part of
// the signature.
func (a *s3Adapter) GetUploadURL(ctx context.Context) cache.PresignedURL {
	headers := a.GetUploadHeaders()

	// Note: PresignHeader means, we need the exact same headers to be used when getting the presigned URL and when
	// actually uploading.
	URL, err := a.client.PresignHeader(
		ctx,
		http.MethodPut,
		a.config.BucketName,
		a.objectName,
		a.timeout,
		nil,
		headers,
	)
	if err != nil {
		logrus.WithError(err).Error("error while generating S3 pre-signed URL")
		return cache.PresignedURL{}
	}

	return cache.PresignedURL{URL: URL, Headers: headers}
}

// GetUploadHeaders builds the headers that must accompany an upload:
// server-side-encryption headers (SSE-S3 or SSE-KMS, per configuration) and
// one x-amz-meta-* header per metadata entry. On an SSE configuration error
// it logs and returns nil, matching the adapter's best-effort error style.
func (a *s3Adapter) GetUploadHeaders() http.Header {
	ss, err := func() (encrypt.ServerSide, error) {
		switch encrypt.Type(strings.ToUpper(a.config.ServerSideEncryption)) {
		case encrypt.S3:
			return encrypt.NewSSE(), nil
		case encrypt.KMS:
			ss, err := encrypt.NewSSEKMS(a.config.ServerSideEncryptionKeyID, nil)
			if err != nil {
				return nil, fmt.Errorf("initializing server-side-encryption key id: %w", err)
			}
			return ss, nil
		default:
			// No (or unrecognized) server-side encryption configured.
			return nil, nil
		}
	}()
	if err != nil {
		logrus.WithError(err).Error("error configuring S3 SSE configuration")
		return nil
	}

	headers := http.Header{}
	if ss != nil {
		ss.Marshal(headers)
	}

	// Using e.g. a `x-amz-meta-cacheKey` header shows:
	// - on the WebUI:
	//   | User defined | x-amz-meta-cachekey | qwe-protected-non_protected |
	// - on the API:
	//   ; aws s3api head-object --bucket $bucket --key $blob | jq .Metadata
	//   {
	//     "cachekey": "qwe-protected-non_protected"
	//   }
	for k, v := range a.metadata {
		headers.Set("x-amz-meta-"+k, v)
	}

	return headers
}

// GetGoCloudURL is not supported by this presigned-URL based S3 adapter; it
// always returns an empty GoCloudURL.
func (a *s3Adapter) GetGoCloudURL(_ context.Context, _ bool) (cache.GoCloudURL, error) {
	return cache.GoCloudURL{}, nil
}

// WithMetadata stores metadata that GetUploadHeaders will attach to uploads
// as x-amz-meta-* headers.
func (a *s3Adapter) WithMetadata(metadata map[string]string) {
	a.metadata = metadata
}

// New creates an S3 cache adapter for the given object name, wiring up a
// minio client from the S3 section of the cache configuration.
func New(config *cacheconfig.Config, timeout time.Duration, objectName string) (cache.Adapter, error) {
	s3 := config.S3
	if s3 == nil {
		return nil, fmt.Errorf("missing S3 configuration")
	}

	client, err := newMinioClient(s3)
	if err != nil {
		return nil, fmt.Errorf("error while creating S3 cache storage client: %w", err)
	}

	a := &s3Adapter{
		config:     s3,
		timeout:    timeout,
		objectName: objectName,
		client:     client,
	}

	return a, nil
}

func init() {
	// Register this adapter under the "s3" cache type.
	err := cache.Factories().Register("s3", New)
	if err != nil {
		panic(err)
	}
}

================================================
FILE: cache/s3/adapter_test.go
================================================
//go:build !integration

package s3

import (
	"errors"
	"net/http"
	"net/url"
	"reflect"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/cache"
	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

var defaultTimeout = 1 * time.Hour

const (
	bucketName     = "test"
	objectName     = "key"
	bucketLocation = "location"
)

// defaultCacheFactory returns a minimal S3 cache configuration with static
// credentials; the fixtures below build on top of it.
func defaultCacheFactory() *cacheconfig.Config {
	return &cacheconfig.Config{
		Type: "s3",
		S3: &cacheconfig.CacheS3Config{
			ServerAddress:  "server.com",
			AccessKey:      "access",
			SecretKey:      "key",
			BucketName:     bucketName,
			BucketLocation: bucketLocation},
	}
}

// defaultCacheFactoryEncryptionAES is the base configuration with SSE-S3
// (AES256) server-side encryption enabled.
func defaultCacheFactoryEncryptionAES() *cacheconfig.Config {
	cacheConfig := defaultCacheFactory()
cacheConfig.S3.ServerSideEncryption = "S3" return cacheConfig } func defaultCacheFactoryEncryptionKMS() *cacheconfig.Config { cacheConfig := defaultCacheFactory() cacheConfig.S3.ServerSideEncryption = "KMS" cacheConfig.S3.ServerSideEncryptionKeyID = "alias/my-key" return cacheConfig } type cacheOperationTest struct { errorOnMinioClientInitialization bool errorOnURLPresigning bool presignedURL *url.URL expectedURL *url.URL expectedUploadHeaders http.Header metadata map[string]string } func onFakeMinioURLGenerator(t *testing.T, tc cacheOperationTest) { client := newMockMinioClient(t) var err error if tc.errorOnURLPresigning { err = errors.New("test error") } client. On( "PresignHeader", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, ). Return(tc.presignedURL, err).Maybe() oldNewMinioURLGenerator := newMinioClient newMinioClient = func(s3 *cacheconfig.CacheS3Config) (minioClient, error) { if tc.errorOnMinioClientInitialization { return nil, errors.New("test error") } return client, nil } t.Cleanup(func() { newMinioClient = oldNewMinioURLGenerator }) } func testCacheOperation( t *testing.T, operationName string, operation func(adapter cache.Adapter) cache.PresignedURL, tc cacheOperationTest, cacheConfig *cacheconfig.Config, ) { t.Run(operationName, func(t *testing.T) { onFakeMinioURLGenerator(t, tc) adapter, err := New(cacheConfig, defaultTimeout, objectName) if tc.errorOnMinioClientInitialization { assert.EqualError(t, err, "error while creating S3 cache storage client: test error") return } require.NoError(t, err) adapter.WithMetadata(tc.metadata) u := operation(adapter) assert.Equal(t, tc.expectedURL, u.URL) uploadHeaders := u.Headers if operationName == "GetDownloadURL" || operationName == "GetHeadURL" { assert.Empty(t, uploadHeaders) } else { if tc.expectedUploadHeaders != nil { expectedUploadHeaders := tc.expectedUploadHeaders assert.Len(t, uploadHeaders, len(expectedUploadHeaders)) assert.True( t, 
reflect.DeepEqual(expectedUploadHeaders, uploadHeaders), "headers are not equal:\nexpected %q\nactual: %q", expectedUploadHeaders, uploadHeaders, ) } else { assert.Empty(t, uploadHeaders) } } goCloudURL, err := adapter.GetGoCloudURL(t.Context(), true) assert.NoError(t, err) assert.Nil(t, goCloudURL.URL) assert.Empty(t, goCloudURL.Environment) goCloudURL, err = adapter.GetGoCloudURL(t.Context(), false) assert.NoError(t, err) assert.Nil(t, goCloudURL.URL) assert.Empty(t, goCloudURL.Environment) }) } func TestCacheOperation(t *testing.T) { URL, err := url.Parse("https://s3.example.com") require.NoError(t, err) tests := map[string]cacheOperationTest{ "error-on-minio-client-initialization": { errorOnMinioClientInitialization: true, }, "error-on-presigning-url": { errorOnURLPresigning: true, presignedURL: URL, expectedURL: nil, }, "presigned-url": { presignedURL: URL, expectedURL: URL, }, "presigned-url-with-metadata": { presignedURL: URL, expectedURL: URL, metadata: map[string]string{"foo": "some foo"}, expectedUploadHeaders: http.Header{ "X-Amz-Meta-Foo": []string{"some foo"}, }, }, } for testName, test := range tests { t.Run(testName, func(t *testing.T) { testCacheOperation( t, "GetDownloadURL", func(adapter cache.Adapter) cache.PresignedURL { return adapter.GetDownloadURL(t.Context()) }, test, defaultCacheFactory(), ) testCacheOperation( t, "GetHeadURL", func(adapter cache.Adapter) cache.PresignedURL { return adapter.GetHeadURL(t.Context()) }, test, defaultCacheFactory(), ) testCacheOperation( t, "GetUploadURL", func(adapter cache.Adapter) cache.PresignedURL { return adapter.GetUploadURL(t.Context()) }, test, defaultCacheFactory(), ) }) } } func TestCacheOperationEncryptionAES(t *testing.T) { URL, err := url.Parse("https://s3.example.com") require.NoError(t, err) headers := http.Header{} headers.Add("X-Amz-Server-Side-Encryption", "AES256") tests := map[string]cacheOperationTest{ "error-on-minio-client-initialization": { errorOnMinioClientInitialization: true, 
expectedUploadHeaders: headers, }, "error-on-presigning-url": { errorOnURLPresigning: true, presignedURL: URL, expectedURL: nil, expectedUploadHeaders: nil, }, "presigned-url-aes": { presignedURL: URL, expectedURL: URL, expectedUploadHeaders: headers, }, "presigned-url-aes-with-metdata": { presignedURL: URL, expectedURL: URL, metadata: map[string]string{"foo": "some foo"}, expectedUploadHeaders: func() http.Header { h := headers.Clone() h["X-Amz-Meta-Foo"] = []string{"some foo"} return h }(), }, } for testName, test := range tests { t.Run(testName, func(t *testing.T) { testCacheOperation( t, "GetDownloadURL", func(adapter cache.Adapter) cache.PresignedURL { return adapter.GetDownloadURL(t.Context()) }, test, defaultCacheFactoryEncryptionAES(), ) testCacheOperation( t, "GetHeadURL", func(adapter cache.Adapter) cache.PresignedURL { return adapter.GetHeadURL(t.Context()) }, test, defaultCacheFactoryEncryptionAES(), ) testCacheOperation( t, "GetUploadURL", func(adapter cache.Adapter) cache.PresignedURL { return adapter.GetUploadURL(t.Context()) }, test, defaultCacheFactoryEncryptionAES(), ) }) } } func TestCacheOperationEncryptionKMS(t *testing.T) { URL, err := url.Parse("https://s3.example.com") require.NoError(t, err) headers := http.Header{} headers.Add("X-Amz-Server-Side-Encryption", "aws:kms") headers.Add("X-Amz-Server-Side-Encryption-Aws-Kms-Key-Id", "alias/my-key") tests := map[string]cacheOperationTest{ "error-on-minio-client-initialization": { errorOnMinioClientInitialization: true, expectedUploadHeaders: nil, }, "error-on-presigning-url": { errorOnURLPresigning: true, presignedURL: URL, expectedURL: nil, expectedUploadHeaders: nil, }, "presigned-url-kms": { presignedURL: URL, expectedURL: URL, expectedUploadHeaders: headers, }, "presigned-url-kms-with-metadata": { presignedURL: URL, expectedURL: URL, metadata: map[string]string{"foo": "some foo"}, expectedUploadHeaders: func() http.Header { h := headers.Clone() h["X-Amz-Meta-Foo"] = []string{"some foo"} 
return h }(), }, } for testName, test := range tests { t.Run(testName, func(t *testing.T) { testCacheOperation( t, "GetDownloadURL", func(adapter cache.Adapter) cache.PresignedURL { return adapter.GetDownloadURL(t.Context()) }, test, defaultCacheFactoryEncryptionKMS(), ) testCacheOperation( t, "GetHeadURL", func(adapter cache.Adapter) cache.PresignedURL { return adapter.GetHeadURL(t.Context()) }, test, defaultCacheFactoryEncryptionKMS(), ) testCacheOperation( t, "GetUploadURL", func(adapter cache.Adapter) cache.PresignedURL { return adapter.GetUploadURL(t.Context()) }, test, defaultCacheFactoryEncryptionKMS(), ) }) } } func TestNoConfiguration(t *testing.T) { s3Cache := defaultCacheFactory() s3Cache.S3 = nil adapter, err := New(s3Cache, defaultTimeout, objectName) assert.Nil(t, adapter) assert.EqualError(t, err, "missing S3 configuration") } ================================================ FILE: cache/s3/bucket_location_tripper.go ================================================ package s3 import ( "bytes" "encoding/xml" "io" "net/http" ) type bucketLocationTripper struct { bucketLocation string } // The Minio Golang library always attempts to query the bucket location and // currently has no way of statically setting that value. To avoid that // lookup, the Runner cache uses the library only to generate the URLs, // forgoing the library's API for uploading and downloading files. The custom // Roundtripper stubs out any network requests that would normally be made via // the library. 
// RoundTrip answers any request with a canned 200 response whose body is the
// XML-encoded static bucket location, so the minio library never hits the
// network for its location lookup.
func (b *bucketLocationTripper) RoundTrip(req *http.Request) (res *http.Response, err error) {
	var buffer bytes.Buffer
	err = xml.NewEncoder(&buffer).Encode(b.bucketLocation)
	if err != nil {
		return
	}
	res = &http.Response{
		StatusCode: http.StatusOK,
		Body:       io.NopCloser(&buffer),
	}
	return
}

// CancelRequest satisfies the legacy canceler interface; there is never an
// in-flight request to cancel.
func (b *bucketLocationTripper) CancelRequest(req *http.Request) {
	// Do nothing
}

================================================
FILE: cache/s3/credentials_adapter.go
================================================
package s3

import (
	"fmt"

	"gitlab.com/gitlab-org/gitlab-runner/cache"
	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

// s3CredentialsAdapter exposes static S3 credentials from the cache
// configuration as environment-variable style key/value pairs.
type s3CredentialsAdapter struct {
	config *cacheconfig.CacheS3Config
}

// GetCredentials returns AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY when both
// are configured, and an empty map otherwise.
func (a *s3CredentialsAdapter) GetCredentials() map[string]string {
	credMap := make(map[string]string)

	// For IAM instance profiles, Go Cloud will fetch the credentials with the AWS SDK.
	if a.config.AccessKey == "" || a.config.SecretKey == "" {
		return credMap
	}

	credMap["AWS_ACCESS_KEY_ID"] = a.config.AccessKey
	credMap["AWS_SECRET_ACCESS_KEY"] = a.config.SecretKey

	return credMap
}

// NewS3CredentialsAdapter creates a credentials adapter from the S3 section
// of the cache configuration; it fails when that section is absent.
func NewS3CredentialsAdapter(config *cacheconfig.Config) (cache.CredentialsAdapter, error) {
	s3 := config.S3
	if s3 == nil {
		return nil, fmt.Errorf("missing S3 configuration")
	}

	a := &s3CredentialsAdapter{
		config: s3,
	}

	return a, nil
}

func init() {
	// Register this credentials adapter under the "s3" cache type.
	err := cache.CredentialsFactories().Register("s3", NewS3CredentialsAdapter)
	if err != nil {
		panic(err)
	}
}

================================================
FILE: cache/s3/credentials_adapter_test.go
================================================
//go:build !integration

package s3

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

func TestGetCredentials(t *testing.T) {
	tests := map[string]struct {
		s3            *cacheconfig.CacheS3Config
		expectedError string
		credsExpected bool
	}{
		"static credentials": {
			s3: &cacheconfig.CacheS3Config{
				BucketName: bucketName,
AccessKey: "somekey", SecretKey: "somesecret", }, credsExpected: true, }, "no S3 credentials": { expectedError: `missing S3 configuration`, }, "empty access and secret key": { s3: &cacheconfig.CacheS3Config{ BucketName: bucketName, }, credsExpected: false, }, "empty access key": { s3: &cacheconfig.CacheS3Config{ BucketName: bucketName, SecretKey: "somesecret", }, credsExpected: false, }, "empty secret key": { s3: &cacheconfig.CacheS3Config{ BucketName: bucketName, AccessKey: "somekey", }, credsExpected: false, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { config := &cacheconfig.Config{S3: tt.s3} adapter, err := NewS3CredentialsAdapter(config) if tt.expectedError != "" { require.EqualError(t, err, tt.expectedError) } else { require.NoError(t, err) creds := adapter.GetCredentials() if tt.credsExpected { assert.Equal(t, 2, len(creds)) assert.Equal(t, tt.s3.AccessKey, creds["AWS_ACCESS_KEY_ID"]) assert.Equal(t, tt.s3.SecretKey, creds["AWS_SECRET_ACCESS_KEY"]) } else { assert.Empty(t, creds) } } }) } } ================================================ FILE: cache/s3/minio.go ================================================ package s3 import ( "context" "errors" "net/http" "net/url" "regexp" "strings" "time" "github.com/minio/minio-go/v7" "gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig" "github.com/minio/minio-go/v7/pkg/credentials" ) const DefaultAWSS3Server = "s3.amazonaws.com" var s3AcceleratePattern = regexp.MustCompile(`s3-accelerate.*\.amazonaws\.com$`) type minioClient interface { PresignHeader( ctx context.Context, method string, bucketName string, objectName string, expires time.Duration, reqParams url.Values, extraHeaders http.Header, ) (*url.URL, error) } var newMinio = minio.New var newMinioWithIAM = func(serverAddress, bucketLocation string) (*minio.Client, error) { return minio.New(serverAddress, &minio.Options{ Creds: credentials.NewIAM(""), Secure: true, Transport: &bucketLocationTripper{ bucketLocation: bucketLocation, }, }) } 
var newMinioClient = func(s3 *cacheconfig.CacheS3Config) (minioClient, error) { serverAddress := s3.ServerAddress if serverAddress == "" { serverAddress = DefaultAWSS3Server } var isS3AccelerateEndpoint = s3AcceleratePattern.MatchString(serverAddress) var s3AccelerateEndpoint string if isS3AccelerateEndpoint { s3AccelerateEndpoint = serverAddress serverAddress = strings.Replace(serverAddress, "s3-accelerate", "s3", 1) } var client *minio.Client var err error switch s3.AuthType() { case cacheconfig.S3AuthTypeIAM: client, err = newMinioWithIAM(serverAddress, s3.BucketLocation) case cacheconfig.S3AuthTypeAccessKey: client, err = newMinio(serverAddress, &minio.Options{ Creds: credentials.NewStaticV4(s3.AccessKey, s3.SecretKey, s3.SessionToken), Secure: !s3.Insecure, Transport: &bucketLocationTripper{ bucketLocation: s3.BucketLocation, }, }) default: return nil, errors.New("invalid s3 authentication type") } if err == nil && isS3AccelerateEndpoint { client.SetS3TransferAccelerate(s3AccelerateEndpoint) } return client, err } ================================================ FILE: cache/s3/minio_test.go ================================================ //go:build !integration package s3 import ( "errors" "net/http" "net/url" "testing" "time" "github.com/minio/minio-go/v7" "github.com/minio/minio-go/v7/pkg/credentials" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig" ) type minioClientInitializationTest struct { errorOnInitialization bool configurationFactory func() *cacheconfig.Config serverAddress string expectedToUseIAM bool expectedInsecure bool } func TestMinioClientInitialization(t *testing.T) { tests := map[string]minioClientInitializationTest{ "error-on-initialization": { errorOnInitialization: true, configurationFactory: defaultCacheFactory, }, "all-credentials-empty": { configurationFactory: emptyCredentialsCacheFactory, expectedToUseIAM: true, }, "serverAddress-empty": { 
configurationFactory: emptyServerAddressFactory, expectedToUseIAM: true, }, "accessKey-empty": { configurationFactory: emptyAccessKeyFactory, expectedToUseIAM: true, }, "secretKey-empty": { configurationFactory: emptySecretKeyFactory, expectedToUseIAM: true, }, "only-ServerAddress-defined": { configurationFactory: onlyServerAddressFactory, expectedToUseIAM: true, serverAddress: "s3.customurl.com", }, "only-AccessKey-defined": { configurationFactory: onlyAccessKeyFactory, expectedToUseIAM: true, }, "only-SecretKey-defined": { configurationFactory: onlySecretKeyFactory, expectedToUseIAM: true, }, "should-use-explicit-credentials": { configurationFactory: defaultCacheFactory, }, "should-use-explicit-credentials-with-insecure": { configurationFactory: insecureCacheFactory, expectedInsecure: true, }, } for testName, test := range tests { t.Run(testName, func(t *testing.T) { cleanupMinioMock := runOnFakeMinio(t, test) defer cleanupMinioMock() cleanupMinioCredentialsMock := runOnFakeMinioWithCredentials(t, test) defer cleanupMinioCredentialsMock() cacheConfig := test.configurationFactory() client, err := newMinioClient(cacheConfig.S3) if test.errorOnInitialization { assert.Error(t, err, "test error") return } require.NoError(t, err) assert.NotNil(t, client) }) } } type minioClientInitializationTestS3Accelerate struct { serverAddress string endpointURL string targetURL string accelerated bool err error } func TestMinioClientInitializationWithAccelerate(t *testing.T) { tests := map[string]minioClientInitializationTestS3Accelerate{ "standard-accelerate-endpoint": { serverAddress: "s3-accelerate.amazonaws.com", endpointURL: "s3.amazonaws.com", targetURL: "foo.s3-accelerate.amazonaws.com", accelerated: true, }, "dualstack-region-endpoint": { serverAddress: "s3-accelerate.dualstack.us-east-1.amazonaws.com", endpointURL: "s3.dualstack.us-east-1.amazonaws.com", targetURL: "foo.s3-accelerate.dualstack.us-east-1.amazonaws.com", accelerated: true, }, "non-aws-endpoint": { 
serverAddress: "s3-accelerate.min.io", endpointURL: "s3-accelerate.min.io", targetURL: "s3-accelerate.min.io", }, "client-with-error": { serverAddress: "s3-accelerate.amazonaws.com", endpointURL: "s3.amazonaws.com", targetURL: "foo.s3-accelerate.amazonaws.com", accelerated: true, err: assert.AnError, }, } for testName, test := range tests { t.Run(testName, func(t *testing.T) { cleanupMinioMock := runOnFakeMinioWithAccelerateEndpoint(t, test.accelerated, test.err) defer cleanupMinioMock() cacheConfig := serverAddressAccelerateFactory(test.serverAddress) cacheConfig.S3.AccessKey = "TOKEN" cacheConfig.S3.SecretKey = "TOKEN" client, err := newMinioClient(cacheConfig.S3) if test.err != nil { require.ErrorIs(t, err, test.err) return } require.NoError(t, err) require.NotNil(t, client) url, err := client.PresignHeader(t.Context(), "GET", "foo", "bar", time.Hour, url.Values{}, http.Header{}) require.NoError(t, err) assert.Equal(t, test.targetURL, url.Host) mc, ok := client.(*minio.Client) require.True(t, ok) assert.Equal(t, test.endpointURL, mc.EndpointURL().Host) }) } } func insecureCacheFactory() *cacheconfig.Config { cacheConfig := defaultCacheFactory() cacheConfig.S3.Insecure = true return cacheConfig } func emptyCredentialsCacheFactory() *cacheconfig.Config { cacheConfig := defaultCacheFactory() cacheConfig.S3.ServerAddress = "" cacheConfig.S3.AccessKey = "" cacheConfig.S3.SecretKey = "" return cacheConfig } func emptyServerAddressFactory() *cacheconfig.Config { cacheConfig := emptyCredentialsCacheFactory() cacheConfig.S3.AccessKey = "TOKEN" cacheConfig.S3.SecretKey = "TOKEN" return cacheConfig } func emptyAccessKeyFactory() *cacheconfig.Config { cacheConfig := emptyCredentialsCacheFactory() cacheConfig.S3.ServerAddress = "s3.amazonaws.com" cacheConfig.S3.SecretKey = "TOKEN" return cacheConfig } func emptySecretKeyFactory() *cacheconfig.Config { cacheConfig := emptyCredentialsCacheFactory() cacheConfig.S3.ServerAddress = "s3.amazonaws.com" cacheConfig.S3.AccessKey = 
"TOKEN" return cacheConfig } func onlyServerAddressFactory() *cacheconfig.Config { cacheConfig := emptyCredentialsCacheFactory() cacheConfig.S3.ServerAddress = "s3.customurl.com" return cacheConfig } func serverAddressAccelerateFactory(serverAddress string) *cacheconfig.Config { cacheConfig := emptyCredentialsCacheFactory() cacheConfig.S3.ServerAddress = serverAddress return cacheConfig } func onlyAccessKeyFactory() *cacheconfig.Config { cacheConfig := emptyCredentialsCacheFactory() cacheConfig.S3.AccessKey = "TOKEN" return cacheConfig } func onlySecretKeyFactory() *cacheconfig.Config { cacheConfig := emptyCredentialsCacheFactory() cacheConfig.S3.SecretKey = "TOKEN" return cacheConfig } func runOnFakeMinio(t *testing.T, test minioClientInitializationTest) func() { oldNewMinio := newMinio newMinio = func(endpoint string, opts *minio.Options) (*minio.Client, error) { if test.expectedToUseIAM { t.Error("Should not use regular minio client initializer") } if test.errorOnInitialization { return nil, errors.New("test error") } if test.expectedInsecure { assert.False(t, opts.Secure) } else { assert.True(t, opts.Secure) } client, err := minio.New(endpoint, opts) require.NoError(t, err) return client, nil } return func() { newMinio = oldNewMinio } } func runOnFakeMinioWithAccelerateEndpoint(t *testing.T, accelerated bool, err error) func() { oldNewMinio := newMinio newMinio = func(endpoint string, opts *minio.Options) (*minio.Client, error) { if accelerated { assert.NotContains(t, endpoint, "s3-accelerate") } if err != nil { return nil, err } return minio.New(endpoint, opts) } return func() { newMinio = oldNewMinio } } func runOnFakeMinioWithCredentials(t *testing.T, test minioClientInitializationTest) func() { oldNewMinioWithCredentials := newMinioWithIAM newMinioWithIAM = func(serverAddress, bucketLocation string) (*minio.Client, error) { if !test.expectedToUseIAM { t.Error("Should not use minio with IAM client initializator") } assert.Equal(t, "location", bucketLocation) 
if test.serverAddress == "" { assert.Equal(t, DefaultAWSS3Server, serverAddress) } else { assert.Equal(t, test.serverAddress, serverAddress) } if test.errorOnInitialization { return nil, errors.New("test error") } client, err := minio.New(serverAddress, &minio.Options{ Creds: credentials.NewIAM(""), Secure: true, Transport: &bucketLocationTripper{ bucketLocation: bucketLocation, }, }) require.NoError(t, err) return client, nil } return func() { newMinioWithIAM = oldNewMinioWithCredentials } } ================================================ FILE: cache/s3/mocks.go ================================================ // Code generated by mockery; DO NOT EDIT. // github.com/vektra/mockery // template: testify package s3 import ( "context" "net/http" "net/url" "time" mock "github.com/stretchr/testify/mock" ) // newMockMinioClient creates a new instance of mockMinioClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func newMockMinioClient(t interface { mock.TestingT Cleanup(func()) }) *mockMinioClient { mock := &mockMinioClient{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // mockMinioClient is an autogenerated mock type for the minioClient type type mockMinioClient struct { mock.Mock } type mockMinioClient_Expecter struct { mock *mock.Mock } func (_m *mockMinioClient) EXPECT() *mockMinioClient_Expecter { return &mockMinioClient_Expecter{mock: &_m.Mock} } // PresignHeader provides a mock function for the type mockMinioClient func (_mock *mockMinioClient) PresignHeader(ctx context.Context, method string, bucketName string, objectName string, expires time.Duration, reqParams url.Values, extraHeaders http.Header) (*url.URL, error) { ret := _mock.Called(ctx, method, bucketName, objectName, expires, reqParams, extraHeaders) if len(ret) == 0 { panic("no return value specified for PresignHeader") } var r0 *url.URL var r1 error if returnFunc, ok := ret.Get(0).(func(context.Context, string, string, string, time.Duration, url.Values, http.Header) (*url.URL, error)); ok { return returnFunc(ctx, method, bucketName, objectName, expires, reqParams, extraHeaders) } if returnFunc, ok := ret.Get(0).(func(context.Context, string, string, string, time.Duration, url.Values, http.Header) *url.URL); ok { r0 = returnFunc(ctx, method, bucketName, objectName, expires, reqParams, extraHeaders) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*url.URL) } } if returnFunc, ok := ret.Get(1).(func(context.Context, string, string, string, time.Duration, url.Values, http.Header) error); ok { r1 = returnFunc(ctx, method, bucketName, objectName, expires, reqParams, extraHeaders) } else { r1 = ret.Error(1) } return r0, r1 } // mockMinioClient_PresignHeader_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PresignHeader' type mockMinioClient_PresignHeader_Call struct { *mock.Call } // PresignHeader is a helper method to define mock.On 
call // - ctx context.Context // - method string // - bucketName string // - objectName string // - expires time.Duration // - reqParams url.Values // - extraHeaders http.Header func (_e *mockMinioClient_Expecter) PresignHeader(ctx interface{}, method interface{}, bucketName interface{}, objectName interface{}, expires interface{}, reqParams interface{}, extraHeaders interface{}) *mockMinioClient_PresignHeader_Call { return &mockMinioClient_PresignHeader_Call{Call: _e.mock.On("PresignHeader", ctx, method, bucketName, objectName, expires, reqParams, extraHeaders)} } func (_c *mockMinioClient_PresignHeader_Call) Run(run func(ctx context.Context, method string, bucketName string, objectName string, expires time.Duration, reqParams url.Values, extraHeaders http.Header)) *mockMinioClient_PresignHeader_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } var arg1 string if args[1] != nil { arg1 = args[1].(string) } var arg2 string if args[2] != nil { arg2 = args[2].(string) } var arg3 string if args[3] != nil { arg3 = args[3].(string) } var arg4 time.Duration if args[4] != nil { arg4 = args[4].(time.Duration) } var arg5 url.Values if args[5] != nil { arg5 = args[5].(url.Values) } var arg6 http.Header if args[6] != nil { arg6 = args[6].(http.Header) } run( arg0, arg1, arg2, arg3, arg4, arg5, arg6, ) }) return _c } func (_c *mockMinioClient_PresignHeader_Call) Return(uRL *url.URL, err error) *mockMinioClient_PresignHeader_Call { _c.Call.Return(uRL, err) return _c } func (_c *mockMinioClient_PresignHeader_Call) RunAndReturn(run func(ctx context.Context, method string, bucketName string, objectName string, expires time.Duration, reqParams url.Values, extraHeaders http.Header) (*url.URL, error)) *mockMinioClient_PresignHeader_Call { _c.Call.Return(run) return _c } ================================================ FILE: cache/s3v2/adapter.go ================================================ package s3v2 
import ( "context" "fmt" "net/http" "net/url" "strings" "time" "github.com/sirupsen/logrus" "gitlab.com/gitlab-org/gitlab-runner/cache" "gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig" ) type s3Adapter struct { timeout time.Duration config *cacheconfig.CacheS3Config objectName string client s3Presigner metadata map[string]string } func (a *s3Adapter) GetDownloadURL(ctx context.Context) cache.PresignedURL { presignedURL, err := a.presignURL(ctx, http.MethodGet) if err != nil { logrus.WithError(err).Error("error while generating S3 pre-signed URL") return cache.PresignedURL{} } return presignedURL } func (a *s3Adapter) GetHeadURL(ctx context.Context) cache.PresignedURL { presignedURL, err := a.presignURL(ctx, http.MethodHead) if err != nil { logrus.WithError(err).Error("error while generating S3 pre-signed URL") return cache.PresignedURL{} } return presignedURL } func (a *s3Adapter) GetUploadURL(ctx context.Context) cache.PresignedURL { presignedURL, err := a.presignURL(ctx, http.MethodPut) if err != nil { logrus.WithError(err).Error("error while generating S3 pre-signed URL") return cache.PresignedURL{} } if len(a.metadata) > 0 { if presignedURL.Headers == nil { presignedURL.Headers = http.Header{} } for k, v := range a.metadata { presignedURL.Headers.Set("x-amz-meta-"+k, v) } } return presignedURL } func (a *s3Adapter) WithMetadata(metadata map[string]string) { a.metadata = metadata } func (a *s3Adapter) getARNForGoCloud(upload bool) string { if a.config.RoleARN != "" { return a.config.RoleARN } if upload && a.config.UploadRoleARN != "" { return a.config.UploadRoleARN } return "" } func (a *s3Adapter) GetGoCloudURL(ctx context.Context, upload bool) (cache.GoCloudURL, error) { goCloudURL := cache.GoCloudURL{} roleARN := a.getARNForGoCloud(upload) if roleARN == "" { return goCloudURL, nil } u := url.URL{ Scheme: "s3", Host: a.config.BucketName, Path: a.objectName, } q := u.Query() // These are GoCloud AWS SDK v2 query parameters: // 
https://github.com/google/go-cloud/blob/e5b1bc66f5c42c0a4bb43d179cefdab454559325/blob/s3blob/s3blob.go#L133-L136 // https://github.com/google/go-cloud/blob/e5b1bc66f5c42c0a4bb43d179cefdab454559325/aws/aws.go#L194-L199 q.Set("awssdk", "v2") if a.config.BucketLocation != "" { q.Set("region", a.config.BucketLocation) } endpoint := a.config.GetEndpoint() // We don't need to set the endpoint if the global S3 endpoint is used. // If we did, this may result in failures since AWS requires regional // endpoints to be used. if endpoint != "" && endpoint != DEFAULT_AWS_S3_ENDPOINT { q.Set("endpoint", a.config.GetEndpoint()) if a.config.PathStyleEnabled() { q.Set("hostname_immutable", "true") } } if a.config.PathStyleEnabled() { q.Set("use_path_style", "true") } if a.config.DualStackEnabled() { q.Set("dualstack", "true") } if a.config.Accelerate { q.Set("accelerate", "true") } ssetype := a.client.ServerSideEncryptionType() if ssetype != "" { q.Set("ssetype", ssetype) } if a.config.ServerSideEncryptionKeyID != "" { q.Set("kmskeyid", a.config.ServerSideEncryptionKeyID) } u.RawQuery = q.Encode() goCloudURL.URL = &u credentials, err := a.client.FetchCredentialsForRole( ctx, roleARN, a.config.BucketName, a.objectName, upload, a.timeout) if err != nil { return goCloudURL, err } goCloudURL.Environment = credentials return goCloudURL, nil } func (a *s3Adapter) presignURL(ctx context.Context, method string) (cache.PresignedURL, error) { if a.config.BucketName == "" { return cache.PresignedURL{}, fmt.Errorf("config BucketName cannot be empty") } if a.objectName == "" { return cache.PresignedURL{}, fmt.Errorf("object name cannot be empty") } return a.client.PresignURL(ctx, method, a.config.BucketName, a.objectName, a.metadata, a.timeout) } func New(config *cacheconfig.Config, timeout time.Duration, objectName string) (cache.Adapter, error) { s3Config := config.S3 if s3Config == nil { return nil, fmt.Errorf("missing S3 configuration") } client, err := newS3Client(s3Config) if err != nil { 
return nil, fmt.Errorf("error while creating S3 cache storage client: %w", err) } a := &s3Adapter{ config: s3Config, timeout: timeout, objectName: strings.TrimLeft(objectName, "/"), client: client, } return a, nil } func init() { err := cache.Factories().Register("s3v2", New) if err != nil { panic(err) } cache.RegisterCollector(assumeRoleInFlight) cache.RegisterCollector(assumeRoleWaitDuration) cache.RegisterCollector(assumeRoleCallDuration) cache.RegisterCollector(assumeRoleCredCacheHits) cache.RegisterCollector(assumeRoleCredCacheMisses) cache.RegisterCollector(assumeRoleCredCacheEntries) cache.RegisterCollector(assumeRoleFailures) } ================================================ FILE: cache/s3v2/adapter_test.go ================================================ //go:build !integration package s3v2 import ( "errors" "fmt" "net/http" "net/url" "testing" "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/cache" "gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig" ) var defaultTimeout = 1 * time.Hour const ( bucketName = "test" objectName = "key" bucketLocation = "location" ) func defaultCacheFactory() *cacheconfig.Config { return &cacheconfig.Config{ Type: "s3v2", S3: &cacheconfig.CacheS3Config{ ServerAddress: "server.com", AccessKey: "access", SecretKey: "key", BucketName: bucketName, BucketLocation: bucketLocation}, } } type cacheOperationTest struct { errorOnS3ClientInitialization bool errorOnURLPresigning bool metadata map[string]string presignedURL *url.URL expectedURL *url.URL expectedUploadHeaders http.Header expectedDownloadHeaders http.Header } func onFakeS3URLGenerator(t *testing.T, tc cacheOperationTest) { client := newMockS3Presigner(t) var err error if tc.errorOnURLPresigning { err = errors.New("test error") } client. 
On( "PresignURL", mock.Anything, // context mock.Anything, // http method mock.Anything, // bucket name mock.Anything, // object name mock.Anything, // metadata mock.Anything, // valid time ). Return(cache.PresignedURL{URL: tc.presignedURL}, err).Maybe() oldS3URLGenerator := newS3Client newS3Client = func(s3 *cacheconfig.CacheS3Config, opts ...s3ClientOption) (s3Presigner, error) { if tc.errorOnS3ClientInitialization { return nil, errors.New("test error") } return client, nil } t.Cleanup(func() { newS3Client = oldS3URLGenerator }) } func testCacheOperation( t *testing.T, operationName string, operation func(adapter cache.Adapter) cache.PresignedURL, tc cacheOperationTest, cacheConfig *cacheconfig.Config, ) { t.Run(operationName, func(t *testing.T) { onFakeS3URLGenerator(t, tc) adapter, err := New(cacheConfig, defaultTimeout, objectName) if tc.errorOnS3ClientInitialization { assert.EqualError(t, err, "error while creating S3 cache storage client: test error") return } require.NoError(t, err) adapter.WithMetadata(tc.metadata) URL := operation(adapter) assert.Equal(t, tc.expectedURL, URL.URL) switch operationName { case "GetUploadURL": assert.Equal(t, tc.expectedUploadHeaders, URL.Headers, "upload headers") case "GetDownloadURL": assert.Equal(t, tc.expectedDownloadHeaders, URL.Headers, "download headers") default: // nothing to do (yet) } ctx := t.Context() goCloudURL, err := adapter.GetGoCloudURL(ctx, true) assert.NoError(t, err) assert.Nil(t, goCloudURL.URL) assert.Empty(t, goCloudURL.Environment) goCloudURL, err = adapter.GetGoCloudURL(ctx, false) assert.NoError(t, err) assert.Nil(t, goCloudURL.URL) assert.Empty(t, goCloudURL.Environment) }) } func TestCacheOperation(t *testing.T) { URL, err := url.Parse("https://s3.example.com") require.NoError(t, err) tests := map[string]cacheOperationTest{ "error-on-s3-client-initialization": { errorOnS3ClientInitialization: true, }, "error-on-presigning-url": { errorOnURLPresigning: true, presignedURL: URL, expectedURL: nil, }, 
"presigned-url": { presignedURL: URL, expectedURL: URL, }, "presigned-url-with-metadata": { presignedURL: URL, metadata: map[string]string{"foo": "some foo"}, expectedURL: URL, expectedUploadHeaders: http.Header{"X-Amz-Meta-Foo": []string{"some foo"}}, }, } for testName, test := range tests { t.Run(testName, func(t *testing.T) { testCacheOperation( t, "GetDownloadURL", func(adapter cache.Adapter) cache.PresignedURL { return adapter.GetDownloadURL(t.Context()) }, test, defaultCacheFactory(), ) testCacheOperation( t, "GetHeadURL", func(adapter cache.Adapter) cache.PresignedURL { return adapter.GetHeadURL(t.Context()) }, test, defaultCacheFactory(), ) testCacheOperation( t, "GetUploadURL", func(adapter cache.Adapter) cache.PresignedURL { return adapter.GetUploadURL(t.Context()) }, test, defaultCacheFactory(), ) }) } } func TestNoConfiguration(t *testing.T) { s3Cache := defaultCacheFactory() s3Cache.S3 = nil adapter, err := New(s3Cache, defaultTimeout, objectName) assert.Nil(t, adapter) assert.EqualError(t, err, "missing S3 configuration") } func TestGoCloudURLWithRoleARN(t *testing.T) { enabled := true disabled := false roleARN := "aws:arn:role:1234" expectedCredentials := map[string]string{ "AWS_ACCESS_KEY_ID": "mock-access-key", "AWS_SECRET_ACCESS_KEY": "mock-secret-key", "AWS_SESSION_TOKEN": "mock-session-token", } tests := map[string]struct { objectName string config *cacheconfig.CacheS3Config expected string noCredentials bool failedFetch bool }{ "no role ARN": { config: defaultCacheFactory().S3, noCredentials: true, }, "role ARN": { config: &cacheconfig.CacheS3Config{ BucketName: "role-bucket", BucketLocation: "us-west-1", RoleARN: roleARN, }, expected: "s3://role-bucket/key?awssdk=v2&dualstack=true®ion=us-west-1", }, "role ARN with leading slashes in object": { objectName: "//" + objectName, config: &cacheconfig.CacheS3Config{ BucketName: "role-bucket", BucketLocation: "us-west-1", RoleARN: roleARN, }, expected: 
"s3://role-bucket/key?awssdk=v2&dualstack=true®ion=us-west-1", }, "global S3 endpoint": { config: &cacheconfig.CacheS3Config{ ServerAddress: "s3.amazonaws.com", BucketName: "custom-bucket", BucketLocation: "custom-location", RoleARN: roleARN, }, expected: "s3://custom-bucket/key?awssdk=v2&dualstack=true®ion=custom-location", }, "custom endpoint": { config: &cacheconfig.CacheS3Config{ ServerAddress: "custom.s3.endpoint.com", BucketName: "custom-bucket", BucketLocation: "custom-location", RoleARN: roleARN, }, expected: "s3://custom-bucket/key?awssdk=v2&dualstack=true&endpoint=https%3A%2F%2Fcustom.s3.endpoint.com&hostname_immutable=true®ion=custom-location&use_path_style=true", }, "path style": { config: &cacheconfig.CacheS3Config{ ServerAddress: "minio.example.com:8080", BucketName: "path-style-bucket", BucketLocation: "us-west-2", PathStyle: &enabled, RoleARN: roleARN, }, expected: "s3://path-style-bucket/key?awssdk=v2&dualstack=true&endpoint=https%3A%2F%2Fminio.example.com%3A8080&hostname_immutable=true®ion=us-west-2&use_path_style=true", }, "HTTP and path style": { config: &cacheconfig.CacheS3Config{ ServerAddress: "minio.example.com:8080", Insecure: true, BucketName: "path-style-bucket", BucketLocation: "us-west-2", PathStyle: &enabled, RoleARN: roleARN, }, expected: "s3://path-style-bucket/key?awssdk=v2&dualstack=true&endpoint=http%3A%2F%2Fminio.example.com%3A8080&hostname_immutable=true®ion=us-west-2&use_path_style=true", }, "S3 regional endpoint and path style": { config: &cacheconfig.CacheS3Config{ ServerAddress: "eu-north-1.s3.amazon.aws.com:443", BucketName: "path-style-bucket", BucketLocation: "eu-north-1", PathStyle: &enabled, RoleARN: roleARN, }, expected: "s3://path-style-bucket/key?awssdk=v2&dualstack=true&endpoint=https%3A%2F%2Feu-north-1.s3.amazon.aws.com&hostname_immutable=true®ion=eu-north-1&use_path_style=true", }, "dual stack disabled": { config: &cacheconfig.CacheS3Config{ BucketName: "dual-stack-bucket", BucketLocation: "eu-central-1", 
DualStack: &disabled, RoleARN: roleARN, }, expected: "s3://dual-stack-bucket/key?awssdk=v2®ion=eu-central-1", }, "accelerate": { config: &cacheconfig.CacheS3Config{ BucketName: "accelerate-bucket", BucketLocation: "us-east-1", Accelerate: true, RoleARN: roleARN, }, expected: "s3://accelerate-bucket/key?accelerate=true&awssdk=v2&dualstack=true®ion=us-east-1", }, "S3 encryption": { config: &cacheconfig.CacheS3Config{ BucketName: "encrypted-bucket", BucketLocation: "ap-southeast-1", RoleARN: roleARN, ServerSideEncryption: "S3", }, expected: "s3://encrypted-bucket/key?awssdk=v2&dualstack=true®ion=ap-southeast-1&ssetype=AES256", }, "KMS encryption": { config: &cacheconfig.CacheS3Config{ BucketName: "encrypted-bucket", BucketLocation: "ap-southeast-1", RoleARN: roleARN, ServerSideEncryption: "KMS", ServerSideEncryptionKeyID: "my-kms-key-id", }, expected: "s3://encrypted-bucket/key?awssdk=v2&dualstack=true&kmskeyid=my-kms-key-id®ion=ap-southeast-1&ssetype=aws%3Akms", }, "DSSE-KMS encryption": { config: &cacheconfig.CacheS3Config{ BucketName: "encrypted-bucket", BucketLocation: "ap-southeast-1", RoleARN: roleARN, ServerSideEncryption: "DSSE-KMS", ServerSideEncryptionKeyID: "my-kms-key-id", }, expected: "s3://encrypted-bucket/key?awssdk=v2&dualstack=true&kmskeyid=my-kms-key-id®ion=ap-southeast-1&ssetype=aws%3Akms%3Adsse", }, "with failed credentials": { config: &cacheconfig.CacheS3Config{ BucketName: "role-bucket", BucketLocation: "us-west-1", RoleARN: roleARN, }, failedFetch: true, expected: "s3://role-bucket/key?awssdk=v2&dualstack=true®ion=us-west-1", }, } for tn, tt := range tests { for _, uploadMode := range []bool{true, false} { t.Run(fmt.Sprintf("%s upload=%v", tn, uploadMode), func(t *testing.T) { onFakeS3URLGenerator(t, cacheOperationTest{}) s3Cache := defaultCacheFactory() s3Cache.S3 = tt.config if tt.objectName == "" { tt.objectName = objectName } adapter, err := New(s3Cache, defaultTimeout, tt.objectName) require.NoError(t, err) mockClient := 
adapter.(*s3Adapter).client.(*mockS3Presigner) mockClient.On("ServerSideEncryptionType").Return(s3EncryptionType(tt.config.EncryptionType())).Maybe() if tt.failedFetch { mockClient.On("FetchCredentialsForRole", mock.Anything, tt.config.RoleARN, tt.config.BucketName, mock.Anything, uploadMode, mock.Anything). Return(nil, errors.New("error fetching credentials")) } else { mockClient.On("FetchCredentialsForRole", mock.Anything, tt.config.RoleARN, tt.config.BucketName, mock.Anything, uploadMode, mock.Anything). Return(expectedCredentials, nil).Maybe() } u, err := adapter.GetGoCloudURL(t.Context(), uploadMode) if tt.failedFetch { assert.Error(t, err) } else { assert.NoError(t, err) } if tt.noCredentials || tt.failedFetch { assert.Empty(t, u.Environment) } else { assert.Equal(t, expectedCredentials, u.Environment) } if tt.expected != "" { assert.Equal(t, tt.expected, u.URL.String()) } else { assert.Nil(t, u.URL) } }) } } } func TestGoCloudURLWithUploadRoleARN(t *testing.T) { enabled := true disabled := false roleARN := "aws:arn:role:1234" expectedCredentials := map[string]string{ "AWS_ACCESS_KEY_ID": "mock-access-key", "AWS_SECRET_ACCESS_KEY": "mock-secret-key", "AWS_SESSION_TOKEN": "mock-session-token", } tests := map[string]struct { objectName string config *cacheconfig.CacheS3Config expected string noCredentials bool failedFetch bool }{ "no role ARN": { config: defaultCacheFactory().S3, noCredentials: true, }, "role ARN": { config: &cacheconfig.CacheS3Config{ BucketName: "role-bucket", BucketLocation: "us-west-1", UploadRoleARN: roleARN, }, expected: "s3://role-bucket/key?awssdk=v2&dualstack=true®ion=us-west-1", }, "role ARN with leading slashes in object": { objectName: "//" + objectName, config: &cacheconfig.CacheS3Config{ BucketName: "role-bucket", BucketLocation: "us-west-1", UploadRoleARN: roleARN, }, expected: "s3://role-bucket/key?awssdk=v2&dualstack=true®ion=us-west-1", }, "global S3 endpoint": { config: &cacheconfig.CacheS3Config{ ServerAddress: 
"s3.amazonaws.com", BucketName: "custom-bucket", BucketLocation: "custom-location", UploadRoleARN: roleARN, }, expected: "s3://custom-bucket/key?awssdk=v2&dualstack=true®ion=custom-location", }, "custom endpoint": { config: &cacheconfig.CacheS3Config{ ServerAddress: "custom.s3.endpoint.com", BucketName: "custom-bucket", BucketLocation: "custom-location", UploadRoleARN: roleARN, }, expected: "s3://custom-bucket/key?awssdk=v2&dualstack=true&endpoint=https%3A%2F%2Fcustom.s3.endpoint.com&hostname_immutable=true®ion=custom-location&use_path_style=true", }, "path style": { config: &cacheconfig.CacheS3Config{ ServerAddress: "minio.example.com:8080", BucketName: "path-style-bucket", BucketLocation: "us-west-2", PathStyle: &enabled, UploadRoleARN: roleARN, }, expected: "s3://path-style-bucket/key?awssdk=v2&dualstack=true&endpoint=https%3A%2F%2Fminio.example.com%3A8080&hostname_immutable=true®ion=us-west-2&use_path_style=true", }, "HTTP and path style": { config: &cacheconfig.CacheS3Config{ ServerAddress: "minio.example.com:8080", Insecure: true, BucketName: "path-style-bucket", BucketLocation: "us-west-2", PathStyle: &enabled, UploadRoleARN: roleARN, }, expected: "s3://path-style-bucket/key?awssdk=v2&dualstack=true&endpoint=http%3A%2F%2Fminio.example.com%3A8080&hostname_immutable=true®ion=us-west-2&use_path_style=true", }, "S3 regional endpoint and path style": { config: &cacheconfig.CacheS3Config{ ServerAddress: "eu-north-1.s3.amazon.aws.com:443", BucketName: "path-style-bucket", BucketLocation: "eu-north-1", PathStyle: &enabled, UploadRoleARN: roleARN, }, expected: "s3://path-style-bucket/key?awssdk=v2&dualstack=true&endpoint=https%3A%2F%2Feu-north-1.s3.amazon.aws.com&hostname_immutable=true®ion=eu-north-1&use_path_style=true", }, "dual stack disabled": { config: &cacheconfig.CacheS3Config{ BucketName: "dual-stack-bucket", BucketLocation: "eu-central-1", DualStack: &disabled, UploadRoleARN: roleARN, }, expected: "s3://dual-stack-bucket/key?awssdk=v2®ion=eu-central-1", }, 
"accelerate": { config: &cacheconfig.CacheS3Config{ BucketName: "accelerate-bucket", BucketLocation: "us-east-1", Accelerate: true, UploadRoleARN: roleARN, }, expected: "s3://accelerate-bucket/key?accelerate=true&awssdk=v2&dualstack=true®ion=us-east-1", }, "S3 encryption": { config: &cacheconfig.CacheS3Config{ BucketName: "encrypted-bucket", BucketLocation: "ap-southeast-1", UploadRoleARN: roleARN, ServerSideEncryption: "S3", }, expected: "s3://encrypted-bucket/key?awssdk=v2&dualstack=true®ion=ap-southeast-1&ssetype=AES256", }, "KMS encryption": { config: &cacheconfig.CacheS3Config{ BucketName: "encrypted-bucket", BucketLocation: "ap-southeast-1", UploadRoleARN: roleARN, ServerSideEncryption: "KMS", ServerSideEncryptionKeyID: "my-kms-key-id", }, expected: "s3://encrypted-bucket/key?awssdk=v2&dualstack=true&kmskeyid=my-kms-key-id®ion=ap-southeast-1&ssetype=aws%3Akms", }, "DSSE-KMS encryption": { config: &cacheconfig.CacheS3Config{ BucketName: "encrypted-bucket", BucketLocation: "ap-southeast-1", UploadRoleARN: roleARN, ServerSideEncryption: "DSSE-KMS", ServerSideEncryptionKeyID: "my-kms-key-id", }, expected: "s3://encrypted-bucket/key?awssdk=v2&dualstack=true&kmskeyid=my-kms-key-id®ion=ap-southeast-1&ssetype=aws%3Akms%3Adsse", }, "with failed credentials": { config: &cacheconfig.CacheS3Config{ BucketName: "role-bucket", BucketLocation: "us-west-1", UploadRoleARN: roleARN, }, failedFetch: true, expected: "s3://role-bucket/key?awssdk=v2&dualstack=true®ion=us-west-1", }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { onFakeS3URLGenerator(t, cacheOperationTest{}) s3Cache := defaultCacheFactory() s3Cache.S3 = tt.config if tt.objectName == "" { tt.objectName = objectName } adapter, err := New(s3Cache, defaultTimeout, tt.objectName) require.NoError(t, err) mockClient := adapter.(*s3Adapter).client.(*mockS3Presigner) if !tt.noCredentials { mockClient.On("ServerSideEncryptionType").Return(s3EncryptionType(tt.config.EncryptionType())) if tt.failedFetch { 
mockClient.On("FetchCredentialsForRole", mock.Anything, tt.config.UploadRoleARN, tt.config.BucketName, mock.Anything, true, mock.Anything). Return(nil, errors.New("error fetching credentials")) } else { mockClient.On("FetchCredentialsForRole", mock.Anything, tt.config.UploadRoleARN, tt.config.BucketName, mock.Anything, true, mock.Anything). Return(expectedCredentials, nil) } } u, err := adapter.GetGoCloudURL(t.Context(), true) if tt.failedFetch { assert.Error(t, err) } else { assert.NoError(t, err) } if tt.noCredentials || tt.failedFetch { assert.Empty(t, u.Environment) } else { assert.Equal(t, expectedCredentials, u.Environment) } if tt.expected != "" { assert.Equal(t, tt.expected, u.URL.String()) } else { assert.Nil(t, u.URL) } du, err := adapter.GetGoCloudURL(t.Context(), false) require.NoError(t, err) assert.Nil(t, du.URL) assert.Empty(t, du.Environment) }) } } ================================================ FILE: cache/s3v2/mocks.go ================================================ // Code generated by mockery; DO NOT EDIT. // github.com/vektra/mockery // template: testify package s3v2 import ( "context" "time" mock "github.com/stretchr/testify/mock" "gitlab.com/gitlab-org/gitlab-runner/cache" ) // newMockS3Presigner creates a new instance of mockS3Presigner. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func newMockS3Presigner(t interface {
	mock.TestingT
	Cleanup(func())
}) *mockS3Presigner {
	mock := &mockS3Presigner{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

// mockS3Presigner is an autogenerated mock type for the s3Presigner type
type mockS3Presigner struct {
	mock.Mock
}

type mockS3Presigner_Expecter struct {
	mock *mock.Mock
}

func (_m *mockS3Presigner) EXPECT() *mockS3Presigner_Expecter {
	return &mockS3Presigner_Expecter{mock: &_m.Mock}
}

// FetchCredentialsForRole provides a mock function for the type mockS3Presigner
func (_mock *mockS3Presigner) FetchCredentialsForRole(ctx context.Context, roleARN string, bucketName string, objectName string, upload bool, timeout time.Duration) (map[string]string, error) {
	ret := _mock.Called(ctx, roleARN, bucketName, objectName, upload, timeout)

	if len(ret) == 0 {
		panic("no return value specified for FetchCredentialsForRole")
	}

	var r0 map[string]string
	var r1 error
	if returnFunc, ok := ret.Get(0).(func(context.Context, string, string, string, bool, time.Duration) (map[string]string, error)); ok {
		return returnFunc(ctx, roleARN, bucketName, objectName, upload, timeout)
	}
	if returnFunc, ok := ret.Get(0).(func(context.Context, string, string, string, bool, time.Duration) map[string]string); ok {
		r0 = returnFunc(ctx, roleARN, bucketName, objectName, upload, timeout)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(map[string]string)
		}
	}
	if returnFunc, ok := ret.Get(1).(func(context.Context, string, string, string, bool, time.Duration) error); ok {
		r1 = returnFunc(ctx, roleARN, bucketName, objectName, upload, timeout)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}

// mockS3Presigner_FetchCredentialsForRole_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'FetchCredentialsForRole'
type mockS3Presigner_FetchCredentialsForRole_Call struct {
	*mock.Call
}

// FetchCredentialsForRole is a helper method to define mock.On call
//   - ctx context.Context
//   - roleARN string
//   - bucketName string
//   - objectName string
//   - upload bool
//   - timeout time.Duration
func (_e *mockS3Presigner_Expecter) FetchCredentialsForRole(ctx interface{}, roleARN interface{}, bucketName interface{}, objectName interface{}, upload interface{}, timeout interface{}) *mockS3Presigner_FetchCredentialsForRole_Call {
	return &mockS3Presigner_FetchCredentialsForRole_Call{Call: _e.mock.On("FetchCredentialsForRole", ctx, roleARN, bucketName, objectName, upload, timeout)}
}

func (_c *mockS3Presigner_FetchCredentialsForRole_Call) Run(run func(ctx context.Context, roleARN string, bucketName string, objectName string, upload bool, timeout time.Duration)) *mockS3Presigner_FetchCredentialsForRole_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 context.Context
		if args[0] != nil {
			arg0 = args[0].(context.Context)
		}
		var arg1 string
		if args[1] != nil {
			arg1 = args[1].(string)
		}
		var arg2 string
		if args[2] != nil {
			arg2 = args[2].(string)
		}
		var arg3 string
		if args[3] != nil {
			arg3 = args[3].(string)
		}
		var arg4 bool
		if args[4] != nil {
			arg4 = args[4].(bool)
		}
		var arg5 time.Duration
		if args[5] != nil {
			arg5 = args[5].(time.Duration)
		}
		run(
			arg0,
			arg1,
			arg2,
			arg3,
			arg4,
			arg5,
		)
	})
	return _c
}

func (_c *mockS3Presigner_FetchCredentialsForRole_Call) Return(stringToString map[string]string, err error) *mockS3Presigner_FetchCredentialsForRole_Call {
	_c.Call.Return(stringToString, err)
	return _c
}

func (_c *mockS3Presigner_FetchCredentialsForRole_Call) RunAndReturn(run func(ctx context.Context, roleARN string, bucketName string, objectName string, upload bool, timeout time.Duration) (map[string]string, error)) *mockS3Presigner_FetchCredentialsForRole_Call {
	_c.Call.Return(run)
	return _c
}

// PresignURL provides a mock function for the type mockS3Presigner
func (_mock *mockS3Presigner) PresignURL(ctx context.Context, method string, bucketName string, objectName string, metadata map[string]string, expires time.Duration) (cache.PresignedURL, error) {
	ret := _mock.Called(ctx, method, bucketName, objectName, metadata, expires)

	if len(ret) == 0 {
		panic("no return value specified for PresignURL")
	}

	var r0 cache.PresignedURL
	var r1 error
	if returnFunc, ok := ret.Get(0).(func(context.Context, string, string, string, map[string]string, time.Duration) (cache.PresignedURL, error)); ok {
		return returnFunc(ctx, method, bucketName, objectName, metadata, expires)
	}
	if returnFunc, ok := ret.Get(0).(func(context.Context, string, string, string, map[string]string, time.Duration) cache.PresignedURL); ok {
		r0 = returnFunc(ctx, method, bucketName, objectName, metadata, expires)
	} else {
		r0 = ret.Get(0).(cache.PresignedURL)
	}
	if returnFunc, ok := ret.Get(1).(func(context.Context, string, string, string, map[string]string, time.Duration) error); ok {
		r1 = returnFunc(ctx, method, bucketName, objectName, metadata, expires)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}

// mockS3Presigner_PresignURL_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PresignURL'
type mockS3Presigner_PresignURL_Call struct {
	*mock.Call
}

// PresignURL is a helper method to define mock.On call
//   - ctx context.Context
//   - method string
//   - bucketName string
//   - objectName string
//   - metadata map[string]string
//   - expires time.Duration
func (_e *mockS3Presigner_Expecter) PresignURL(ctx interface{}, method interface{}, bucketName interface{}, objectName interface{}, metadata interface{}, expires interface{}) *mockS3Presigner_PresignURL_Call {
	return &mockS3Presigner_PresignURL_Call{Call: _e.mock.On("PresignURL", ctx, method, bucketName, objectName, metadata, expires)}
}

func (_c *mockS3Presigner_PresignURL_Call) Run(run func(ctx context.Context, method string, bucketName string, objectName string, metadata map[string]string, expires time.Duration)) *mockS3Presigner_PresignURL_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 context.Context
		if args[0] != nil {
			arg0 = args[0].(context.Context)
		}
		var arg1 string
		if args[1] != nil {
			arg1 = args[1].(string)
		}
		var arg2 string
		if args[2] != nil {
			arg2 = args[2].(string)
		}
		var arg3 string
		if args[3] != nil {
			arg3 = args[3].(string)
		}
		var arg4 map[string]string
		if args[4] != nil {
			arg4 = args[4].(map[string]string)
		}
		var arg5 time.Duration
		if args[5] != nil {
			arg5 = args[5].(time.Duration)
		}
		run(
			arg0,
			arg1,
			arg2,
			arg3,
			arg4,
			arg5,
		)
	})
	return _c
}

func (_c *mockS3Presigner_PresignURL_Call) Return(presignedURL cache.PresignedURL, err error) *mockS3Presigner_PresignURL_Call {
	_c.Call.Return(presignedURL, err)
	return _c
}

func (_c *mockS3Presigner_PresignURL_Call) RunAndReturn(run func(ctx context.Context, method string, bucketName string, objectName string, metadata map[string]string, expires time.Duration) (cache.PresignedURL, error)) *mockS3Presigner_PresignURL_Call {
	_c.Call.Return(run)
	return _c
}

// ServerSideEncryptionType provides a mock function for the type mockS3Presigner
func (_mock *mockS3Presigner) ServerSideEncryptionType() string {
	ret := _mock.Called()

	if len(ret) == 0 {
		panic("no return value specified for ServerSideEncryptionType")
	}

	var r0 string
	if returnFunc, ok := ret.Get(0).(func() string); ok {
		r0 = returnFunc()
	} else {
		r0 = ret.Get(0).(string)
	}
	return r0
}

// mockS3Presigner_ServerSideEncryptionType_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ServerSideEncryptionType'
type mockS3Presigner_ServerSideEncryptionType_Call struct {
	*mock.Call
}

// ServerSideEncryptionType is a helper method to define mock.On call
func (_e *mockS3Presigner_Expecter) ServerSideEncryptionType() *mockS3Presigner_ServerSideEncryptionType_Call {
	return &mockS3Presigner_ServerSideEncryptionType_Call{Call: _e.mock.On("ServerSideEncryptionType")}
}

func (_c *mockS3Presigner_ServerSideEncryptionType_Call) Run(run func()) *mockS3Presigner_ServerSideEncryptionType_Call {
	_c.Call.Run(func(args mock.Arguments) {
		run()
	})
	return _c
}

func (_c *mockS3Presigner_ServerSideEncryptionType_Call) Return(s string) *mockS3Presigner_ServerSideEncryptionType_Call {
	_c.Call.Return(s)
	return _c
}

func (_c *mockS3Presigner_ServerSideEncryptionType_Call) RunAndReturn(run func() string) *mockS3Presigner_ServerSideEncryptionType_Call {
	_c.Call.Return(run)
	return _c
}


================================================
FILE: cache/s3v2/s3.go
================================================
package s3v2

import (
	"context"
	"fmt"
	"net/http"
	"net/url"
	"os"
	"strings"
	"sync"
	"time"

	"github.com/hashicorp/golang-lru/v2/expirable"

	"gitlab.com/gitlab-org/gitlab-runner/cache"
	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
	"gitlab.com/gitlab-org/gitlab-runner/helpers"

	"github.com/aws/aws-sdk-go-v2/aws"
	v4 "github.com/aws/aws-sdk-go-v2/aws/signer/v4"
	"github.com/aws/aws-sdk-go-v2/config"
	"github.com/aws/aws-sdk-go-v2/credentials"
	"github.com/aws/aws-sdk-go-v2/service/s3"
	"github.com/aws/aws-sdk-go-v2/service/s3/types"
	"github.com/aws/aws-sdk-go-v2/service/sts"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/sirupsen/logrus"
)

// DEFAULT_AWS_S3_ENDPOINT is the canonical AWS S3 endpoint; configs that set
// ServerAddress to this value are treated the same as an empty endpoint.
const DEFAULT_AWS_S3_ENDPOINT = "https://s3.amazonaws.com"

// fallbackBucketLocation is the region used when bucket-location detection
// fails or the response carries no location constraint.
const fallbackBucketLocation = "us-east-1"

// defaultAssumeRoleMaxConcurrency bounds concurrent STS AssumeRole calls when
// the config does not specify a limit.
const defaultAssumeRoleMaxConcurrency = 5

var assumeRoleInFlight = prometheus.NewGauge(prometheus.GaugeOpts{
	Name: "gitlab_runner_cache_s3_assume_role_requests_in_flight",
	Help: "Number of AssumeRole requests to AWS STS in progress.",
})

var assumeRoleWaitDuration = prometheus.NewHistogram(prometheus.HistogramOpts{
	Name:    "gitlab_runner_cache_s3_assume_role_wait_seconds",
	Help:    "Wait time to acquire a concurrency slot before an AssumeRole request.",
	Buckets: []float64{0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1, 2.5, 5, 10},
})

var assumeRoleCallDuration = prometheus.NewHistogram(prometheus.HistogramOpts{
	Name:    "gitlab_runner_cache_s3_assume_role_duration_seconds",
	Help:    "Duration of AssumeRole API calls to AWS STS.",
	Buckets: []float64{0.05, 0.1, 0.25, 0.5, 1, 2.5, 5, 10, 30},
})

var assumeRoleCredCacheHits = prometheus.NewCounter(prometheus.CounterOpts{
	Name: "gitlab_runner_cache_s3_assume_role_cache_hits_total",
	Help: "Number of AssumeRole credential cache hits.",
})

var assumeRoleCredCacheMisses = prometheus.NewCounter(prometheus.CounterOpts{
	Name: "gitlab_runner_cache_s3_assume_role_cache_misses_total",
	Help: "Number of AssumeRole credential cache misses (This is also a count of the STS calls for cache credentials that were made).",
})

var assumeRoleCredCacheEntries = prometheus.NewGaugeFunc(prometheus.GaugeOpts{
	Name: "gitlab_runner_cache_s3_assume_role_cached_credentials",
	Help: "Current number of AssumeRole credentials held in the LRU cache.",
}, func() float64 {
	return float64(assumeRoleCredCache.Len())
})

var assumeRoleFailures = prometheus.NewCounter(prometheus.CounterOpts{
	Name: "gitlab_runner_cache_s3_assume_role_failures_total",
	Help: "Number of AssumeRole requests which failed.",
})

// assumeRoleCredCacheSize is the maximum number of AssumeRole credentials held
// in the cache. Each entry is a small map of four env-var strings (~200 B).
// 1000 entries ≈ 200 KB — sufficient for instance runners serving hundreds
// of projects with multiple cache keys each.
const assumeRoleCredCacheSize = 1000

// assumeRoleCredCacheTTL is the LRU eviction TTL. It matches the maximum
// AssumeRole session duration (1 hour), so the LRU's built-in background
// sweep (runs every TTL/100 ≈ 36 s) cleans up entries that were never
// accessed again after their credential expired.
const assumeRoleCredCacheTTL = time.Hour

// assumeRoleCredCache caches AssumeRole credentials keyed by
// (roleARN, bucketName, objectName, upload). The objectName is deterministic
// (derived from the runner/project/cache-key path), so concurrent jobs
// sharing the same cache key reuse the same credentials without extra STS
// calls.
//
// The expirable.LRU provides two independent eviction mechanisms:
//   - LRU cap: evicts the least-recently-used entry when the cache is full.
//   - TTL: evicts entries 1 hour after insertion via a background goroutine.
//
// A per-entry expiresAt field is still checked on read so that credentials
// with less remaining validity than required are never returned.
var assumeRoleCredCache = expirable.NewLRU[string, cachedCredential](
	assumeRoleCredCacheSize, nil, assumeRoleCredCacheTTL,
)

// cachedCredential is a cache entry pairing the credential env-var map with
// the credential's absolute expiry time (from the STS response).
type cachedCredential struct {
	creds     map[string]string
	expiresAt time.Time
}

// assumeRoleCacheKey returns a cache key for a set of AssumeRole parameters.
// NUL bytes separate the components so distinct tuples can never collide.
func assumeRoleCacheKey(roleARN, bucketName, objectName string, upload bool) string {
	uploadStr := "0"
	if upload {
		uploadStr = "1"
	}
	return roleARN + "\x00" + bucketName + "\x00" + objectName + "\x00" + uploadStr
}

// FlushCredentialCache evicts all cached AssumeRole credentials, forcing the
// next call for each key to issue a fresh STS request. Use this when a
// credential is known to be compromised or after a configuration change.
func FlushCredentialCache() {
	assumeRoleCredCache.Purge()
}

// s3Presigner is the seam between the adapter and the concrete S3 client;
// mocks.go generates a testify mock for it.
type s3Presigner interface {
	PresignURL(
		ctx context.Context,
		method string,
		bucketName string,
		objectName string,
		metadata map[string]string,
		expires time.Duration,
	) (cache.PresignedURL, error)
	FetchCredentialsForRole(ctx context.Context, roleARN, bucketName, objectName string, upload bool, timeout time.Duration) (map[string]string, error)
	ServerSideEncryptionType() string
}

// s3Client implements s3Presigner on top of the AWS SDK v2.
type s3Client struct {
	s3Config      *cacheconfig.CacheS3Config
	awsConfig     *aws.Config
	client        *s3.Client
	presignClient *s3.PresignClient
	// stsEndpoint overrides the STS endpoint (used by tests).
	stsEndpoint string
	// assumeRoleSem bounds concurrent AssumeRole calls; nil disables the limit.
	assumeRoleSem chan struct{}
	// disableCredCache bypasses the AssumeRole credential LRU when true.
	disableCredCache bool
}

type s3ClientOption func(*s3Client)

// withSTSEndpoint points the client at a custom STS endpoint.
func withSTSEndpoint(endpoint string) s3ClientOption {
	return func(c *s3Client) {
		c.stsEndpoint = endpoint
	}
}

// withAssumeRoleSem replaces the AssumeRole concurrency semaphore.
func withAssumeRoleSem(sem chan struct{}) s3ClientOption {
	return func(c *s3Client) {
		c.assumeRoleSem = sem
	}
}

// PresignURL generates a pre-signed S3 URL for GET, HEAD, or PUT requests.
// PUT requests additionally carry object metadata and the configured
// server-side-encryption headers so they become part of the signature.
func (c *s3Client) PresignURL(ctx context.Context, method string, bucketName string, objectName string, metadata map[string]string, expires time.Duration) (cache.PresignedURL, error) {
	var presignedReq *v4.PresignedHTTPRequest
	var err error

	switch method {
	case http.MethodGet:
		getObjectInput := &s3.GetObjectInput{
			Bucket: aws.String(bucketName),
			Key:    aws.String(objectName),
		}
		presignedReq, err = c.presignClient.PresignGetObject(ctx, getObjectInput, s3.WithPresignExpires(expires))
	case http.MethodHead:
		headObjectInput := &s3.HeadObjectInput{
			Bucket: aws.String(bucketName),
			Key:    aws.String(objectName),
		}
		presignedReq, err = c.presignClient.PresignHeadObject(ctx, headObjectInput, s3.WithPresignExpires(expires))
	case http.MethodPut:
		putObjectInput := &s3.PutObjectInput{
			Bucket: aws.String(bucketName),
			Key:    aws.String(objectName),
		}
		if len(metadata) > 0 {
			putObjectInput.Metadata = metadata
		}
		// Encryption headers must be present at signing time or the upload
		// will be rejected with a signature mismatch.
		switch c.s3Config.EncryptionType() {
		case cacheconfig.S3EncryptionTypeAes256:
			putObjectInput.ServerSideEncryption = types.ServerSideEncryptionAes256
		case cacheconfig.S3EncryptionTypeKms:
			putObjectInput.ServerSideEncryption = types.ServerSideEncryptionAwsKms
			putObjectInput.SSEKMSKeyId = aws.String(c.s3Config.ServerSideEncryptionKeyID)
		case cacheconfig.S3EncryptionTypeDsseKms:
			putObjectInput.ServerSideEncryption = types.ServerSideEncryptionAwsKmsDsse
			putObjectInput.SSEKMSKeyId = aws.String(c.s3Config.ServerSideEncryptionKeyID)
		}
		presignedReq, err = c.presignClient.PresignPutObject(ctx, putObjectInput, s3.WithPresignExpires(expires))
	default:
		return cache.PresignedURL{}, fmt.Errorf("unsupported method: %s", method)
	}

	if err != nil {
		logrus.WithError(err).Error("error while generating S3 pre-signed URL")
		return cache.PresignedURL{}, err
	}

	u, err := url.Parse(presignedReq.URL)
	if err != nil {
		logrus.WithError(err).WithField("url", presignedReq.URL).Errorf("error parsing S3 URL")
		return cache.PresignedURL{}, err
	}

	return cache.PresignedURL{URL: u, Headers: presignedReq.SignedHeader}, nil
}

// generateSessionPolicy builds an inline IAM session policy restricting the
// assumed role to a single object (GetObject for downloads, PutObject for
// uploads), plus the KMS permissions required for encrypted multipart uploads.
func (c *s3Client) generateSessionPolicy(bucketName, objectName string, upload bool) string {
	action := "s3:GetObject"
	if upload {
		action = "s3:PutObject"
	}

	// https://docs.aws.amazon.com/IAM/latest/UserGuide/reference-arns.html
	s3Partition := "aws"
	// https://docs.aws.amazon.com/govcloud-us/latest/UserGuide/using-govcloud-arns.html
	switch {
	case strings.HasPrefix(c.awsConfig.Region, "us-gov-"):
		s3Partition = "aws-us-gov"
	case strings.HasPrefix(c.awsConfig.Region, "cn-"):
		s3Partition = "aws-cn"
	}

	policy := fmt.Sprintf(`{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": ["%s"],
      "Resource": "arn:%s:s3:::%s/%s"
    }`, action, s3Partition, bucketName, objectName)

	if c.s3Config.EncryptionType() == cacheconfig.S3EncryptionTypeKms ||
		c.s3Config.EncryptionType() == cacheconfig.S3EncryptionTypeDsseKms {
		// Permissions needed for multipart upload: https://repost.aws/knowledge-center/s3-large-file-encryption-kms-key
		policy += fmt.Sprintf(`,
    {
      "Effect": "Allow",
      "Action": [
        "kms:Decrypt",
        "kms:GenerateDataKey"
      ],
      "Resource": "%s"
    }`, c.s3Config.ServerSideEncryptionKeyID)
	}

	policy += `
  ]
}`
	return policy
}

// cachedCreds returns credentials from the cache if they have at least
// minValidity of remaining lifetime. Returns (nil, false) on a cache miss,
// a disabled cache, or insufficient remaining validity.
func (c *s3Client) cachedCreds(credKey string, minValidity time.Duration) (map[string]string, bool) {
	if c.disableCredCache {
		return nil, false
	}
	cached, ok := assumeRoleCredCache.Get(credKey)
	if !ok || time.Until(cached.expiresAt) < minValidity {
		return nil, false
	}
	assumeRoleCredCacheHits.Inc()
	return cached.creds, true
}

// acquireAssumeRoleSem acquires a slot in the concurrency semaphore and
// returns a release function. If no semaphore is configured the release
// function is a no-op. Returns an error if ctx is cancelled while waiting.
func (c *s3Client) acquireAssumeRoleSem(ctx context.Context) (func(), error) { if c.assumeRoleSem == nil { return func() {}, nil } waitStart := time.Now() select { case c.assumeRoleSem <- struct{}{}: assumeRoleWaitDuration.Observe(time.Since(waitStart).Seconds()) assumeRoleInFlight.Inc() return func() { <-c.assumeRoleSem assumeRoleInFlight.Dec() }, nil case <-ctx.Done(): return nil, fmt.Errorf("context cancelled waiting for AssumeRole semaphore: %w", ctx.Err()) } } func (c *s3Client) FetchCredentialsForRole(ctx context.Context, roleARN, bucketName, objectName string, upload bool, timeout time.Duration) (map[string]string, error) { // minValidity is the minimum remaining lifetime a cached credential must // have to be considered usable. We want credentials to remain valid for // the entire transfer (at least `timeout`), but cap at 55 minutes so // that cache hits are always possible within the 1-hour session lifetime, // regardless of how large `timeout` is configured. minValidity := min(max(timeout, time.Minute), 55*time.Minute) credKey := assumeRoleCacheKey(roleARN, bucketName, objectName, upload) // Fast path: return cached credentials without touching the semaphore. if creds, ok := c.cachedCreds(credKey, minValidity); ok { return creds, nil } sessionPolicy := c.generateSessionPolicy(bucketName, objectName, upload) stsClient := sts.NewFromConfig(*c.awsConfig, func(o *sts.Options) { if c.stsEndpoint != "" { o.BaseEndpoint = aws.String(c.stsEndpoint) } }) uuid, err := helpers.GenerateRandomUUID(8) if err != nil { return nil, fmt.Errorf("failed to generate random UUID: %w", err) } sessionName := fmt.Sprintf("gitlab-runner-cache-upload-%s", uuid) // Request the maximum allowed session duration. Credentials are cached // and reused across jobs, so a longer session duration means more cache // hits. 
According to https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_manage-assume.html#id_roles_use_view-role-max-session, // session durations must be between 15 minutes and 12 hours; when role // chaining is in use, AWS limits this to 1 hour. const duration = 1 * time.Hour release, err := c.acquireAssumeRoleSem(ctx) if err != nil { return nil, err } defer release() // Double-check cache after acquiring the semaphore slot. A concurrent // goroutine may have fetched and cached credentials for the same key // while we were waiting. if creds, ok := c.cachedCreds(credKey, minValidity); ok { return creds, nil } assumeRoleCredCacheMisses.Inc() startTime := time.Now() roleCredentials, err := stsClient.AssumeRole(ctx, &sts.AssumeRoleInput{ RoleArn: aws.String(roleARN), RoleSessionName: aws.String(sessionName), Policy: aws.String(sessionPolicy), // Limit the role's access DurationSeconds: aws.Int32(int32(duration.Seconds())), }) elapsed := time.Since(startTime).Seconds() assumeRoleCallDuration.Observe(elapsed) if err != nil { assumeRoleFailures.Inc() logrus.WithError(err).WithFields(logrus.Fields{ "role_arn": roleARN, "duration_s": elapsed, }).Error("Failed to assume role for cache credentials") return nil, fmt.Errorf("failed to assume role (took %.2fs): %w", elapsed, err) } // AssumeRole should always return credentials if successful, but // just in case it doesn't let's check this. 
if roleCredentials.Credentials == nil { assumeRoleFailures.Inc() logrus.WithFields(logrus.Fields{ "role_arn": roleARN, "duration_s": elapsed, }).Error("AssumeRole succeeded but returned no credentials") return nil, fmt.Errorf("failed to retrieve credentials (took %.2fs): %w", elapsed, err) } logrus.WithFields(logrus.Fields{ "role_arn": roleARN, "duration_s": elapsed, }).Debug("Successfully assumed role for cache credentials") creds := map[string]string{ "AWS_ACCESS_KEY_ID": *roleCredentials.Credentials.AccessKeyId, "AWS_SECRET_ACCESS_KEY": *roleCredentials.Credentials.SecretAccessKey, "AWS_SESSION_TOKEN": *roleCredentials.Credentials.SessionToken, "AWS_PROFILE": "", // Ignore user-defined values } // Cache only when the response includes an expiration. This is always // the case for AssumeRole, but we guard defensively to avoid storing // credentials that we cannot expire correctly. if !c.disableCredCache && roleCredentials.Credentials.Expiration != nil { assumeRoleCredCache.Add(credKey, cachedCredential{ creds: creds, expiresAt: *roleCredentials.Credentials.Expiration, }) } return creds, nil } func (c *s3Client) ServerSideEncryptionType() string { return s3EncryptionType(c.s3Config.EncryptionType()) } func s3EncryptionType(encryptionType cacheconfig.S3EncryptionType) string { switch encryptionType { case cacheconfig.S3EncryptionTypeAes256: return string(types.ServerSideEncryptionAes256) case cacheconfig.S3EncryptionTypeKms: return string(types.ServerSideEncryptionAwsKms) case cacheconfig.S3EncryptionTypeDsseKms: return string(types.ServerSideEncryptionAwsKmsDsse) default: return "" } } func newRawS3Client(s3Config *cacheconfig.CacheS3Config) (*aws.Config, *s3.Client, error) { var cfg aws.Config var err error options := make([]func(*config.LoadOptions) error, 0) endpoint := s3Config.GetEndpoint() switch s3Config.AuthType() { case cacheconfig.S3AuthTypeIAM: break case cacheconfig.S3AuthTypeAccessKey: options = append(options, 
config.WithCredentialsProvider(credentials.NewStaticCredentialsProvider(s3Config.AccessKey, s3Config.SecretKey, s3Config.SessionToken)), ) } bucketLocation := s3Config.BucketLocation if bucketLocation == "" { bucketLocation = detectBucketLocation(s3Config, options...) } options = append(options, config.WithRegion(bucketLocation)) // AWS SDK Go v2 service/s3 v1.73.0 changed the defaults for both // RequestChecksumCalculation and ResponseChecksumValidation from // WhenRequired to WhenSupported. ResponseChecksumValidation=WhenSupported // causes the SDK to inject "X-Amz-Checksum-Mode: ENABLED" as a signed // header into every GetObject request. Third-party S3-compatible providers // that don't recognize this header compute a different signature, causing // SignatureDoesNotMatch errors on downloads and on presigned GET URLs. // For custom (non-AWS) endpoints, apply WhenRequired defaults unless the // user has explicitly configured the env vars to override this behavior. if endpoint != "" && endpoint != DEFAULT_AWS_S3_ENDPOINT { if os.Getenv("AWS_RESPONSE_CHECKSUM_VALIDATION") == "" { options = append(options, config.WithResponseChecksumValidation(aws.ResponseChecksumValidationWhenRequired)) } if os.Getenv("AWS_REQUEST_CHECKSUM_CALCULATION") == "" { options = append(options, config.WithRequestChecksumCalculation(aws.RequestChecksumCalculationWhenRequired)) } } ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) defer cancel() cfg, err = config.LoadDefaultConfig(ctx, options...) if err != nil { return nil, nil, err } client := s3.NewFromConfig(cfg, func(o *s3.Options) { // To preserve backwards compatibility, configs that set ServerAddress to // "s3.amazonaws.com" don't need a custom endpoint since that is the default // S3 address. // // The AWS SDK doesn't allow you to generate a pre-signed URL with a custom endpoint // and DualStack or Accelerate options set. 
if endpoint != "" && endpoint != DEFAULT_AWS_S3_ENDPOINT { o.BaseEndpoint = aws.String(endpoint) } else { o.UseDualstack = s3Config.DualStackEnabled() // nolint:staticcheck o.UseAccelerate = s3Config.Accelerate } o.UsePathStyle = s3Config.PathStyleEnabled() }) return &cfg, client, nil } func detectBucketLocation(s3Config *cacheconfig.CacheS3Config, optFuncs ...func(*config.LoadOptions) error) string { // The 30 seconds timeout here is arbritrary ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) defer cancel() // When s3 is configured with an IAM profile, a default region must be set // We therefore set the default region to us-east-1 configOpts := append( []func(*config.LoadOptions) error{ config.WithRegion(fallbackBucketLocation), }, optFuncs..., ) cfg, err := config.LoadDefaultConfig(ctx, configOpts...) if err != nil { return fallbackBucketLocation } endpoint := s3Config.GetEndpoint() effectiveEndpoint := DEFAULT_AWS_S3_ENDPOINT client := s3.NewFromConfig(cfg, func(o *s3.Options) { if endpoint != "" && endpoint != DEFAULT_AWS_S3_ENDPOINT { o.BaseEndpoint = aws.String(endpoint) effectiveEndpoint = endpoint } o.UsePathStyle = s3Config.PathStyleEnabled() }) output, err := client.GetBucketLocation(ctx, &s3.GetBucketLocationInput{ Bucket: aws.String(s3Config.BucketName), }) logEntry := logrus.WithFields(logrus.Fields{ "endpoint": effectiveEndpoint, "bucket": s3Config.BucketName, }) if err != nil { logEntry.WithError(err).Warning("Failed to detect S3 bucket location, falling back to default region") return fallbackBucketLocation } location := string(output.LocationConstraint) switch output.LocationConstraint { case "": location = fallbackBucketLocation case types.BucketLocationConstraintEu: location = string(types.BucketLocationConstraintEuWest1) } logEntry.WithField("location", location).Debug("Successfully detected S3 bucket location") return location } // clientInit holds a lazily-built s3Client. 
sync.Once ensures that concurrent // callers for the same s3Config pointer share a single buildS3Client call. type clientInit struct { once sync.Once client s3Presigner err error } // s3ClientCache maps *cacheconfig.CacheS3Config → *clientInit. var s3ClientCache sync.Map // buildS3Client constructs a new s3Client without any caching. func buildS3Client(s3Config *cacheconfig.CacheS3Config, options ...s3ClientOption) (s3Presigner, error) { cfg, client, err := newRawS3Client(s3Config) if err != nil { return nil, err } presignClient := s3.NewPresignClient(client) concurrency := s3Config.AssumeRoleMaxConcurrency var assumeRoleSem chan struct{} switch { case concurrency == 0: assumeRoleSem = make(chan struct{}, defaultAssumeRoleMaxConcurrency) case concurrency > 0: assumeRoleSem = make(chan struct{}, concurrency) // concurrency < 0: nil channel, semaphore disabled } c := &s3Client{ s3Config: s3Config, awsConfig: cfg, client: client, presignClient: presignClient, assumeRoleSem: assumeRoleSem, disableCredCache: s3Config.DisableAssumeRoleCredentialsCaching, } for _, opt := range options { opt(c) } return c, nil } // newS3Client returns a cached s3Client for the given config when possible. // // The s3Config pointer is used as the cache key. Each config load allocates a // fresh CacheS3Config (TOML unmarshal creates new objects), so pointer identity // naturally captures both "which runner" and "which load": after a config // reload the pointer changes and the old entry is never matched again. // // Caching is skipped when options are provided (options such as withSTSEndpoint // mutate the client and must not be shared across callers). // // sync.Once inside clientInit ensures that concurrent callers sharing the same // s3Config pointer issue only one newRawS3Client call (and therefore one IMDS // request) even during the initial population or after a reload. 
var newS3Client = func(s3Config *cacheconfig.CacheS3Config, options ...s3ClientOption) (s3Presigner, error) {
	// Options mutate the client, so never serve them from the shared cache.
	if len(options) > 0 {
		return buildS3Client(s3Config, options...)
	}

	init := &clientInit{}
	actual, _ := s3ClientCache.LoadOrStore(s3Config, init)
	ci, ok := actual.(*clientInit)
	if !ok {
		// Defensive: only *clientInit values are ever stored, so this branch
		// should be unreachable; fall back to an uncached build just in case.
		return buildS3Client(s3Config)
	}

	ci.once.Do(func() {
		ci.client, ci.err = buildS3Client(s3Config)
		if ci.err != nil {
			// Drop the failed entry so the next caller retries the build
			// instead of being served a permanently cached error.
			s3ClientCache.CompareAndDelete(s3Config, ci)
		}
	})
	return ci.client, ci.err
}



================================================
FILE: cache/s3v2/s3_test.go
================================================
//go:build !integration

package s3v2

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"net/url"
	"sync"
	"sync/atomic"
	"testing"
	"time"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/service/s3"
	dto "github.com/prometheus/client_model/go"
	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"

	"github.com/johannesboyne/gofakes3"
	"github.com/johannesboyne/gofakes3/backend/s3mem"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/testutil"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// sessionPolicy mirrors the IAM session-policy JSON document sent to STS,
// used by the mock STS handler to validate AssumeRole requests.
type sessionPolicy struct {
	Version   string            `json:"Version"`
	Statement []policyStatement `json:"Statement"`
}

// policyStatement is a single statement within a sessionPolicy.
type policyStatement struct {
	Effect   string   `json:"Effect"`
	Action   []string `json:"Action"`
	Resource string   `json:"Resource"`
}

// setupMockS3Server starts an in-memory gofakes3 S3 server, registers its
// shutdown with t.Cleanup, and returns a cache config pointing at it
// (static credentials, insecure HTTP).
func setupMockS3Server(t *testing.T) *cacheconfig.CacheS3Config {
	backend := s3mem.New()
	server := gofakes3.New(backend)
	ts := httptest.NewServer(server.Server())

	ctx, cancel := context.WithTimeout(t.Context(), time.Minute)
	defer cancel()

	url, err := url.Parse(ts.URL)
	require.NoError(t, err)

	s3Config := &cacheconfig.CacheS3Config{
		ServerAddress:  url.Host,
		Insecure:       true,
		BucketLocation: "us-west-1",
		BucketName:     "test-bucket",
		AccessKey:      "test-access-key",
		SecretKey:      "test-secret-key",
	}

	t.Cleanup(func() { ts.Close() })

	_,
client, err := newRawS3Client(s3Config) require.NoError(t, err) _, err = client.CreateBucket(ctx, &s3.CreateBucketInput{ Bucket: aws.String(s3Config.BucketName), }) require.NoError(t, err) return s3Config } func TestS3ClientCaching(t *testing.T) { s3Config := &cacheconfig.CacheS3Config{ AccessKey: "test-access-key", SecretKey: "test-secret-key", BucketName: "test-bucket", BucketLocation: "us-west-2", } t.Cleanup(func() { s3ClientCache.Delete(s3Config) }) c1, err := newS3Client(s3Config) require.NoError(t, err) // Same pointer returns the same instance. c2, err := newS3Client(s3Config) require.NoError(t, err) assert.Same(t, c1.(*s3Client), c2.(*s3Client)) // A different pointer (simulating a config reload) returns a new instance. reloadedConfig := *s3Config t.Cleanup(func() { s3ClientCache.Delete(&reloadedConfig) }) c3, err := newS3Client(&reloadedConfig) require.NoError(t, err) assert.NotSame(t, c1.(*s3Client), c3.(*s3Client)) // Options bypass the cache entirely. c4, err := newS3Client(s3Config, withSTSEndpoint("http://sts.example.com")) require.NoError(t, err) assert.NotSame(t, c1.(*s3Client), c4.(*s3Client)) } func TestNewS3ClientOptions(t *testing.T) { disableDualStack := false tests := map[string]struct { s3Config cacheconfig.CacheS3Config expectedStaticCreds bool expectedRegion string expectedScheme string usePathStyle bool expectedAccelerate bool expectedDualStack bool expectedEndpoint string }{ "s3-standard": { s3Config: cacheconfig.CacheS3Config{ AccessKey: "test-access-key", SecretKey: "test-secret-key", ServerAddress: "s3.amazonaws.com", BucketName: "test-bucket", BucketLocation: "us-west-2", }, expectedStaticCreds: true, expectedRegion: "us-west-2", expectedScheme: "https", expectedEndpoint: "", expectedDualStack: true, }, "s3-standard-with-session-token": { s3Config: cacheconfig.CacheS3Config{ AccessKey: "test-access-key", SecretKey: "test-secret-key", SessionToken: "test-session-token", ServerAddress: "s3.amazonaws.com", BucketName: "test-bucket", 
BucketLocation: "us-west-2", }, expectedStaticCreds: true, expectedRegion: "us-west-2", expectedScheme: "https", expectedEndpoint: "", expectedDualStack: true, }, "s3-standard-dual-stack": { s3Config: cacheconfig.CacheS3Config{ BucketName: "test-bucket", BucketLocation: "us-west-2", DualStack: &disableDualStack, }, expectedDualStack: false, expectedRegion: "us-west-2", expectedScheme: "https", expectedEndpoint: "", }, "s3-default-address-set": { s3Config: cacheconfig.CacheS3Config{ BucketName: "test-bucket", BucketLocation: "us-west-2", ServerAddress: "s3.amazonaws.com", }, expectedDualStack: true, expectedRegion: "us-west-2", expectedScheme: "https", expectedEndpoint: "", }, "s3-iam-profile": { s3Config: cacheconfig.CacheS3Config{ BucketName: "test-bucket", BucketLocation: "us-west-2", }, expectedRegion: "us-west-2", expectedScheme: "https", expectedEndpoint: "", expectedDualStack: true, }, "s3-accelerate": { s3Config: cacheconfig.CacheS3Config{ BucketName: "test-bucket", BucketLocation: "us-east-1", Accelerate: true, }, expectedRegion: "us-east-1", expectedScheme: "https", expectedAccelerate: true, expectedDualStack: true, }, "s3-accelerate-custom-endpoint": { s3Config: cacheconfig.CacheS3Config{ ServerAddress: "s3-accelerate.amazonaws.com", BucketName: "test-bucket", BucketLocation: "us-east-1", }, expectedRegion: "us-east-1", expectedScheme: "https", expectedEndpoint: "https://s3-accelerate.amazonaws.com", expectedAccelerate: true, expectedDualStack: false, }, "s3-custom-endpoint": { s3Config: cacheconfig.CacheS3Config{ ServerAddress: "localhost:9000", BucketName: "test-bucket", BucketLocation: "us-west-2", Insecure: true, }, expectedRegion: "us-west-2", expectedScheme: "http", usePathStyle: true, // Not virtual-host compatible expectedEndpoint: "http://localhost:9000", expectedDualStack: false, }, "s3-dual-stack": { s3Config: cacheconfig.CacheS3Config{ BucketName: "test-bucket", BucketLocation: "us-east-1", }, expectedRegion: "us-east-1", expectedScheme: 
"https", usePathStyle: false, expectedDualStack: true, }, "s3-dual-stack-and-accelerate": { s3Config: cacheconfig.CacheS3Config{ BucketName: "test-bucket", BucketLocation: "us-east-1", Accelerate: true, }, expectedRegion: "us-east-1", expectedScheme: "https", usePathStyle: false, expectedDualStack: true, }, "s3-dual-stack-and-endpoint": { s3Config: cacheconfig.CacheS3Config{ ServerAddress: "localhost:9000", BucketName: "test-bucket", BucketLocation: "us-east-1", }, expectedRegion: "us-east-1", expectedScheme: "https", usePathStyle: true, expectedEndpoint: "https://localhost:9000", expectedDualStack: false, }, "s3-no-region": { s3Config: cacheconfig.CacheS3Config{ ServerAddress: "localhost:9000", BucketName: "test-bucket", }, expectedRegion: "us-east-1", expectedScheme: "https", usePathStyle: true, expectedEndpoint: "https://localhost:9000", expectedDualStack: false, }, } for testName, tt := range tests { t.Run(testName, func(t *testing.T) { client, err := newS3Client(&tt.s3Config) require.NoError(t, err) s3Client := client.(*s3Client).client if tt.expectedStaticCreds { credsProvider := s3Client.Options().Credentials creds, err := credsProvider.Retrieve(t.Context()) require.NoError(t, err) require.Equal(t, tt.s3Config.AccessKey, creds.AccessKeyID) require.Equal(t, tt.s3Config.SecretKey, creds.SecretAccessKey) require.Equal(t, tt.s3Config.SessionToken, creds.SessionToken) } clientOptions := s3Client.Options() require.Equal(t, tt.expectedRegion, clientOptions.Region) require.Equal(t, tt.s3Config.Accelerate, clientOptions.UseAccelerate) require.Equal(t, tt.expectedDualStack, clientOptions.UseDualstack) // nolint:staticcheck require.Equal(t, tt.usePathStyle, clientOptions.UsePathStyle) if tt.expectedEndpoint == "" { require.Nil(t, clientOptions.BaseEndpoint) } else { require.Equal(t, tt.expectedEndpoint, *clientOptions.BaseEndpoint) } }) } } func TestS3Client_PresignURL(t *testing.T) { s3Config := setupMockS3Server(t) tests := map[string]struct { encryptionType string 
encryptionKeyID string accessKey string secretKey string expectedEncryption string expectedKMSKeyID string }{ "no-encryption-with-credentials": { encryptionType: "", accessKey: "test-access-key", secretKey: "test-secret-key", expectedEncryption: "", expectedKMSKeyID: "", }, "s3-encryption-with-credentials": { encryptionType: "S3", accessKey: "test-access-key", secretKey: "test-secret-key", expectedEncryption: "AES256", expectedKMSKeyID: "", }, "kms-encryption-with-credentials": { encryptionType: "KMS", encryptionKeyID: "alias/my-key", accessKey: "test-access-key", secretKey: "test-secret-key", expectedEncryption: "aws:kms", expectedKMSKeyID: "alias/my-key", }, "kms-dsse-encryption-with-credentials": { encryptionType: "DSSE-KMS", encryptionKeyID: "alias/my-key", accessKey: "test-access-key", secretKey: "test-secret-key", expectedEncryption: "aws:kms:dsse", expectedKMSKeyID: "alias/my-key", }, } for testName, tt := range tests { t.Run(testName, func(t *testing.T) { s3Config.ServerSideEncryption = tt.encryptionType s3Config.ServerSideEncryptionKeyID = tt.encryptionKeyID s3Config.AccessKey = tt.accessKey s3Config.SecretKey = tt.secretKey s3Client, err := newS3Client(s3Config) require.NoError(t, err) // Presign a PUT request to upload an object objectName := "test-object" url, err := s3Client.PresignURL(t.Context(), http.MethodPut, s3Config.BucketName, objectName, nil, 5*time.Minute) require.NoError(t, err) // Verify encryption headers if tt.expectedEncryption != "" { assert.Equal(t, tt.expectedEncryption, url.Headers.Get("x-amz-server-side-encryption")) } if tt.expectedKMSKeyID != "" { assert.Equal(t, tt.expectedKMSKeyID, url.Headers.Get("x-amz-server-side-encryption-aws-kms-key-id")) } // Use the presigned URL to upload an object content := []byte("Hello, world!") req, err := http.NewRequest(http.MethodPut, url.URL.String(), bytes.NewReader(content)) require.NoError(t, err) client := &http.Client{} resp, err := client.Do(req) require.NoError(t, err) assert.Equal(t, 
http.StatusOK, resp.StatusCode) resp.Body.Close() // Presign a GET request to download the object url, err = s3Client.PresignURL(t.Context(), http.MethodGet, s3Config.BucketName, objectName, nil, 5*time.Minute) require.NoError(t, err) req, err = http.NewRequest(http.MethodGet, url.URL.String(), bytes.NewReader(content)) require.NoError(t, err) resp, err = client.Do(req) require.NoError(t, err) assert.Equal(t, http.StatusOK, resp.StatusCode) body, err := io.ReadAll(resp.Body) require.NoError(t, err) resp.Body.Close() assert.Equal(t, content, body) // Presign a HEAD request to verify object existence url, err = s3Client.PresignURL(t.Context(), http.MethodHead, s3Config.BucketName, objectName, nil, 5*time.Minute) require.NoError(t, err) req, err = http.NewRequest(http.MethodHead, url.URL.String(), nil) require.NoError(t, err) resp, err = client.Do(req) require.NoError(t, err) resp.Body.Close() assert.Equal(t, http.StatusOK, resp.StatusCode) }) } } func TestS3Client_PresignURL_UnknownMethodError(t *testing.T) { s3Config := setupMockS3Server(t) s3Client, err := newS3Client(s3Config) require.NoError(t, err) _, err = s3Client.PresignURL(t.Context(), "INVALID", s3Config.BucketName, "some-object", nil, 5*time.Minute) require.Error(t, err) assert.Contains(t, err.Error(), "unsupported method: INVALID") } func newMockSTSHandler(expectedKms bool, expectedDurationSecs int, s3Partition string) http.Handler { roleARN := "arn:aws:iam::123456789012:role/TestRole" expectedStatements := 1 if expectedKms { expectedStatements = 2 } return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path != "/sts" { http.NotFound(w, r) return } body, err := io.ReadAll(r.Body) if err != nil { http.Error(w, "Failed to read request body", http.StatusBadRequest) return } defer r.Body.Close() queryValues, err := url.ParseQuery(string(body)) if err != nil { http.Error(w, "Failed to parse request body", http.StatusBadRequest) return } if queryValues.Get("Action") != "AssumeRole" { 
http.Error(w, "Invalid Action parameter", http.StatusBadRequest) return } if queryValues.Get("RoleArn") == "" { http.Error(w, "Missing RoleArn parameter", http.StatusBadRequest) return } if queryValues.Get("RoleArn") != roleARN { http.Error(w, "Invalid RoleArn parameter", http.StatusUnauthorized) return } if queryValues.Get("DurationSeconds") != fmt.Sprintf("%d", expectedDurationSecs) { http.Error(w, "Invalid DurationSeconds parameter", http.StatusUnauthorized) return } if queryValues.Get("RoleSessionName") == "" { http.Error(w, "Missing RoleSessionName parameter", http.StatusBadRequest) return } policy := queryValues.Get("Policy") if policy == "" { http.Error(w, "Missing Policy parameter", http.StatusBadRequest) return } var policyJSON sessionPolicy err = json.Unmarshal([]byte(policy), &policyJSON) if err != nil { http.Error(w, "Invalid Policy JSON", http.StatusBadRequest) return } if policyJSON.Statement == nil || len(policyJSON.Statement) != expectedStatements { http.Error(w, fmt.Sprintf("Policy must contain exactly %d Statements", expectedStatements), http.StatusBadRequest) return } statement := policyJSON.Statement[0] if statement.Action == nil || len(statement.Action) != 1 { http.Error(w, "Statement must contain exactly one Action", http.StatusBadRequest) return } if statement.Action[0] != "s3:PutObject" { http.Error(w, "Action should be s3:PutObject", http.StatusBadRequest) return } if expectedKms { kmsStatement := policyJSON.Statement[1] if kmsStatement.Action == nil || len(kmsStatement.Action) != 2 { http.Error(w, "KMS Statement must contain exactly two Actions", http.StatusBadRequest) return } if kmsStatement.Action[0] != "kms:Decrypt" || kmsStatement.Action[1] != "kms:GenerateDataKey" { http.Error(w, "KMS Statement Actions should be kms:Decrypt and kms:GenerateDataKey", http.StatusBadRequest) return } } if s3Partition == "" { s3Partition = "aws" } if statement.Resource != fmt.Sprintf("arn:%s:s3:::%s/%s", s3Partition, bucketName, objectName) { 
http.Error(w, "Invalid policy statement", http.StatusBadRequest) return } w.Header().Set("Content-Type", "application/xml") w.WriteHeader(http.StatusOK) // See https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html _, err = w.Write([]byte(` mock-access-key mock-secret-key mock-session-token ` + time.Now().Add(time.Hour).Format(time.RFC3339) + ` AROATEST123:TestSession arn:aws:sts::123456789012:assumed-role/TestRole/TestSession c6104cbe-af31-11e0-8154-cbc7ccf896c7 `)) if err != nil { w.WriteHeader(http.StatusExpectationFailed) } }) } func TestFetchCredentialsForRole(t *testing.T) { workingConfig := cacheconfig.Config{ S3: &cacheconfig.CacheS3Config{ AccessKey: "test-access-key", SecretKey: "test-secret-key", AuthenticationType: "access-key", BucketName: "test-bucket", UploadRoleARN: "arn:aws:iam::123456789012:role/TestRole", }, } mockedCreds := map[string]string{ "AWS_ACCESS_KEY_ID": "mock-access-key", "AWS_SECRET_ACCESS_KEY": "mock-secret-key", "AWS_SESSION_TOKEN": "mock-session-token", "AWS_PROFILE": "", } govCloudConfig := cacheconfig.Config{ S3: &cacheconfig.CacheS3Config{ AccessKey: "test-access-key", BucketLocation: "us-gov-west-1", SecretKey: "test-secret-key", AuthenticationType: "access-key", BucketName: "test-bucket", UploadRoleARN: "arn:aws:iam::123456789012:role/TestRole", }, } chinaConfig := cacheconfig.Config{ S3: &cacheconfig.CacheS3Config{ AccessKey: "test-access-key", BucketLocation: "cn-north-1", SecretKey: "test-secret-key", AuthenticationType: "access-key", BucketName: "test-bucket", UploadRoleARN: "arn:aws:iam::123456789012:role/TestRole", }, } tests := map[string]struct { config *cacheconfig.Config roleARN string expected map[string]string errMsg string expectedKms bool duration time.Duration expectedDuration time.Duration s3Partition string }{ "successful fetch": { config: &workingConfig, roleARN: "arn:aws:iam::123456789012:role/TestRole", expected: mockedCreds, }, "successful fetch with GovCloud config": { config: 
&govCloudConfig, roleARN: "arn:aws:iam::123456789012:role/TestRole", expected: mockedCreds, s3Partition: "aws-us-gov", }, "successful fetch with China config": { config: &chinaConfig, roleARN: "arn:aws:iam::123456789012:role/TestRole", expected: mockedCreds, s3Partition: "aws-cn", }, "successful fetch with 12-hour timeout downgraded to 1-hour": { config: &workingConfig, roleARN: "arn:aws:iam::123456789012:role/TestRole", duration: 12 * time.Hour, expected: mockedCreds, expectedDuration: 1 * time.Hour, }, "successful fetch with 10-minute timeout": { config: &workingConfig, roleARN: "arn:aws:iam::123456789012:role/TestRole", duration: 10 * time.Minute, expected: mockedCreds, expectedDuration: 1 * time.Hour, }, "successful fetch with 13-hour timeout": { config: &workingConfig, roleARN: "arn:aws:iam::123456789012:role/TestRole", duration: 13 * time.Hour, expected: mockedCreds, expectedDuration: 1 * time.Hour, }, "successful fetch with encryption": { config: &cacheconfig.Config{ S3: &cacheconfig.CacheS3Config{ AccessKey: "test-access-key", SecretKey: "test-secret-key", AuthenticationType: "access-key", BucketName: "test-bucket", UploadRoleARN: "arn:aws:iam::123456789012:role/TestRole", ServerSideEncryption: "KMS", ServerSideEncryptionKeyID: "arn:aws:kms:us-west-2:123456789012:key/1234abcd-12ab-34cd-56ef-1234567890ab", }, }, roleARN: "arn:aws:iam::123456789012:role/TestRole", expected: mockedCreds, expectedKms: true, }, "invalid role ARN": { config: &cacheconfig.Config{ S3: &cacheconfig.CacheS3Config{ AccessKey: "test-access-key", SecretKey: "test-secret-key", AuthenticationType: "access-key", BucketName: bucketName, UploadRoleARN: "arn:aws:iam::123456789012:role/InvalidRole", }, }, roleARN: "arn:aws:iam::123456789012:role/InvalidRole", errMsg: "failed to assume role", }, "no role ARN": { config: &cacheconfig.Config{ S3: &cacheconfig.CacheS3Config{ AccessKey: "test-access-key", SecretKey: "test-secret-key", AuthenticationType: "access-key", BucketName: bucketName, }, }, 
expected: nil, errMsg: "failed to assume role", }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { FlushCredentialCache() t.Cleanup(FlushCredentialCache) duration := 3600 if tt.duration > 0 { duration = int(tt.expectedDuration.Seconds()) } // Create s3Client and point STS endpoint to it mockServer := httptest.NewServer(newMockSTSHandler(tt.expectedKms, duration, tt.s3Partition)) defer mockServer.Close() s3Client, err := newS3Client(tt.config.S3, withSTSEndpoint(mockServer.URL+"/sts")) require.NoError(t, err) creds, err := s3Client.FetchCredentialsForRole(t.Context(), tt.roleARN, bucketName, objectName, true, tt.duration) if tt.errMsg != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.errMsg) } else { require.NoError(t, err) assert.Equal(t, tt.expected, creds) } }) } } func histogramSampleCount(t *testing.T, h prometheus.Histogram) uint64 { t.Helper() var m dto.Metric require.NoError(t, h.Write(&m)) return m.GetHistogram().GetSampleCount() } // TestFetchCredentialsForRole_ConcurrencyLimit verifies that at most 5 // AssumeRole calls are in-flight at any time. 
func TestFetchCredentialsForRole_ConcurrencyLimit(t *testing.T) { FlushCredentialCache() t.Cleanup(FlushCredentialCache) const semSize = 5 const numRequests = 8 testSem := make(chan struct{}, semSize) var currentInFlight atomic.Int32 reached := make(chan struct{}, numRequests) release := make(chan struct{}) successXML := ` mock-access-key mock-secret-key mock-session-token ` + time.Now().Add(time.Hour).Format(time.RFC3339) + ` test ` server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path != "/sts" { http.NotFound(w, r) return } _, _ = io.ReadAll(r.Body) r.Body.Close() currentInFlight.Add(1) reached <- struct{}{} <-release currentInFlight.Add(-1) w.Header().Set("Content-Type", "application/xml") _, _ = w.Write([]byte(successXML)) })) defer server.Close() s3Config := &cacheconfig.CacheS3Config{ AccessKey: "test-access-key", SecretKey: "test-secret-key", AuthenticationType: "access-key", BucketName: bucketName, BucketLocation: "us-east-1", } client, err := newS3Client(s3Config, withSTSEndpoint(server.URL+"/sts"), withAssumeRoleSem(testSem)) require.NoError(t, err) var wg sync.WaitGroup for range numRequests { wg.Add(1) go func() { defer wg.Done() _, _ = client.FetchCredentialsForRole(t.Context(), "arn:aws:iam::123456789012:role/TestRole", bucketName, objectName, true, 0) }() } // Wait for exactly semSize requests to be in-flight inside the handler. for range semSize { select { case <-reached: case <-time.After(5 * time.Second): t.Fatal("timed out waiting for requests to reach server") } } assert.EqualValues(t, semSize, currentInFlight.Load()) close(release) wg.Wait() } // TestFetchCredentialsForRole_ContextCancelledWaitingForSemaphore verifies // that a cancelled context while waiting for a semaphore slot is returned // immediately as an error. 
func TestFetchCredentialsForRole_ContextCancelledWaitingForSemaphore(t *testing.T) { FlushCredentialCache() t.Cleanup(FlushCredentialCache) fullSem := make(chan struct{}, 5) for range 5 { fullSem <- struct{}{} } ctx, cancel := context.WithCancel(t.Context()) cancel() s3Config := &cacheconfig.CacheS3Config{ AccessKey: "test-access-key", SecretKey: "test-secret-key", AuthenticationType: "access-key", BucketName: bucketName, BucketLocation: "us-east-1", } client, err := newS3Client(s3Config, withSTSEndpoint("http://127.0.0.1:0/sts"), withAssumeRoleSem(fullSem)) require.NoError(t, err) _, err = client.FetchCredentialsForRole(ctx, "arn:aws:iam::123456789012:role/TestRole", bucketName, objectName, true, 0) require.Error(t, err) assert.Contains(t, err.Error(), "context cancelled waiting for AssumeRole semaphore") } // TestFetchCredentialsForRole_Metrics verifies that a successful call updates // the in-flight gauge and records an observation in both duration histograms. func TestFetchCredentialsForRole_Metrics(t *testing.T) { FlushCredentialCache() t.Cleanup(FlushCredentialCache) origInFlight := assumeRoleInFlight origWait := assumeRoleWaitDuration origCall := assumeRoleCallDuration testInFlight := prometheus.NewGauge(prometheus.GaugeOpts{Name: "test_in_flight", Help: "test"}) testWait := prometheus.NewHistogram(prometheus.HistogramOpts{Name: "test_wait", Help: "test"}) testCall := prometheus.NewHistogram(prometheus.HistogramOpts{Name: "test_call", Help: "test"}) assumeRoleInFlight = testInFlight assumeRoleWaitDuration = testWait assumeRoleCallDuration = testCall t.Cleanup(func() { assumeRoleInFlight = origInFlight assumeRoleWaitDuration = origWait assumeRoleCallDuration = origCall }) mockServer := httptest.NewServer(newMockSTSHandler(false, 3600, "")) defer mockServer.Close() s3Config := &cacheconfig.CacheS3Config{ AccessKey: "test-access-key", SecretKey: "test-secret-key", AuthenticationType: "access-key", BucketName: bucketName, } client, err := newS3Client(s3Config, 
withSTSEndpoint(mockServer.URL+"/sts")) require.NoError(t, err) _, err = client.FetchCredentialsForRole(t.Context(), "arn:aws:iam::123456789012:role/TestRole", bucketName, objectName, true, 0) require.NoError(t, err) // In-flight gauge must return to 0 after the call completes. assert.EqualValues(t, 0, testutil.ToFloat64(testInFlight)) // Both histograms must have recorded exactly one observation. assert.EqualValues(t, 1, histogramSampleCount(t, testWait)) assert.EqualValues(t, 1, histogramSampleCount(t, testCall)) } func TestDetectBucketLocation(t *testing.T) { tests := map[string]struct { locationConstraint string serverError bool expectedLocation string }{ "returns region from custom endpoint": { locationConstraint: "us-west-2", expectedLocation: "us-west-2", }, "maps EU alias to eu-west-1": { locationConstraint: "EU", expectedLocation: "eu-west-1", }, "falls back to us-east-1 on server error": { serverError: true, expectedLocation: fallbackBucketLocation, }, "falls back to us-east-1 on empty location constraint": { locationConstraint: "", expectedLocation: fallbackBucketLocation, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { // Provide static credentials so the SDK doesn't attempt IMDS lookups. t.Setenv("AWS_ACCESS_KEY_ID", "test-access-key") t.Setenv("AWS_SECRET_ACCESS_KEY", "test-secret-key") serverCalled := false ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { serverCalled = true if tt.serverError { http.Error(w, "internal error", http.StatusInternalServerError) return } // GetBucketLocation is a GET /?location request. // Respond with the configured location constraint for any request. 
w.Header().Set("Content-Type", "application/xml") w.WriteHeader(http.StatusOK) fmt.Fprintf(w, `%s`, tt.locationConstraint, ) })) defer ts.Close() tsURL, err := url.Parse(ts.URL) require.NoError(t, err) s3Config := &cacheconfig.CacheS3Config{ BucketName: "test-bucket", ServerAddress: tsURL.Host, Insecure: true, } location := detectBucketLocation(s3Config) assert.Equal(t, tt.expectedLocation, location) assert.True(t, serverCalled, "expected the mock server to be contacted") }) } } // TestFetchCredentialsForRole_CacheHit verifies that a second call with the // same (roleARN, bucketName, objectName, upload) tuple returns the cached // credentials without issuing a new STS request. func TestFetchCredentialsForRole_CacheHit(t *testing.T) { FlushCredentialCache() t.Cleanup(FlushCredentialCache) var callCount atomic.Int32 server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { callCount.Add(1) w.Header().Set("Content-Type", "application/xml") _, _ = fmt.Fprintf(w, ` cached-key cached-secret cached-token %s test `, time.Now().Add(time.Hour).Format(time.RFC3339)) })) defer server.Close() s3Config := &cacheconfig.CacheS3Config{ AccessKey: "test-access-key", SecretKey: "test-secret-key", AuthenticationType: "access-key", BucketName: bucketName, BucketLocation: "us-east-1", } roleARN := "arn:aws:iam::123456789012:role/CacheTestRole" client, err := newS3Client(s3Config, withSTSEndpoint(server.URL)) require.NoError(t, err) // First call: hits STS. creds1, err := client.FetchCredentialsForRole(t.Context(), roleARN, bucketName, objectName, false, 0) require.NoError(t, err) assert.EqualValues(t, 1, callCount.Load(), "first call should reach STS") // Second call with the same key: must return the cached creds, not call STS again. 
creds2, err := client.FetchCredentialsForRole(t.Context(), roleARN, bucketName, objectName, false, 0) require.NoError(t, err) assert.EqualValues(t, 1, callCount.Load(), "second call must be served from cache") assert.Equal(t, creds1, creds2) // A call with a different key (upload=true) must reach STS. _, err = client.FetchCredentialsForRole(t.Context(), roleARN, bucketName, objectName, true, 0) require.NoError(t, err) assert.EqualValues(t, 2, callCount.Load(), "different key must reach STS") } // TestFetchCredentialsForRole_CacheExpiry verifies that a cached credential // that does not have enough remaining validity is not reused. func TestFetchCredentialsForRole_CacheExpiry(t *testing.T) { FlushCredentialCache() t.Cleanup(FlushCredentialCache) // Pre-populate the cache with credentials that expire in 30 seconds — // less than the 1-minute minimum validity floor. credKey := assumeRoleCacheKey("arn:aws:iam::123456789012:role/ExpiryRole", bucketName, objectName, false) assumeRoleCredCache.Add(credKey, cachedCredential{ creds: map[string]string{"AWS_ACCESS_KEY_ID": "stale-key"}, expiresAt: time.Now().Add(30 * time.Second), }) var callCount atomic.Int32 server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { callCount.Add(1) w.Header().Set("Content-Type", "application/xml") _, _ = fmt.Fprintf(w, ` fresh-key fresh-secret fresh-token %s test `, time.Now().Add(time.Hour).Format(time.RFC3339)) })) defer server.Close() s3Config := &cacheconfig.CacheS3Config{ AccessKey: "test-access-key", SecretKey: "test-secret-key", AuthenticationType: "access-key", BucketName: bucketName, BucketLocation: "us-east-1", } client, err := newS3Client(s3Config, withSTSEndpoint(server.URL)) require.NoError(t, err) creds, err := client.FetchCredentialsForRole(t.Context(), "arn:aws:iam::123456789012:role/ExpiryRole", bucketName, objectName, false, 0) require.NoError(t, err) assert.EqualValues(t, 1, callCount.Load(), "expired cache entry must not be reused") 
	// Tail of TestFetchCredentialsForRole_CacheExpiry: the stale entry must
	// have been replaced by the freshly fetched credentials.
	assert.Equal(t, "fresh-key", creds["AWS_ACCESS_KEY_ID"])
}

// TestFetchCredentialsForRole_NoErrorCaching verifies that a failed AssumeRole
// call does not populate the cache.
func TestFetchCredentialsForRole_NoErrorCaching(t *testing.T) {
	FlushCredentialCache()
	t.Cleanup(FlushCredentialCache)

	// Use an unreachable STS endpoint to force an error.
	s3Config := &cacheconfig.CacheS3Config{
		AccessKey:          "test-access-key",
		SecretKey:          "test-secret-key",
		AuthenticationType: "access-key",
		BucketName:         bucketName,
		BucketLocation:     "us-east-1",
	}
	roleARN := "arn:aws:iam::123456789012:role/ErrorRole"

	client, err := newS3Client(s3Config, withSTSEndpoint("http://127.0.0.1:0"))
	require.NoError(t, err)

	_, err = client.FetchCredentialsForRole(t.Context(), roleARN, bucketName, objectName, false, 0)
	require.Error(t, err)

	// The cache must not contain an entry for the failed key.
	credKey := assumeRoleCacheKey(roleARN, bucketName, objectName, false)
	_, cached := assumeRoleCredCache.Get(credKey)
	assert.False(t, cached, "failed AssumeRole call must not be cached")
}

// TestFlushCredentialCache verifies that FlushCredentialCache removes all
// entries regardless of their validity.
func TestFlushCredentialCache(t *testing.T) {
	FlushCredentialCache()
	t.Cleanup(FlushCredentialCache)

	// Seed two entries that are still valid for an hour; flushing must
	// remove them anyway.
	assumeRoleCredCache.Add("key-a", cachedCredential{
		creds:     map[string]string{"k": "v"},
		expiresAt: time.Now().Add(time.Hour),
	})
	assumeRoleCredCache.Add("key-b", cachedCredential{
		creds:     map[string]string{"k": "v"},
		expiresAt: time.Now().Add(time.Hour),
	})
	require.Equal(t, 2, assumeRoleCredCache.Len())

	FlushCredentialCache()
	assert.Equal(t, 0, assumeRoleCredCache.Len())
}

// TestFetchCredentialsForRole_CacheMetrics verifies that cache hits and misses
// are counted correctly.
func TestFetchCredentialsForRole_CacheMetrics(t *testing.T) { FlushCredentialCache() t.Cleanup(FlushCredentialCache) origHits := assumeRoleCredCacheHits origMisses := assumeRoleCredCacheMisses testHits := prometheus.NewCounter(prometheus.CounterOpts{Name: "test_cache_hits", Help: "test"}) testMisses := prometheus.NewCounter(prometheus.CounterOpts{Name: "test_cache_misses", Help: "test"}) assumeRoleCredCacheHits = testHits assumeRoleCredCacheMisses = testMisses t.Cleanup(func() { assumeRoleCredCacheHits = origHits assumeRoleCredCacheMisses = origMisses }) server := httptest.NewServer(newMockSTSHandler(false, 3600, "")) defer server.Close() s3Config := &cacheconfig.CacheS3Config{ AccessKey: "test-access-key", SecretKey: "test-secret-key", AuthenticationType: "access-key", BucketName: bucketName, BucketLocation: "us-east-1", } roleARN := "arn:aws:iam::123456789012:role/TestRole" client, err := newS3Client(s3Config, withSTSEndpoint(server.URL+"/sts")) require.NoError(t, err) // First call: cache miss. _, err = client.FetchCredentialsForRole(t.Context(), roleARN, bucketName, objectName, true, 0) require.NoError(t, err) assert.EqualValues(t, 0, testutil.ToFloat64(testHits)) assert.EqualValues(t, 1, testutil.ToFloat64(testMisses)) // Second call with the same key: cache hit. _, err = client.FetchCredentialsForRole(t.Context(), roleARN, bucketName, objectName, true, 0) require.NoError(t, err) assert.EqualValues(t, 1, testutil.ToFloat64(testHits)) assert.EqualValues(t, 1, testutil.ToFloat64(testMisses)) } // TestFetchCredentialsForRole_CacheDisabled verifies that setting // DisableAssumeRoleCredentialsCaching causes every call to reach STS. 
func TestFetchCredentialsForRole_CacheDisabled(t *testing.T) {
	FlushCredentialCache()
	t.Cleanup(FlushCredentialCache)

	// Count every request that actually reaches the fake STS endpoint.
	var callCount atomic.Int32
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		callCount.Add(1)
		w.Header().Set("Content-Type", "application/xml")
		// NOTE(review): this response body looks like an AssumeRole XML payload
		// whose element tags were stripped by the source extraction — confirm
		// the literal against the repository before relying on it.
		_, _ = fmt.Fprintf(w, ` key secret token %s test `, time.Now().Add(time.Hour).Format(time.RFC3339))
	}))
	defer server.Close()

	s3Config := &cacheconfig.CacheS3Config{
		AccessKey:          "test-access-key",
		SecretKey:          "test-secret-key",
		AuthenticationType: "access-key",
		BucketName:         bucketName,
		BucketLocation:     "us-east-1",
		// The setting under test: every fetch must bypass the cache.
		DisableAssumeRoleCredentialsCaching: true,
	}

	roleARN := "arn:aws:iam::123456789012:role/TestRole"
	client, err := newS3Client(s3Config, withSTSEndpoint(server.URL))
	require.NoError(t, err)

	// Three identical calls; with caching disabled each one must hit STS.
	for range 3 {
		_, err = client.FetchCredentialsForRole(t.Context(), roleARN, bucketName, objectName, true, 0)
		require.NoError(t, err)
	}

	assert.EqualValues(t, 3, callCount.Load(), "every call must reach STS when caching is disabled")

	_, cached := assumeRoleCredCache.Get(assumeRoleCacheKey(roleARN, bucketName, objectName, true))
	assert.False(t, cached, "disabled cache must not be populated")
}

// TestChecksumDefaults checks which checksum validation/calculation modes the
// client configuration ends up with for AWS vs. custom (e.g. MinIO) endpoints,
// and how the AWS_* environment variables override them.
func TestChecksumDefaults(t *testing.T) {
	tests := map[string]struct {
		serverAddress string
		envResponse   string
		envRequest    string
		wantResponse  aws.ResponseChecksumValidation
		wantRequest   aws.RequestChecksumCalculation
	}{
		"custom endpoint defaults to WhenRequired": {
			serverAddress: "minio.example.com:9000",
			wantResponse:  aws.ResponseChecksumValidationWhenRequired,
			wantRequest:   aws.RequestChecksumCalculationWhenRequired,
		},
		"AWS endpoint uses SDK default WhenSupported": {
			serverAddress: "",
			wantResponse:  aws.ResponseChecksumValidationWhenSupported,
			wantRequest:   aws.RequestChecksumCalculationWhenSupported,
		},
		"explicit AWS default endpoint uses SDK default WhenSupported": {
			serverAddress: "s3.amazonaws.com",
			wantResponse:  aws.ResponseChecksumValidationWhenSupported,
			wantRequest:
aws.RequestChecksumCalculationWhenSupported,
		},
		// Environment variables take precedence over the custom-endpoint default.
		"custom endpoint: env var overrides response checksum validation": {
			serverAddress: "minio.example.com:9000",
			envResponse:   "when_supported",
			wantResponse:  aws.ResponseChecksumValidationWhenSupported,
			wantRequest:   aws.RequestChecksumCalculationWhenRequired,
		},
		"custom endpoint: env var overrides request checksum calculation": {
			serverAddress: "minio.example.com:9000",
			envRequest:    "when_supported",
			wantResponse:  aws.ResponseChecksumValidationWhenRequired,
			wantRequest:   aws.RequestChecksumCalculationWhenSupported,
		},
		"custom endpoint: both env vars override defaults": {
			serverAddress: "minio.example.com:9000",
			envResponse:   "when_supported",
			envRequest:    "when_supported",
			wantResponse:  aws.ResponseChecksumValidationWhenSupported,
			wantRequest:   aws.RequestChecksumCalculationWhenSupported,
		},
		"AWS endpoint: env var can still set WhenRequired": {
			serverAddress: "",
			envResponse:   "when_required",
			envRequest:    "when_required",
			wantResponse:  aws.ResponseChecksumValidationWhenRequired,
			wantRequest:   aws.RequestChecksumCalculationWhenRequired,
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			// Neutralize any ambient env vars; set the test-specific values.
t.Setenv("AWS_RESPONSE_CHECKSUM_VALIDATION", tt.envResponse)
			t.Setenv("AWS_REQUEST_CHECKSUM_CALCULATION", tt.envRequest)

			s3Config := &cacheconfig.CacheS3Config{
				ServerAddress:  tt.serverAddress,
				AccessKey:      "test-access-key",
				SecretKey:      "test-secret-key",
				BucketName:     "test-bucket",
				BucketLocation: "us-east-1",
			}

			cfg, _, err := newRawS3Client(s3Config)
			require.NoError(t, err)

			assert.Equal(t, tt.wantResponse, cfg.ResponseChecksumValidation)
			assert.Equal(t, tt.wantRequest, cfg.RequestChecksumCalculation)
		})
	}
}

// TestFetchCredentialsForRole_FailureMetric verifies that the failure counter
// is incremented both when STS is unreachable and when STS answers without
// credentials in the payload.
func TestFetchCredentialsForRole_FailureMetric(t *testing.T) {
	FlushCredentialCache()
	t.Cleanup(FlushCredentialCache)

	// Restore the package-level failure counter after the subtests swap it out.
	origFailures := assumeRoleFailures
	t.Cleanup(func() { assumeRoleFailures = origFailures })

	t.Run("STS error increments counter", func(t *testing.T) {
		testFailures := prometheus.NewCounter(prometheus.CounterOpts{Name: "test_failures_sts", Help: "test"})
		assumeRoleFailures = testFailures

		s3Config := &cacheconfig.CacheS3Config{
			AccessKey:          "test-access-key",
			SecretKey:          "test-secret-key",
			AuthenticationType: "access-key",
			BucketName:         bucketName,
			BucketLocation:     "us-east-1",
		}

		// Unconnectable endpoint (port 0) forces a transport-level failure.
		client, err := newS3Client(s3Config, withSTSEndpoint("http://127.0.0.1:0"))
		require.NoError(t, err)

		_, err = client.FetchCredentialsForRole(t.Context(), "arn:aws:iam::123456789012:role/TestRole", bucketName, objectName, true, 0)
		require.Error(t, err)
		assert.EqualValues(t, 1, testutil.ToFloat64(testFailures))
	})

	t.Run("nil credentials increments counter", func(t *testing.T) {
		testFailures := prometheus.NewCounter(prometheus.CounterOpts{Name: "test_failures_nil", Help: "test"})
		assumeRoleFailures = testFailures

		// STS replies 200 OK but the body carries no credentials.
		server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			w.Header().Set("Content-Type", "application/xml")
			w.WriteHeader(http.StatusOK)
			// NOTE(review): XML element tags appear stripped by extraction —
			// confirm this literal against the repository.
			_, _ = w.Write([]byte(` AROATEST123:TestSession arn:aws:sts::123456789012:assumed-role/TestRole/TestSession test `))
		}))
		defer server.Close()

		s3Config := &cacheconfig.CacheS3Config{
			AccessKey:
"test-access-key",
			SecretKey:          "test-secret-key",
			AuthenticationType: "access-key",
			BucketName:         bucketName,
			BucketLocation:     "us-east-1",
		}

		client, err := newS3Client(s3Config, withSTSEndpoint(server.URL+"/sts"))
		require.NoError(t, err)

		// A 200 response with no credentials must still surface as an error
		// and bump the failure counter.
		_, err = client.FetchCredentialsForRole(t.Context(), "arn:aws:iam::123456789012:role/TestRole", bucketName, objectName, true, 0)
		require.Error(t, err)
		assert.EqualValues(t, 1, testutil.ToFloat64(testFailures))
	})
}


================================================
FILE: cache/test/adapter.go
================================================
// Package test provides a fake cache adapter used by tests; it fabricates
// deterministic presigned URLs ("test://<operation>/<object>") instead of
// talking to any real storage backend.
package test

import (
	"context"
	"fmt"
	"net/http"
	"net/url"
	"time"

	"gitlab.com/gitlab-org/gitlab-runner/cache"
	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
)

// testAdapter is a minimal cache.Adapter implementation for tests.
type testAdapter struct {
	// objectName is the cache object path embedded in every generated URL.
	objectName string
	// useGoCloud switches GetGoCloudURL from a zero value to a fake gocloud URL.
	useGoCloud bool
	// metadata is echoed back as x-fakecloud-meta-* headers / query params.
	metadata map[string]string
}

// GetDownloadURL returns a fake presigned download URL.
func (t *testAdapter) GetDownloadURL(ctx context.Context) cache.PresignedURL {
	return cache.PresignedURL{URL: t.getURL("download")}
}

// GetHeadURL returns a fake presigned HEAD URL.
func (t *testAdapter) GetHeadURL(ctx context.Context) cache.PresignedURL {
	return cache.PresignedURL{URL: t.getURL("head")}
}

// GetUploadURL returns a fake presigned upload URL plus the upload headers.
func (t *testAdapter) GetUploadURL(ctx context.Context) cache.PresignedURL {
	return cache.PresignedURL{URL: t.getURL("upload"), Headers: t.GetUploadHeaders()}
}

// GetUploadHeaders returns a fixed marker header plus one header per metadata
// entry, letting tests assert that metadata is propagated.
func (t *testAdapter) GetUploadHeaders() http.Header {
	headers := http.Header{}
	headers.Set("header-1", "a value")
	for k, v := range t.metadata {
		headers.Set("x-fakecloud-meta-"+k, v)
	}
	return headers
}

// GetGoCloudURL returns a fake gocloud:// URL (with metadata encoded into the
// query string) when useGoCloud is set; otherwise it returns the zero value.
func (t *testAdapter) GetGoCloudURL(ctx context.Context, _ bool) (cache.GoCloudURL, error) {
	goCloudURL := cache.GoCloudURL{}
	if t.useGoCloud {
		u, _ := url.Parse(fmt.Sprintf("gocloud://test/%s", t.objectName))
		q := url.Values{}
		for k, v := range t.metadata {
			q.Add("x-fakecloud-meta-"+k, v)
		}
		u.RawQuery = q.Encode()
		goCloudURL.URL = u
		goCloudURL.Environment = t.getUploadEnv(ctx)
		return goCloudURL, nil
	}
	return goCloudURL, nil
}

// WithMetadata stores metadata to be reflected in headers and gocloud URLs.
func (t *testAdapter) WithMetadata(metadata map[string]string) {
	t.metadata = metadata
}

// getUploadEnv returns a fixed fake credential environment for gocloud uploads.
func (t *testAdapter) getUploadEnv(_ context.Context) map[string]string {
	return map[string]string{
		"FIRST_VAR":  "123",
		"SECOND_VAR": "456",
	}
}

// getURL builds a test://<operation>/<objectName> URL.
func (t *testAdapter) getURL(operation string) *url.URL {
	return &url.URL{
		Scheme: "test",
		Host:   operation,
		Path:   t.objectName,
	}
}

// New returns a plain (non-gocloud) test adapter.
func New(_ *cacheconfig.Config, _ time.Duration, objectName string) (cache.Adapter, error) {
	return &testAdapter{objectName: objectName}, nil
}

// NewGoCloudAdapter returns a test adapter that also produces gocloud URLs.
func NewGoCloudAdapter(_ *cacheconfig.Config, _ time.Duration, objectName string) (cache.Adapter, error) {
	return &testAdapter{objectName: objectName, useGoCloud: true}, nil
}

// init registers both adapter flavors with the cache factory registry.
func init() {
	if err := cache.Factories().Register("test", New); err != nil {
		panic(err)
	}
	if err := cache.Factories().Register("goCloudTest", NewGoCloudAdapter); err != nil {
		panic(err)
	}
}


================================================
FILE: certs/README.md
================================================
This directory contains public certificates for signing GitLab Runner binaries.

### Certificates

#### `gitlab-inc-ssl-com.crt`

This certificate is issued by SSL.com and is used to sign Windows binaries. Valid from 2025-03-18 to 2027-11-20.

#### `apple-developer-id-app-cert.cer`

This certificate is issued by Apple and is used to sign macOS binaries. The certificate can also be [downloaded from the Apple Developer Certificates page (requires access to the GitLab group)](https://developer.apple.com/account/resources/certificates/list). Note that [Developer ID Application certificates](https://developer.apple.com/support/developer-id/) can only be uploaded by an owner. Valid from 2025-08-18 to 2030-08-19.

### Windows signing process

The private key for the certificates are stored in a Google Cloud HSM.
The following diagram shows how GitLab Runner binaries are signed: ```mermaid sequenceDiagram participant CI as GitLab CI Job participant OIDC as GitLab OIDC Provider participant GCP as GCP STS/IAM participant Project as gitlab-runner-signing Project participant HSM as GCP HSM participant Binary as Windows Binary CI->>OIDC: Request OIDC JWT token OIDC-->>CI: Return JWT token with job claims CI->>GCP: Exchange JWT for GCP access token
(sts.googleapis.com) GCP-->>CI: GCP access token CI->>Project: Impersonate service account using token Project-->>CI: Service account credentials CI->>Binary: Create binary CI->>HSM: Sign binary using HSM key via Google PKCS#11 library
(key never leaves HSM) HSM-->>CI: Return signature CI->>Binary: Apply signature to binary ``` The `binaries` CI job uses `scripts/sign-{windows,macos}-binaries` to sign binaries for Windows and macOS, respectively. The private key is never accessed directly by the service account during the signing process. ### PKCS#11 architecture ```plaintext ┌─────────────────┐ ┌──────────────────┐ ┌─────────────────┐ │ osslsigncode │───▶│ P11_ENGINE │───▶│ Google's PKCS11 │ │ (OpenSSL- │ │ (OpenSSL │ │ Provider │ │ based) │ │ PKCS11 bridge)│ │ (libkmsp11.so) │ └─────────────────┘ └──────────────────┘ └─────────────────┘ ┌─────────────────┐ ┌─────────────────┐ │ rcodesign │────────────────────────────▶│ Google's PKCS11 │ │ (native │ │ Provider │ │ PKCS11) │ │ (libkmsp11.so) │ └─────────────────┘ └─────────────────┘ ``` For Windows binaries, the script uses [`osslsigncode`](https://github.com/mtrojnar/osslsigncode) with the [Google PKCS#11 library](https://github.com/GoogleCloudPlatform/kms-integrations). As the diagram shows above, `osslsigncode` uses the OpenSSL PKCS#11 bridge to load the Google PKCS#11 provider. See [the user guide](https://github.com/GoogleCloudPlatform/kms-integrations/blob/master/kmsp11/docs/user_guide.md) for more details. For macOS binaries, the script uses [`rcodesign`](https://github.com/indygreg/apple-platform-rs) with [PKCS#11 support](https://github.com/indygreg/apple-platform-rs/pull/198). Unlike `osslsigncode`, `rcodesign` natively loads Google's PKCS#11 library. See the [documentation](https://gregoryszorc.com/docs/apple-codesign/stable/apple_codesign_getting_started.html) for more details. Note that we have to [compile our own binary with PKCS#11 support](https://gitlab.com/gitlab-org/ci-cd/runner-tools/base-images/-/merge_requests/54) because: - The stock `rcodesign` only provides a Linux musl build with a limited feature set. 
- `rcodesign` needs to run in an RedHat's Univeral Base Image (UBI) 8, which ships an older glibc version than most current systems. Note that the service account needs two [Google KMS IAM roles](https://cloud.google.com/kms/docs/reference/permissions-and-roles#cloudkms.signerVerifier) for the Google PKCS11 library to work: - Cloud KMS CryptoKey Signer/Verifier (`roles/cloudkms.signerVerifier`) - Cloud KMS Viewer (`roles/cloudkms.viewer`) The Cloud KMS Viewer role allows the account to retrieve metadata about the keys. The diagram omits the fact that the Google PKCS11 library lists all the keys in the key ring and retrieves information about them. Ideally, only `roles/cloudkms.signerVerifier` would be needed. There is an [open feature request to reduce the permission](https://github.com/GoogleCloudPlatform/kms-integrations/issues/45) when only signing is needed. ================================================ FILE: certs/gitlab-inc-ssl-com.crt ================================================ -----BEGIN CERTIFICATE----- MIIFnjCCA4agAwIBAgIQOccxLuqXNkQ+5mOmabYjpjANBgkqhkiG9w0BAQsFADB7 MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0b24x ETAPBgNVBAoMCFNTTCBDb3JwMTcwNQYDVQQDDC5TU0wuY29tIEVWIENvZGUgU2ln bmluZyBJbnRlcm1lZGlhdGUgQ0EgUlNBIFIzMB4XDTI1MDMxODA5MTUxNVoXDTI3 MTEyMDEyMzQxNVowgccxCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlh MRYwFAYDVQQHDA1TYW4gRnJhbmNpc2NvMRQwEgYDVQQKDAtHaXRMYWIgSW5jLjEQ MA4GA1UEBRMHNTYwMTI3OTEUMBIGA1UEAwwLR2l0TGFiIEluYy4xHTAbBgNVBA8M FFByaXZhdGUgT3JnYW5pemF0aW9uMRkwFwYLKwYBBAGCNzwCAQIMCERlbGF3YXJl MRMwEQYLKwYBBAGCNzwCAQMTAlVTMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE AJ83PXya71Rj97/9GJ/IVe/VsBcBJR0+CIdkTBAdFDjwujKaTJk82cWwmXJ3xsUi AlJadjvMhdVl2xxf0ah8yaOCAZowggGWMAwGA1UdEwEB/wQCMAAwHwYDVR0jBBgw FoAUNr1J/zEs669qQP6ZwBbtuvxI3V8wfQYIKwYBBQUHAQEEcTBvMEsGCCsGAQUF BzAChj9odHRwOi8vY2VydC5zc2wuY29tL1NTTGNvbS1TdWJDQS1FVi1Db2RlU2ln bmluZy1SU0EtNDA5Ni1SMy5jZXIwIAYIKwYBBQUHMAGGFGh0dHA6Ly9vY3Nwcy5z 
c2wuY29tMFAGA1UdIARJMEcwBwYFZ4EMAQMwPAYMKwYBBAGCqTABAwMCMCwwKgYI KwYBBQUHAgEWHmh0dHBzOi8vd3d3LnNzbC5jb20vcmVwb3NpdG9yeTATBgNVHSUE DDAKBggrBgEFBQcDAzBQBgNVHR8ESTBHMEWgQ6BBhj9odHRwOi8vY3Jscy5zc2wu Y29tL1NTTGNvbS1TdWJDQS1FVi1Db2RlU2lnbmluZy1SU0EtNDA5Ni1SMy5jcmww HQYDVR0OBBYEFOMtINlLIny96uWtEmlVfuYITNbWMA4GA1UdDwEB/wQEAwIHgDAN BgkqhkiG9w0BAQsFAAOCAgEAj8MCavNSVimQz5t+FIJd9UqqyBebd2SmyYPM0YtV 7CuC0Gvc6zO0AuKxEJqdusAjZrjeAGmBRsaV7c4UftavlcPEXa1Sg5FnH+fKYjV7 vzOn4aNH/s81QCHiUlVYhy9lzbbAGlY8zeos5CzEfOnVhtPXxgVnf2Qwj+pNv96J WIeRTTwDfWvu4Sg0ydaAjqzP9o4zD+PrT7JfQB1lXG2+/9mpjtjYXPQ+u3S9YUi7 RUtbXzHjlhRK3+N2UmiZVkqtPisRP1qu/H8HSGet98aDBO+Ov0kp4hhL2CVlXncz JYrqGgSN/VTjvxCERKi2aBUNgqA4ee3cIYgGH0DHAjPKR5SQ8AgBQcqmYKi7r56R kq3vmetdxnbK9pUD3kuNbxQOXSravgdxuToEo5eOZgil3WXBiUxKNivfiMfZxbTx NCa4TD8PJOosy/7XQyk3+a8GZKWej7k2auWkEXynNM4Rxkg6wp+JN7k1a121DAig ArHttZfV0JqsUbWyoPVp/Ev60rpY7xm+cF6EbdxjXKMP/H/frwTfF7b7k80Tg8SV uA3APcdYRUfDY7pw2XnlR3B83hgCiu9Z2lzYhvqCjfDcxm0jQVrpJj0ftv3r0/br pqg/UGGxC+ZsOIAOs4d2iWyRvlaVTicT+YvuJgx2UBdcKT4/rzmSTkwYgbQKhBuY P/Y= -----END CERTIFICATE----- ================================================ FILE: ci/.test-failures.servercore1809.txt ================================================ TestBuildCancel TestBuildCancel/cmd ================================================ FILE: ci/.test-failures.servercore21H2.txt ================================================ TestBuildCancel TestBuildCancel/cmd ================================================ FILE: ci/prebuilt_helper_image ================================================ #!/usr/bin/env sh IMAGE=$1 if echo "$IMAGE" | grep -q "prebuilt-"; then exit 0 fi if echo "$IMAGE" | grep -q "_archive"; then exit 0 fi dir=$(basename "$IMAGE" .tar) mkdir -p out/helper-images/"$dir" tar -xf "$IMAGE" -C out/helper-images/"$dir" archive=$(dirname "$IMAGE")/archive-$(basename "$IMAGE") prebuilt=$(dirname "$IMAGE")/prebuilt-$(basename "$IMAGE") rm -f "${archive}" skopeo copy oci:"out/helper-images/${dir}" 
docker-archive:"${archive}" if echo "$IMAGE" | grep -q "windows"; then prebuilt="$(dirname "$IMAGE")/$(basename "${prebuilt}" .tar).docker.tar.zst" rm -f "${prebuilt}" time zstd -9 -o "${prebuilt}" "${archive}" rm "${archive}" exit 0 fi docker export -o "$prebuilt" "$(docker create "$(docker load <"${archive}" | grep "Loaded image ID:" | awk '{print $4}')")" rm -rf "out/helper-images/${dir}" rm "${archive}" rm -f "${prebuilt}.xz" time 7z a -mx8 -txz "${prebuilt}.xz" "${prebuilt}" rm "$prebuilt" ================================================ FILE: ci/release_dir ================================================ #!/bin/bash files=" out/binaries/gitlab-runner-darwin-amd64 out/release/binaries/gitlab-runner-darwin-amd64 out/binaries/gitlab-runner-darwin-arm64 out/release/binaries/gitlab-runner-darwin-arm64 out/binaries/gitlab-runner-freebsd-386 out/release/binaries/gitlab-runner-freebsd-386 out/binaries/gitlab-runner-freebsd-amd64 out/release/binaries/gitlab-runner-freebsd-amd64 out/binaries/gitlab-runner-freebsd-arm out/release/binaries/gitlab-runner-freebsd-arm out/binaries/gitlab-runner-helper/gitlab-runner-helper.linux-arm out/release/binaries/gitlab-runner-helper/gitlab-runner-helper.arm out/binaries/gitlab-runner-helper/gitlab-runner-helper.linux-arm64 out/release/binaries/gitlab-runner-helper/gitlab-runner-helper.arm64 out/binaries/gitlab-runner-helper/gitlab-runner-helper.linux-ppc64le out/release/binaries/gitlab-runner-helper/gitlab-runner-helper.ppc64le out/binaries/gitlab-runner-helper/gitlab-runner-helper.linux-riscv64 out/release/binaries/gitlab-runner-helper/gitlab-runner-helper.riscv64 out/binaries/gitlab-runner-helper/gitlab-runner-helper.linux-loong64 out/release/binaries/gitlab-runner-helper/gitlab-runner-helper.loong64 out/binaries/gitlab-runner-helper/gitlab-runner-helper.linux-s390x out/release/binaries/gitlab-runner-helper/gitlab-runner-helper.s390x out/binaries/gitlab-runner-helper/gitlab-runner-helper.linux-amd64 
out/release/binaries/gitlab-runner-helper/gitlab-runner-helper.x86_64 out/binaries/gitlab-runner-helper/gitlab-runner-helper.linux-amd64-fips out/release/binaries/gitlab-runner-helper/gitlab-runner-helper.x86_64-fips out/binaries/gitlab-runner-helper/gitlab-runner-helper.windows-amd64.exe out/release/binaries/gitlab-runner-helper/gitlab-runner-helper.x86_64-windows.exe out/binaries/gitlab-runner-linux-386 out/release/binaries/gitlab-runner-linux-386 out/binaries/gitlab-runner-linux-amd64 out/release/binaries/gitlab-runner-linux-amd64 out/binaries/gitlab-runner-linux-amd64-fips out/release/binaries/gitlab-runner-linux-amd64-fips out/binaries/gitlab-runner-linux-arm out/release/binaries/gitlab-runner-linux-arm out/binaries/gitlab-runner-linux-arm64 out/release/binaries/gitlab-runner-linux-arm64 out/binaries/gitlab-runner-linux-ppc64le out/release/binaries/gitlab-runner-linux-ppc64le out/binaries/gitlab-runner-linux-riscv64 out/release/binaries/gitlab-runner-linux-riscv64 out/binaries/gitlab-runner-linux-loong64 out/release/binaries/gitlab-runner-linux-loong64 out/binaries/gitlab-runner-linux-s390x out/release/binaries/gitlab-runner-linux-s390x out/binaries/gitlab-runner-windows-386.exe out/release/binaries/gitlab-runner-windows-386.exe out/binaries/gitlab-runner-windows-386.zip out/release/binaries/gitlab-runner-windows-386.zip out/binaries/gitlab-runner-windows-amd64.exe out/release/binaries/gitlab-runner-windows-amd64.exe out/binaries/gitlab-runner-windows-amd64.zip out/release/binaries/gitlab-runner-windows-amd64.zip out/binaries/gitlab-runner-windows-arm64.exe out/release/binaries/gitlab-runner-windows-arm64.exe out/binaries/gitlab-runner-windows-arm64.zip out/release/binaries/gitlab-runner-windows-arm64.zip out/deb/gitlab-runner_amd64.deb out/release/deb/gitlab-runner_amd64.deb out/deb/gitlab-runner_arm64.deb out/release/deb/gitlab-runner_arm64.deb out/deb/gitlab-runner_armhf.deb out/release/deb/gitlab-runner_armhf.deb out/deb/gitlab-runner_i386.deb 
out/release/deb/gitlab-runner_i386.deb out/deb/gitlab-runner_ppc64el.deb out/release/deb/gitlab-runner_ppc64el.deb out/deb/gitlab-runner_riscv64.deb out/release/deb/gitlab-runner_riscv64.deb out/deb/gitlab-runner_loong64.deb out/release/deb/gitlab-runner_loong64.deb out/deb/gitlab-runner_s390x.deb out/release/deb/gitlab-runner_s390x.deb out/deb/gitlab-runner-helper-images.deb out/release/deb/gitlab-runner-helper-images.deb out/helper-images/prebuilt-alpine-arm.tar.xz out/release/helper-images/prebuilt-alpine-arm.tar.xz out/helper-images/prebuilt-alpine-arm64.tar.xz out/release/helper-images/prebuilt-alpine-arm64.tar.xz out/helper-images/prebuilt-alpine-edge-arm.tar.xz out/release/helper-images/prebuilt-alpine-edge-arm.tar.xz out/helper-images/prebuilt-alpine-edge-arm64.tar.xz out/release/helper-images/prebuilt-alpine-edge-arm64.tar.xz out/helper-images/prebuilt-alpine-edge-ppc64le.tar.xz out/release/helper-images/prebuilt-alpine-edge-ppc64le.tar.xz out/helper-images/prebuilt-alpine-edge-riscv64.tar.xz out/release/helper-images/prebuilt-alpine-edge-riscv64.tar.xz out/helper-images/prebuilt-alpine-edge-s390x.tar.xz out/release/helper-images/prebuilt-alpine-edge-s390x.tar.xz out/helper-images/prebuilt-alpine-edge-x86_64.tar.xz out/release/helper-images/prebuilt-alpine-edge-x86_64.tar.xz out/helper-images/prebuilt-alpine-latest-arm.tar.xz out/release/helper-images/prebuilt-alpine-latest-arm.tar.xz out/helper-images/prebuilt-alpine-latest-arm64.tar.xz out/release/helper-images/prebuilt-alpine-latest-arm64.tar.xz out/helper-images/prebuilt-alpine-latest-ppc64le.tar.xz out/release/helper-images/prebuilt-alpine-latest-ppc64le.tar.xz out/helper-images/prebuilt-alpine-latest-riscv64.tar.xz out/release/helper-images/prebuilt-alpine-latest-riscv64.tar.xz out/helper-images/prebuilt-alpine-latest-s390x.tar.xz out/release/helper-images/prebuilt-alpine-latest-s390x.tar.xz out/helper-images/prebuilt-alpine-latest-x86_64.tar.xz 
out/release/helper-images/prebuilt-alpine-latest-x86_64.tar.xz out/helper-images/prebuilt-alpine-ppc64le.tar.xz out/release/helper-images/prebuilt-alpine-ppc64le.tar.xz out/helper-images/prebuilt-alpine-riscv64.tar.xz out/release/helper-images/prebuilt-alpine-riscv64.tar.xz out/helper-images/prebuilt-alpine-s390x.tar.xz out/release/helper-images/prebuilt-alpine-s390x.tar.xz out/helper-images/prebuilt-alpine-x86_64-pwsh.tar.xz out/release/helper-images/prebuilt-alpine-x86_64-pwsh.tar.xz out/helper-images/prebuilt-alpine-x86_64.tar.xz out/release/helper-images/prebuilt-alpine-x86_64.tar.xz out/helper-images/prebuilt-alpine3.21-arm.tar.xz out/release/helper-images/prebuilt-alpine3.21-arm.tar.xz out/helper-images/prebuilt-alpine3.21-arm64.tar.xz out/release/helper-images/prebuilt-alpine3.21-arm64.tar.xz out/helper-images/prebuilt-alpine3.21-ppc64le.tar.xz out/release/helper-images/prebuilt-alpine3.21-ppc64le.tar.xz out/helper-images/prebuilt-alpine3.21-s390x.tar.xz out/release/helper-images/prebuilt-alpine3.21-s390x.tar.xz out/helper-images/prebuilt-alpine3.21-x86_64-pwsh.tar.xz out/release/helper-images/prebuilt-alpine3.21-x86_64-pwsh.tar.xz out/helper-images/prebuilt-alpine3.21-x86_64.tar.xz out/release/helper-images/prebuilt-alpine3.21-x86_64.tar.xz out/helper-images/prebuilt-ubi-fips-x86_64.tar.xz out/release/helper-images/prebuilt-ubi-fips-x86_64.tar.xz out/helper-images/prebuilt-ubuntu-arm.tar.xz out/release/helper-images/prebuilt-ubuntu-arm.tar.xz out/helper-images/prebuilt-ubuntu-arm64.tar.xz out/release/helper-images/prebuilt-ubuntu-arm64.tar.xz out/helper-images/prebuilt-ubuntu-ppc64le.tar.xz out/release/helper-images/prebuilt-ubuntu-ppc64le.tar.xz out/helper-images/prebuilt-ubuntu-s390x.tar.xz out/release/helper-images/prebuilt-ubuntu-s390x.tar.xz out/helper-images/prebuilt-ubuntu-x86_64-pwsh.tar.xz out/release/helper-images/prebuilt-ubuntu-x86_64-pwsh.tar.xz out/helper-images/prebuilt-ubuntu-x86_64.tar.xz 
out/release/helper-images/prebuilt-ubuntu-x86_64.tar.xz out/rpm/gitlab-runner_aarch64.rpm out/release/rpm/gitlab-runner_aarch64.rpm out/rpm/gitlab-runner_x86_64-fips.rpm out/release/rpm/gitlab-runner_x86_64-fips.rpm out/rpm/gitlab-runner_x86_64.rpm out/release/rpm/gitlab-runner_x86_64.rpm out/rpm/gitlab-runner_armhfp.rpm out/release/rpm/gitlab-runner_armhfp.rpm out/rpm/gitlab-runner_i686.rpm out/release/rpm/gitlab-runner_i686.rpm out/rpm/gitlab-runner_ppc64le.rpm out/release/rpm/gitlab-runner_ppc64le.rpm out/rpm/gitlab-runner_riscv64.rpm out/release/rpm/gitlab-runner_riscv64.rpm out/rpm/gitlab-runner_loongarch64.rpm out/release/rpm/gitlab-runner_loongarch64.rpm out/rpm/gitlab-runner_s390x.rpm out/release/rpm/gitlab-runner_s390x.rpm out/rpm/gitlab-runner-helper-images.rpm out/release/rpm/gitlab-runner-helper-images.rpm out/zoneinfo.zip out/release/zoneinfo.zip " rm -rf out/release echo "$files" | while read -r src dst; do if [ -z "$src" ] || [ -z "$dst" ]; then continue fi # Check if source file exists if [ ! -e "$src" ]; then echo "source file does not exist: $src" >&2 exit 1 fi dst_dir="$(dirname "$dst")" if [ ! -d "$dst_dir" ]; then mkdir -p "$dst_dir" fi ln -sf "$(realpath "$src")" "$dst" echo "symlink: $src -> $dst" done ================================================ FILE: ci/release_s3 ================================================ #!/usr/bin/env bash set -eo pipefail # Check if the AWS CLI is installed if ! command -v aws &> /dev/null then echo "AWS CLI not found. Please install it to proceed." exit 1 fi echo "AWS CLI is installed." 
aws configure set s3.max_concurrent_requests 50 refName=${CI_MERGE_REQUEST_SOURCE_BRANCH_NAME:-$CI_COMMIT_REF_NAME} # upload to ref name prefix aws s3 cp out/release/ "s3://$ARTIFACTS_S3_BUCKET/${refName}/" --acl public-read --recursive --no-progress echo -e "\n\033[1m==> Download index file: \033[36mhttps://$ARTIFACTS_S3_BUCKET.s3.amazonaws.com/${refName}/index.html\033[0m\n" # if latest, then sync refName to latest prefix if [[ -n "${IS_LATEST}" ]]; then aws s3 sync --delete "s3://$ARTIFACTS_S3_BUCKET/${refName}/" "s3://$ARTIFACTS_S3_BUCKET/latest/" --acl public-read --no-progress echo -e "\n\033[1m==> Download index file: \033[36mhttps://$ARTIFACTS_S3_BUCKET.s3.amazonaws.com/latest/index.html\033[0m\n" fi ================================================ FILE: ci/rpm_verify_fips ================================================ #!/usr/bin/env bash set -e set -u set -o pipefail # This script needs to run in the container registry.gitlab.com/gitlab-org/cloud-native/container-dependencies-finder/cdf:main, # See: https://gitlab.com/gitlab-org/cloud-native/container-dependencies-finder/-/blob/908117772ed868dd3c30b8621b57def4ef27e0f3/templates/rpm-verify-fips/template.yml : "${SCRATCH_DIR:=/tmp}" : "${LOGS_DIR:=logs}" : "${OCI_TARS:=}" # Handling for downstream rpm_verify_fips script : "${RPM_VERIFY_NOMTIME:=false}" main() { mkdir -p "${SCRATCH_DIR}" "${LOGS_DIR}" local desc name ociTar dockerTar tmpDir rootfs log for desc in $OCI_TARS ; do name="$( cut -d= -f1 <<< "$desc" )" ociTar="$( cut -d= -f2 <<< "$desc" )" tmpDir="${SCRATCH_DIR}/${name}" dockerTar="${tmpDir}/docker.tar" rootfs="${tmpDir}/rootfs" log="${LOGS_DIR}/${name}-rpm_verify_fips.log" echo >&2 "## ---- checking ${name} image (tar: ${ociTar}, log: ${log})" mkdir -p "${rootfs}" # convert from oci -> docker skopeo ${VERBOSE+--debug} copy --multi-arch=all "oci-archive:${ociTar}" "docker-archive:${dockerTar}" # export the rootfs crane ${VERBOSE+-v} export - - <"${dockerTar}" | tar -x${VERBOSE+v} -C "${rootfs}" 
LOG_FILE="${log}" INSTALL_ROOT="${rootfs}" RPM_VERIFY_NOMTIME="${RPM_VERIFY_NOMTIME}" rpm_verify_fips
        rm -rf "${tmpDir}"
    done
}

main "$@"


================================================
FILE: ci/touch_git
================================================
#!/usr/bin/env sh

# modify git files to reflect their last change date
git ls-files | while read file; do
  commit_date=$(git log -1 --format=%cd --date=unix "$file")
  touch -d "@$commit_date" "$file"
done


================================================
FILE: ci/touch_git.ps1
================================================
# modify git files to reflect their last change date
git ls-files | ForEach-Object {
    $commit_date = git log -1 --format=%cd --date=iso-strict $_
    (Get-Item $_).LastWriteTime = [DateTime]::Parse($commit_date)
}


================================================
FILE: ci/version
================================================
#!/usr/bin/env bash

set -eo pipefail

# NOTE(review): shell precedence makes this 'cat VERSION || (echo dev | sed ...)'
# — the sed only strips a leading "v" from the "dev" fallback, never from the
# VERSION file contents. If VERSION can start with "v", the intended form is
# likely: version=$( (cat VERSION 2>/dev/null || echo dev) | sed -e 's/^v//g' )
# — confirm before changing.
version=$(cat VERSION || echo dev | sed -e 's/^v//g')
# Exact annotated tag (if any) on HEAD, with the leading "v" stripped.
exact_tag=$(git describe --exact-match 2>/dev/null | sed -e 's/^v//g' || echo "")

# A clean release/rc tag wins outright.
if echo "${exact_tag}" | grep -qE "^[0-9]+\.[0-9]+\.[0-9]+(-rc[0-9]+)?$"; then
    echo "$exact_tag"
    exit 0
fi

# Otherwise build a pre-release version from the last non-rc tag, the commit
# count since it, and the abbreviated HEAD revision.
last_tag=$(git describe --abbrev=0 --exclude='*-rc*' --exclude='helpers/runner_wrapper/api/v*')
commits=$(git rev-list --count "${last_tag}..HEAD")
revision=$(git rev-parse --short=8 HEAD || echo unknown)

echo "${version}~pre.${commits}.g${revision}"


================================================
FILE: commands/builds_helper.go
================================================
package commands

import (
	"fmt"
	"math"
	"net/http"
	"strings"
	"sync"
	"time"

	"github.com/prometheus/client_golang/prometheus"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/gitlab-runner/helpers"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags"
	"gitlab.com/gitlab-org/gitlab-runner/session"
)

// Multiplicative factors applied to the adaptive request-concurrency limit
// after each job request (see releaseRequest).
const (
	concurrencyIncreaseFactor = 1.1 // +10%
concurrencyDecreaseFactor = 0.95 // -5%
)

// Prometheus metric descriptors for per-runner job and request-concurrency
// gauges/counters exposed by buildsHelper.
var numBuildsDesc = prometheus.NewDesc(
	"gitlab_runner_jobs",
	"The current number of running builds.",
	[]string{"runner", "runner_name", "system_id", "state", "stage", "executor_stage"},
	nil,
)

var requestConcurrencyDesc = prometheus.NewDesc(
	"gitlab_runner_request_concurrency",
	"The current number of concurrent requests for a new job",
	[]string{"runner", "system_id"},
	nil,
)

var requestConcurrencyExceededDesc = prometheus.NewDesc(
	"gitlab_runner_request_concurrency_exceeded_total",
	"Count of excess requests above the configured request_concurrency limit",
	[]string{"runner", "system_id"},
	nil,
)

var requestConcurrencyHardLimitDesc = prometheus.NewDesc(
	"gitlab_runner_request_concurrency_hard_limit",
	"Configured request_concurrency limit",
	[]string{"runner", "system_id"},
	nil,
)

var requestConcurrencyAdaptiveLimitDesc = prometheus.NewDesc(
	"gitlab_runner_request_concurrency_adaptive_limit",
	"Computed adaptive request concurrency limit",
	[]string{"runner", "system_id"},
	nil,
)

var requestConcurrencyUsedLimitDesc = prometheus.NewDesc(
	"gitlab_runner_request_concurrency_used_limit",
	"Used request concurrency limit",
	[]string{"runner", "system_id"},
	nil,
)

// statePermutation is one unique label combination for the gitlab_runner_jobs
// metric: a runner identity plus the build's current state/stage/executor stage.
type statePermutation struct {
	runner        string
	runnerName    string
	systemID      string
	buildState    common.BuildRuntimeState
	buildStage    common.BuildStage
	executorStage common.ExecutorStage
}

// newStatePermutationFromBuild snapshots the metric labels for a build at the
// moment of the call.
func newStatePermutationFromBuild(build *common.Build) statePermutation {
	return statePermutation{
		runner:        build.Runner.ShortDescription(),
		runnerName:    build.Runner.Name,
		systemID:      build.Runner.GetSystemID(),
		buildState:    build.CurrentState(),
		buildStage:    build.CurrentStage(),
		executorStage: build.CurrentExecutorStage(),
	}
}

// runnerCounter tracks per-runner (keyed by token) build and job-request
// counts plus the request-concurrency limits reported via Prometheus.
type runnerCounter struct {
	systemID   string
	runnerName string

	// builds is the number of builds currently held via acquireBuild.
	builds int
	// requests is the number of in-flight job requests held via acquireRequest.
	requests int

	// hardConcurrencyLimit mirrors the configured request_concurrency setting.
	hardConcurrencyLimit int
	// adaptiveConcurrencyLimit is the float limit adjusted on each release.
	adaptiveConcurrencyLimit float64
	// usedConcurrencyLimit is the limit actually applied on the last acquire.
	usedConcurrencyLimit int
	// requestConcurrencyExceeded counts acquires rejected due to the limit.
	requestConcurrencyExceeded int
}

// buildsHelper owns the set of running builds and the per-runner counters,
// and feeds the Prometheus metrics declared above.
type buildsHelper struct {
	counters map[string]*runnerCounter
buildStagesStartTimes map[*common.Build]map[common.BuildStage]time.Time
	builds                []*common.Build
	// lock guards counters, builds and buildStagesStartTimes.
	lock sync.Mutex

	jobsTotal                            *prometheus.CounterVec
	jobExecutionModeTotal                *prometheus.CounterVec
	jobDurationHistogram                 *prometheus.HistogramVec
	jobStagesDurationHistogram           *prometheus.HistogramVec
	jobQueueDurationHistogram            *prometheus.HistogramVec
	jobQueueSize                         *prometheus.GaugeVec
	jobQueueDepth                        *prometheus.GaugeVec
	acceptableJobQueuingDurationExceeded *prometheus.CounterVec
}

// getRunnerCounter returns the counter for the runner's token, lazily creating
// the map and entry; callers must hold b.lock.
func (b *buildsHelper) getRunnerCounter(runner *common.RunnerConfig) *runnerCounter {
	if b.counters == nil {
		b.counters = make(map[string]*runnerCounter)
	}

	counter := b.counters[runner.Token]
	if counter == nil {
		counter = &runnerCounter{systemID: runner.GetSystemID(), runnerName: runner.Name}
		b.counters[runner.Token] = counter
		// Touch the jobs_total series with +0 so the metric exists for this
		// runner even before its first job.
		b.jobsTotal.WithLabelValues(runner.ShortDescription(), runner.Name, runner.GetSystemID()).Add(0)
	}
	return counter
}

// findSessionByURL returns the session whose endpoint is a path prefix of the
// given URL, or an error when none of the active builds matches.
func (b *buildsHelper) findSessionByURL(url string) (*session.Session, error) {
	if url == "" {
		return nil, fmt.Errorf("empty URL provided")
	}

	b.lock.Lock()
	defer b.lock.Unlock()

	if len(b.builds) == 0 {
		return nil, fmt.Errorf("no active builds found")
	}

	for _, build := range b.builds {
		if build.Session == nil {
			continue
		}
		if build.Session.Endpoint == "" {
			continue
		}
		// Trailing "/" prevents "/sess1" from matching "/sess10/...".
		if strings.HasPrefix(url, build.Session.Endpoint+"/") {
			return build.Session, nil
		}
	}

	return nil, fmt.Errorf("no session found matching URL: %s", url)
}

// acquireBuild reserves one build slot for the runner; it returns false when
// the runner's configured Limit is already reached.
func (b *buildsHelper) acquireBuild(runner *common.RunnerConfig) bool {
	b.lock.Lock()
	defer b.lock.Unlock()

	counter := b.getRunnerCounter(runner)

	if runner.Limit > 0 && counter.builds >= runner.Limit {
		// Too many builds
		return false
	}

	counter.builds++
	return true
}

// releaseBuild frees one build slot; returns false if none was held.
func (b *buildsHelper) releaseBuild(runner *common.RunnerConfig) bool {
	b.lock.Lock()
	defer b.lock.Unlock()

	counter := b.getRunnerCounter(runner)
	if counter.builds > 0 {
		counter.builds--
		return true
	}
	return false
}

// acquireRequest reserves one job-request slot, honoring either the hard
// request_concurrency limit or the adaptive limit when the feature flag is on;
// returns false (and counts the rejection) when the limit is reached.
func (b *buildsHelper) acquireRequest(runner *common.RunnerConfig) bool {
b.lock.Lock() defer b.lock.Unlock() counter := b.getRunnerCounter(runner) concurrency := runner.GetRequestConcurrency() counter.hardConcurrencyLimit = concurrency if runner.IsFeatureFlagOn(featureflags.UseAdaptiveRequestConcurrency) { // concurrency is the adaptive concurrency value rounded up, between 1 and the max request concurrency concurrency = min(max(1, int(math.Ceil(counter.adaptiveConcurrencyLimit))), runner.GetRequestConcurrency()) } counter.usedConcurrencyLimit = concurrency if counter.requests >= concurrency { counter.requestConcurrencyExceeded++ return false } counter.requests++ return true } func (b *buildsHelper) releaseRequest(runner *common.RunnerConfig, hasJob bool) bool { b.lock.Lock() defer b.lock.Unlock() counter := b.getRunnerCounter(runner) if runner.IsFeatureFlagOn(featureflags.UseAdaptiveRequestConcurrency) { // if the request returned a job, increase the concurrency by 10%, if not, decrease by 5% if hasJob { counter.adaptiveConcurrencyLimit *= concurrencyIncreaseFactor } else { counter.adaptiveConcurrencyLimit *= concurrencyDecreaseFactor } // adjust adaptive concurrency between 1 and max request concurrency counter.adaptiveConcurrencyLimit = min(max(1, counter.adaptiveConcurrencyLimit), float64(runner.GetRequestConcurrency())) } if counter.requests > 0 { counter.requests-- return true } return false } func (b *buildsHelper) addBuild(build *common.Build) { if build == nil { return } b.lock.Lock() defer b.lock.Unlock() runners := make(map[int]bool) projectRunners := make(map[int]bool) for _, otherBuild := range b.builds { if otherBuild.Runner.Token != build.Runner.Token { continue } runners[otherBuild.RunnerID] = true if otherBuild.JobInfo.ProjectID != build.JobInfo.ProjectID { continue } projectRunners[otherBuild.ProjectRunnerID] = true } for runners[build.RunnerID] { build.RunnerID++ } for projectRunners[build.ProjectRunnerID] { build.ProjectRunnerID++ } b.builds = append(b.builds, build) 
	// Record the accepted job and its queueing characteristics.
	b.jobsTotal.WithLabelValues(build.Runner.ShortDescription(), build.Runner.Name, build.Runner.GetSystemID()).Inc()
	b.jobQueueDurationHistogram.
		WithLabelValues(
			build.Runner.ShortDescription(),
			build.Runner.Name,
			build.Runner.GetSystemID(),
			build.JobInfo.ProjectJobsRunningOnInstanceRunnersCount,
		).
		Observe(build.JobInfo.TimeInQueueSeconds)
	b.jobQueueSize.
		WithLabelValues(
			build.Runner.ShortDescription(),
			build.Runner.Name,
			build.Runner.GetSystemID(),
		).
		Set(float64(build.JobInfo.QueueSize))
	b.jobQueueDepth.
		WithLabelValues(
			build.Runner.ShortDescription(),
			build.Runner.Name,
			build.Runner.GetSystemID(),
		).
		Set(float64(build.JobInfo.QueueDepth))

	b.evaluateJobQueuingDuration(build.Runner, build.JobInfo)
	build.OnJobExecutionModeDispatchedFn = b.handleOnJobExecutionModeDispatched
	b.initializeBuildStageMetrics(build)
}

// evaluateJobQueuingDuration increments the "acceptable job queuing duration
// exceeded" counter when the job's time in queue crossed the threshold that
// is active in the runner's Monitoring configuration (and, when configured,
// the jobs-running-for-project matcher agrees). It always touches the
// counter so the series exists for every runner.
func (b *buildsHelper) evaluateJobQueuingDuration(runner *common.RunnerConfig, jobInfo spec.JobInfo) {
	counterForRunner := b.acceptableJobQueuingDurationExceeded.
		WithLabelValues(
			runner.ShortDescription(),
			runner.Name,
			runner.GetSystemID(),
		)
	// This .Add(0) will not change the value of the metric when threshold was
	// not exceeded, but will make sure that the metric for each runner is always
	// available
	counterForRunner.Add(0)

	// If configuration is not present we don't care about the metric
	if runner.Monitoring == nil || len(runner.Monitoring.JobQueuingDurations) < 1 {
		return
	}

	jobQueueDurationCfg := runner.Monitoring.JobQueuingDurations.GetActiveConfiguration()
	// If no configuration matches current time we don't care about the metric
	if jobQueueDurationCfg == nil {
		return
	}

	threshold := jobQueueDurationCfg.Threshold.Seconds()
	// Threshold not configured, zeroed or invalid (negative) means we're not interested in this feature
	if threshold <= 0 {
		return
	}

	// If threshold is not exceeded, then all is good and there is no need for other checks
	if jobInfo.TimeInQueueSeconds <= threshold {
		return
	}

	// If JobProjectsRunningOnInstanceRunnersCount doesn't match the definition it means that exceeded
	// threshold is acceptable in such case.
	// If the definition was not configured (or the regular expression in the config.toml file was invalid
	// and couldn't be compiled) we treat that as "matched" and count the case in
	if !jobQueueDurationCfg.JobsRunningForProjectMatched(jobInfo.ProjectJobsRunningOnInstanceRunnersCount) {
		return
	}

	// Timing expectation not met for this case. Let's increase the counter
	counterForRunner.Inc()
}

// removeBuild records the build's final duration, unregisters it from the
// builds list and drops its stage-start-time map. Returns false when the
// build was not registered.
func (b *buildsHelper) removeBuild(deleteBuild *common.Build) bool {
	b.lock.Lock()
	defer b.lock.Unlock()

	mode := deleteBuild.DispatchedJobExecutionMode().OrUnknown()
	b.jobDurationHistogram.
		WithLabelValues(deleteBuild.Runner.ShortDescription(), deleteBuild.Runner.Name, deleteBuild.Runner.GetSystemID(), string(mode)).
		Observe(deleteBuild.FinalDuration().Seconds())

	for idx, build := range b.builds {
		if build == deleteBuild {
			b.builds = append(b.builds[0:idx], b.builds[idx+1:]...)
			delete(b.buildStagesStartTimes, deleteBuild)
			return true
		}
	}

	return false
}

// buildsCount returns the number of currently registered builds.
func (b *buildsHelper) buildsCount() int {
	b.lock.Lock()
	defer b.lock.Unlock()

	return len(b.builds)
}

// statesAndStages returns, per known runner, the count of builds in each
// (state, stage, executor stage) permutation, seeding an "idle" permutation
// per runner so the gitlab_runner_jobs metric always has a series.
func (b *buildsHelper) statesAndStages() map[statePermutation]int {
	b.lock.Lock()
	defer b.lock.Unlock()

	data := make(map[statePermutation]int)
	for token, counter := range b.counters {
		// 'idle' state will ensure the metric is always present, even if no
		// builds are being processed at the moment
		idleState := statePermutation{
			runner:        helpers.ShortenToken(token),
			runnerName:    counter.runnerName,
			systemID:      counter.systemID,
			buildState:    "idle",
			buildStage:    "idle",
			executorStage: "idle",
		}
		data[idleState] = 0
	}

	for _, build := range b.builds {
		state := newStatePermutationFromBuild(build)
		data[state]++
	}
	return data
}

// runnersCounters returns the per-runner counters keyed by shortened token.
// NOTE(review): the returned map shares the live *runnerCounter values, and
// Collect reads their fields after b.lock is released — confirm this benign
// race is acceptable for metrics-only reads.
func (b *buildsHelper) runnersCounters() map[string]*runnerCounter {
	b.lock.Lock()
	defer b.lock.Unlock()

	data := make(map[string]*runnerCounter)
	for token, counter := range b.counters {
		data[helpers.ShortenToken(token)] = counter
	}

	return data
}

// initializeBuildStageMetrics wires the per-stage duration callbacks for the
// build when the high-cardinality-metrics feature flag is enabled; otherwise
// it leaves the build untouched.
func (b *buildsHelper) initializeBuildStageMetrics(build *common.Build) {
	if !build.IsFeatureFlagOn(featureflags.ExportHighCardinalityMetrics) {
		return
	}

	// the receiver lock is held at this point
	if b.buildStagesStartTimes == nil {
		b.buildStagesStartTimes = make(map[*common.Build]map[common.BuildStage]time.Time)
	}
	if b.buildStagesStartTimes[build] == nil {
		b.buildStagesStartTimes[build] = make(map[common.BuildStage]time.Time)
	}

	build.OnBuildStageStartFn = func(stage common.BuildStage) {
		b.handleOnBuildStageStart(build, stage)
	}
	build.OnBuildStageEndFn = func(stage common.BuildStage) {
		b.handleOnBuildStageEnd(build, stage)
	}
}

// handleOnBuildStageStart records the wall-clock start time of a stage.
// NOTE(review): if removeBuild already deleted this build's entry, the inner
// map is nil and this write would panic — confirm stage callbacks cannot
// fire after removeBuild.
func (b *buildsHelper) handleOnBuildStageStart(build *common.Build, stage common.BuildStage) {
	b.lock.Lock()
	b.buildStagesStartTimes[build][stage] = time.Now()
	b.lock.Unlock()
}

// handleOnBuildStageEnd observes the stage's duration into the per-stage
// histogram, measured from the time recorded by handleOnBuildStageStart.
func (b *buildsHelper) handleOnBuildStageEnd(build *common.Build, stage common.BuildStage) {
	b.lock.Lock()
	duration := time.Since(b.buildStagesStartTimes[build][stage])
	b.lock.Unlock()

	b.jobStagesDurationHistogram.
		With(prometheus.Labels{
			"runner":      build.Runner.ShortDescription(),
			"runner_name": build.Runner.Name,
			"system_id":   build.Runner.GetSystemID(),
			"stage":       string(stage),
		}).
		Observe(duration.Seconds())
}

// handleOnJobExecutionModeDispatched counts a dispatched job by execution
// mode and executor, substituting "unknown" for an empty executor label.
func (b *buildsHelper) handleOnJobExecutionModeDispatched(mode common.JobExecutionMode, executor string) {
	if executor == "" {
		executor = "unknown"
	}
	b.jobExecutionModeTotal.WithLabelValues(string(mode), executor).Inc()
}

// Describe implements prometheus.Collector.
func (b *buildsHelper) Describe(ch chan<- *prometheus.Desc) {
	ch <- numBuildsDesc
	ch <- requestConcurrencyDesc
	ch <- requestConcurrencyExceededDesc
	ch <- requestConcurrencyHardLimitDesc
	ch <- requestConcurrencyAdaptiveLimitDesc
	ch <- requestConcurrencyUsedLimitDesc

	b.jobsTotal.Describe(ch)
	b.jobExecutionModeTotal.Describe(ch)
	b.jobDurationHistogram.Describe(ch)
	b.jobQueueDurationHistogram.Describe(ch)
	b.jobQueueSize.Describe(ch)
	b.jobQueueDepth.Describe(ch)
	b.acceptableJobQueuingDurationExceeded.Describe(ch)
	b.jobStagesDurationHistogram.Describe(ch)
}

// Collect implements prometheus.Collector.
func (b *buildsHelper) Collect(ch chan<- prometheus.Metric) {
	// One gauge sample per (runner, state, stage, executor-stage) permutation.
	builds := b.statesAndStages()
	for state, count := range builds {
		ch <- prometheus.MustNewConstMetric(
			numBuildsDesc,
			prometheus.GaugeValue,
			float64(count),
			state.runner,
			state.runnerName,
			state.systemID,
			string(state.buildState),
			string(state.buildStage),
			string(state.executorStage),
		)
	}

	// Request-concurrency gauges/counters per runner, from the shared
	// runnerCounter values.
	counters := b.runnersCounters()
	for runner, counter := range counters {
		ch <- prometheus.MustNewConstMetric(
			requestConcurrencyDesc,
			prometheus.GaugeValue,
			float64(counter.requests),
			runner,
			counter.systemID,
		)
		ch <- prometheus.MustNewConstMetric(
			requestConcurrencyExceededDesc,
			prometheus.CounterValue,
			float64(counter.requestConcurrencyExceeded),
			runner,
			counter.systemID,
		)
		ch <- prometheus.MustNewConstMetric(
			requestConcurrencyHardLimitDesc,
			prometheus.GaugeValue,
			float64(counter.hardConcurrencyLimit),
			runner,
			counter.systemID,
		)
		ch <- prometheus.MustNewConstMetric(
			requestConcurrencyAdaptiveLimitDesc,
			prometheus.GaugeValue,
			counter.adaptiveConcurrencyLimit,
			runner,
			counter.systemID,
		)
		ch <- prometheus.MustNewConstMetric(
			requestConcurrencyUsedLimitDesc,
			prometheus.GaugeValue,
			float64(counter.usedConcurrencyLimit),
			runner,
			counter.systemID,
		)
	}

	// Delegate to the vector collectors owned by this helper.
	b.jobsTotal.Collect(ch)
	b.jobExecutionModeTotal.Collect(ch)
	b.jobDurationHistogram.Collect(ch)
	b.jobQueueDurationHistogram.Collect(ch)
	b.jobQueueSize.Collect(ch)
	b.jobQueueDepth.Collect(ch)
	b.acceptableJobQueuingDurationExceeded.Collect(ch)
	b.jobStagesDurationHistogram.Collect(ch)
}

// ListJobsHandler writes a plain-text listing of the currently registered
// builds (URL, state, stage, executor stage, duration), one per line, with
// an X-List-Version header identifying the output format version.
func (b *buildsHelper) ListJobsHandler(w http.ResponseWriter, r *http.Request) {
	w.Header().Add("X-List-Version", "2")
	w.Header().Add(common.ContentType, "text/plain")
	w.WriteHeader(http.StatusOK)

	b.lock.Lock()
	defer b.lock.Unlock()

	for _, job := range b.builds {
		_, _ = fmt.Fprintf(
			w,
			"url=%s state=%s stage=%s executor_stage=%s duration=%s\n",
			job.JobURL(),
			job.CurrentState(),
			job.CurrentStage(),
			job.CurrentExecutorStage(),
			job.CurrentDuration(),
		)
	}
}

// newBuildsHelper constructs a buildsHelper with all of its metric vectors
// registered-but-empty; maps and slices are left nil and created lazily.
func newBuildsHelper() buildsHelper {
	return buildsHelper{
		jobsTotal: prometheus.NewCounterVec(
			prometheus.CounterOpts{
				Name: "gitlab_runner_jobs_total",
				Help: "Total number of handled jobs",
			},
			[]string{"runner", "runner_name", "system_id"},
		),
		jobExecutionModeTotal: prometheus.NewCounterVec(
			prometheus.CounterOpts{
				Name: "gitlab_runner_job_execution_mode_total",
				Help: "Total number of jobs grouped by execution mode and executor",
			},
			[]string{"mode", "executor"},
		),
		jobDurationHistogram: prometheus.NewHistogramVec(
			prometheus.HistogramOpts{
				Name:    "gitlab_runner_job_duration_seconds",
				Help:    "Histogram of job durations",
				Buckets: []float64{30, 60, 300, 600, 1800, 3600, 7200, 10800, 18000, 36000},
			},
			[]string{"runner", "runner_name", "system_id", "mode"},
		),
		jobQueueDurationHistogram: prometheus.NewHistogramVec(
			prometheus.HistogramOpts{
				Name:    "gitlab_runner_job_queue_duration_seconds",
				Help:    "A histogram representing job queue duration.",
				Buckets: []float64{1, 3, 10, 30, 60, 120, 300, 900, 1800, 3600},
			},
			[]string{"runner", "runner_name", "system_id", "project_jobs_running"},
		),
		jobQueueSize: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Name: "gitlab_runner_job_queue_size",
				Help: "A gauge representing the size of the queue for the runner",
			},
			[]string{"runner", "runner_name", "system_id"},
		),
		jobQueueDepth: prometheus.NewGaugeVec(
			prometheus.GaugeOpts{
				Name: "gitlab_runner_job_queue_depth",
				Help: "A gauge representing the search depth in the queue for the runner",
			},
			[]string{"runner", "runner_name", "system_id"},
		),
		acceptableJobQueuingDurationExceeded: prometheus.NewCounterVec(
			prometheus.CounterOpts{
				Name: "gitlab_runner_acceptable_job_queuing_duration_exceeded_total",
				Help: "Counts how often jobs exceed the configured queuing time threshold",
			},
			[]string{"runner", "runner_name", "system_id"},
		),
		jobStagesDurationHistogram: prometheus.NewHistogramVec(
			prometheus.HistogramOpts{
				Name:    "gitlab_runner_job_stage_duration_seconds",
				Help:    "Histogram of each job stage duration",
				Buckets: []float64{1, 3, 10, 30, 60, 120, 300, 900, 1800, 3600},
			},
			[]string{"runner", "runner_name", "system_id", "stage"},
		),
	}
}

================================================
FILE: commands/builds_helper_integration_test.go
================================================
//go:build integration

package commands

import (
	"io"
	"runtime"
	"testing"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/commands/internal/configfile"
	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/common/buildtest"
	shell_executor "gitlab.com/gitlab-org/gitlab-runner/executors/shell"
)

// TestBuildsHelperCollect runs a real long-running shell build and calls
// Collect concurrently to surface race conditions between metric collection
// and build progression.
func TestBuildsHelperCollect(t *testing.T) {
	dir := t.TempDir()

	ch := make(chan prometheus.Metric, 50)
	b := newBuildsHelper()

	longRunningBuild, err := common.GetLongRunningBuild()
	require.NoError(t, err)

	shell := "bash"
	if runtime.GOOS == "windows" {
		shell = "powershell"
	}

	systemID, err := configfile.GenerateUniqueSystemID()
	require.NoError(t, err)

	build := &common.Build{
		Job: longRunningBuild,
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				BuildsDir: dir,
				Executor:  "shell",
				Shell:     shell,
			},
			SystemID: systemID,
		},
		ExecutorProvider: shell_executor.NewProvider("gitlab-runner"),
	}
	trace := &common.Trace{Writer: io.Discard}
	done := make(chan error)
	go func() {
		done <- buildtest.RunBuildWithTrace(t, build, trace)
	}()
	b.builds = append(b.builds, build)

	// collect many logs whilst the build is being executed to trigger any
	// potential race conditions that arise from the build progressing whilst
	// metrics are collected.
	for i := 0; i < 200; i++ {
		if i == 100 {
			// Build might have not started yet, wait until cancel is
			// successful.
			require.Eventually(
				t,
				func() bool {
					return trace.Abort()
				},
				time.Minute,
				10*time.Millisecond,
			)
		}
		b.Collect(ch)
		<-ch
	}

	// The aborted build is expected to finish with a "canceled" build error.
	err = <-done
	expected := &common.BuildError{FailureReason: common.JobCanceled}
	assert.ErrorIs(t, err, expected)
}

================================================
FILE: commands/builds_helper_test.go
================================================
//go:build !integration

package commands

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"testing"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	dto "github.com/prometheus/client_model/go"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/common/config/runner"
	"gitlab.com/gitlab-org/gitlab-runner/common/config/runner/monitoring"
	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags"
	"gitlab.com/gitlab-org/gitlab-runner/session"
)

const (
	testToken = "testoken" // No typo here! 8 characters to make it equal to the computed ShortDescription()
	testName  = "qwerty123"
)

// TestBuildsHelperAcquireRequestWithLimit verifies the effective request
// limit under the (default-on) adaptive concurrency behaviour.
func TestBuildsHelperAcquireRequestWithLimit(t *testing.T) {
	runner := common.RunnerConfig{
		RequestConcurrency: 2,
	}

	b := newBuildsHelper()
	result := b.acquireRequest(&runner)
	require.True(t, result)

	result = b.acquireRequest(&runner)
	require.False(t, result, "allow only one requests (adaptive limit)")

	result = b.releaseRequest(&runner, false)
	require.True(t, result)

	result = b.releaseRequest(&runner, false)
	require.False(t, result, "release only two requests")
}

// TestBuildsHelperAcquireRequestWithAdaptiveLimit verifies that a release
// with hasJob=true raises the adaptive limit for subsequent acquires.
func TestBuildsHelperAcquireRequestWithAdaptiveLimit(t *testing.T) {
	runner := common.RunnerConfig{
		RequestConcurrency: 2,
	}

	b := newBuildsHelper()
	result := b.acquireRequest(&runner)
	require.True(t, result)

	result = b.releaseRequest(&runner, true)
	require.True(t, result)

	result = b.acquireRequest(&runner)
	require.True(t, result)

	result = b.acquireRequest(&runner)
	require.False(t, result, "allow only two requests")

	result = b.releaseRequest(&runner, false)
	require.True(t, result)

	result = b.releaseRequest(&runner, false)
	require.False(t, result, "release only two requests")
}

// TestBuildsHelperAcquireRequestWithDefault verifies the single-request
// default when RequestConcurrency is left at zero.
func TestBuildsHelperAcquireRequestWithDefault(t *testing.T) {
	runner := common.RunnerConfig{
		RequestConcurrency: 0,
	}

	b := newBuildsHelper()
	result := b.acquireRequest(&runner)
	require.True(t, result)

	result = b.acquireRequest(&runner)
	require.False(t, result, "allow only one request")

	result = b.releaseRequest(&runner, false)
	require.True(t, result)

	result = b.releaseRequest(&runner, false)
	require.False(t, result, "release only one request")

	result = b.acquireRequest(&runner)
	require.True(t, result)

	result = b.releaseRequest(&runner, false)
	require.True(t, result)

	result = b.releaseRequest(&runner, false)
	require.False(t, result, "nothing to release")
}

// TestBuildsHelperAcquireBuildWithLimit verifies acquire/release against a
// runner with a build Limit of one.
func TestBuildsHelperAcquireBuildWithLimit(t *testing.T) {
	runner := common.RunnerConfig{
		Limit: 1,
	}

	b := newBuildsHelper()
	result := b.acquireBuild(&runner)
	require.True(t, result)

	result = b.acquireBuild(&runner)
	require.False(t, result, "allow only one build")

	result = b.releaseBuild(&runner)
	require.True(t, result)

	result = b.releaseBuild(&runner)
	require.False(t, result, "release only one build")
}

// TestBuildsHelperAcquireBuildUnlimited verifies that Limit: 0 means no cap.
func TestBuildsHelperAcquireBuildUnlimited(t *testing.T) {
	runner := common.RunnerConfig{
		Limit: 0,
	}

	b := newBuildsHelper()
	result := b.acquireBuild(&runner)
	require.True(t, result)

	result = b.acquireBuild(&runner)
	require.True(t, result)

	result = b.releaseBuild(&runner)
	require.True(t, result)

	result = b.releaseBuild(&runner)
	require.True(t, result)
}

// TestBuildsHelperFindSessionByURL covers the match, no-match, empty-URL and
// no-builds error paths of findSessionByURL.
func TestBuildsHelperFindSessionByURL(t *testing.T) {
	sess, err := session.NewSession(nil)
	require.NoError(t, err)

	build := common.Build{
		Session: sess,
		Runner: &common.RunnerConfig{
			RunnerCredentials: common.RunnerCredentials{
				Token: "abcd1234",
			},
		},
	}

	h := newBuildsHelper()
	h.addBuild(&build)

	foundSession, err := h.findSessionByURL(sess.Endpoint + "/action")
	require.NoError(t, err)
	assert.Equal(t, sess, foundSession)

	foundSession, err = h.findSessionByURL("/session/hash/action")
	assert.Nil(t, foundSession)
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "no session found matching URL")

	// Test empty URL
	foundSession, err = h.findSessionByURL("")
	assert.Nil(t, foundSession)
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "empty URL provided")

	// Test with no builds
	h = newBuildsHelper()
	foundSession, err = h.findSessionByURL(sess.Endpoint + "/action")
	assert.Nil(t, foundSession)
	assert.Error(t, err)
	assert.Contains(t, err.Error(), "no active builds found")
}

// TestBuildsHelper_ListJobsHandler checks the handler's headers and the
// per-job plain-text output for empty and non-empty build lists.
func TestBuildsHelper_ListJobsHandler(t *testing.T) {
	tests := map[string]struct {
		build          *common.Build
		expectedOutput []string
	}{
		"no jobs": {
			build: nil,
		},
		"job exists": {
			build: &common.Build{
				Runner: &common.RunnerConfig{},
				Job: spec.Job{
					ID:      1,
					JobInfo: spec.JobInfo{ProjectID: 1},
					GitInfo: spec.GitInfo{RepoURL: "https://gitlab.example.com/my-namespace/my-project.git"},
				},
			},
			expectedOutput: []string{
				"url=https://gitlab.example.com/my-namespace/my-project/-/jobs/1",
			},
		},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			writer := httptest.NewRecorder()
			req, err := http.NewRequest(http.MethodGet, "/", nil)
			require.NoError(t, err)

			b := newBuildsHelper()
			b.addBuild(test.build)
			b.ListJobsHandler(writer, req)

			resp := writer.Result()
			defer resp.Body.Close()

			assert.Equal(t, http.StatusOK, resp.StatusCode)
			assert.Equal(t, "2", resp.Header.Get("X-List-Version"))
			assert.Equal(t, "text/plain", resp.Header.Get(common.ContentType))

			body, err := io.ReadAll(resp.Body)
			require.NoError(t, err)

			if len(test.expectedOutput) == 0 {
				assert.Empty(t, body)
				return
			}

			for _, expectedOutput := range test.expectedOutput {
				assert.Contains(t, string(body), expectedOutput)
			}
		})
	}
}

// TestRestrictHTTPMethods verifies that the restrictHTTPMethods wrapper only
// passes through the allowed HTTP methods.
func TestRestrictHTTPMethods(t *testing.T) {
	tests := map[string]int{
		http.MethodGet:  http.StatusOK,
		http.MethodHead: http.StatusOK,
		http.MethodPost: http.StatusMethodNotAllowed,
		"FOOBAR":        http.StatusMethodNotAllowed,
	}

	for method, expectedStatusCode := range tests {
		t.Run(method, func(t *testing.T) {
			mux := http.NewServeMux()
			mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
				_, _ = w.Write([]byte("hello world"))
			})

			server := httptest.NewServer(restrictHTTPMethods(mux, http.MethodGet, http.MethodHead))

			req, err := http.NewRequest(method, server.URL, nil)
			require.NoError(t, err)

			resp, err := server.Client().Do(req)
			require.NoError(t, err)
			require.Equal(t, expectedStatusCode, resp.StatusCode)
		})
	}
}

// TestBuildsHelper_evaluateJobQueuingDuration exercises every configuration
// branch of evaluateJobQueuingDuration (missing sections, thresholds,
// project matcher) and asserts the resulting counter value and labels.
func TestBuildsHelper_evaluateJobQueuingDuration(t *testing.T) {
	type jobInfo struct {
		timeInQueueSeconds                       float64
		projectJobsRunningOnInstanceRunnersCount string
	}

	basicJob := jobInfo{
		timeInQueueSeconds:                       (15 * time.Second).Seconds(),
		projectJobsRunningOnInstanceRunnersCount: "0",
	}

	tc := map[string]struct {
		monitoringSectionMissing bool
		jobQueuingSectionMissing bool
		threshold                time.Duration
		jobsRunningForProject    string
		jobInfo                  jobInfo
		expectedValue            float64
	}{
		"no monitoring section in configuration": {
			monitoringSectionMissing: true,
			jobInfo:                  basicJob,
			expectedValue:            0,
		},
		"no jobQueuingDuration section in configuration": {
			jobQueuingSectionMissing: true,
			jobInfo:                  basicJob,
			expectedValue:            0,
		},
		"zeroed configuration": {
			jobInfo:       basicJob,
			expectedValue: 0,
		},
		"jobsRunningForProject not configured and threshold not exceeded": {
			threshold:     60 * time.Second,
			jobInfo:       basicJob,
			expectedValue: 0,
		},
		"jobsRunningForProject not configured and threshold exceeded": {
			threshold:     10 * time.Second,
			jobInfo:       basicJob,
			expectedValue: 1,
		},
		"jobsRunningForProject configured and matched and threshold not exceeded": {
			threshold:             60 * time.Second,
			jobsRunningForProject: ".*",
			jobInfo:               basicJob,
			expectedValue:         0,
		},
		"jobsRunningForProject configured and matched and threshold exceeded": {
			threshold:             10 * time.Second,
			jobsRunningForProject: ".*",
			jobInfo:               basicJob,
			expectedValue:         1,
		},
		"jobsRunningForProject configured and not matched and threshold not exceeded": {
			threshold:             60 * time.Second,
			jobsRunningForProject: "Inf+",
			jobInfo:               basicJob,
			expectedValue:         0,
		},
		"jobsRunningForProject configured and not matched and threshold exceeded": {
			threshold:             10 * time.Second,
			jobsRunningForProject: "Inf+",
			jobInfo:               basicJob,
			expectedValue:         0,
		},
	}

	for tn, tt := range tc {
		t.Run(tn, func(t *testing.T) {
			build := &common.Build{
				Runner: &common.RunnerConfig{
					Name: testName,
					RunnerCredentials: common.RunnerCredentials{
						Token: testToken,
					},
					SystemID: "testSystemID",
				},
				Job: spec.Job{
					ID: 1,
					JobInfo: spec.JobInfo{
						ProjectID:                                1,
						TimeInQueueSeconds:                       tt.jobInfo.timeInQueueSeconds,
						ProjectJobsRunningOnInstanceRunnersCount: tt.jobInfo.projectJobsRunningOnInstanceRunnersCount,
					},
				},
			}

			if !tt.monitoringSectionMissing {
				build.Runner.Monitoring = &runner.Monitoring{}

				if !tt.jobQueuingSectionMissing {
					build.Runner.Monitoring.JobQueuingDurations = monitoring.JobQueuingDurations{
						&monitoring.JobQueuingDuration{
							Periods:               []string{"* * * * * * *"},
							Threshold:             tt.threshold,
							JobsRunningForProject: tt.jobsRunningForProject,
						},
					}
				}
				require.NoError(t, build.Runner.Monitoring.Compile())
			}

			b := newBuildsHelper()
			b.addBuild(build)

			ch := make(chan prometheus.Metric, 1)
			b.acceptableJobQueuingDurationExceeded.Collect(ch)
			m := <-ch

			var mm dto.Metric
			err := m.Write(&mm)
			require.NoError(t, err)

			labels := make(map[string]string)
			for _, l := range mm.GetLabel() {
				if !assert.NotNil(t, l.Name) {
					continue
				}
				if !assert.NotNil(t, l.Value) {
					continue
				}
				labels[*l.Name] = *l.Value
			}

			assert.Len(t, labels, 3)
			require.Contains(t, labels, "runner")
			assert.Equal(t, testToken, labels["runner"])
			require.Contains(t, labels, "runner_name")
			assert.Equal(t, testName, labels["runner_name"])
			require.Contains(t, labels, "system_id")
			assert.Equal(t, build.Runner.SystemID, labels["system_id"])
			assert.Equal(t, tt.expectedValue, mm.GetCounter().GetValue())
		})
	}
}

// TestJobExecutionModeTotal verifies that dispatching a job execution mode
// increments the mode/executor counter, including the "unknown" fallback.
func TestJobExecutionModeTotal(t *testing.T) {
	tests := map[string]struct {
		mode             common.JobExecutionMode
		executor         string
		expectedExecutor string
		expectedValue    float64
	}{
		"steps mode": {
			mode:             common.JobExecutionModeSteps,
			executor:         "docker",
			expectedExecutor: "docker",
			expectedValue:    1,
		},
		"traditional mode": {
			mode:             common.JobExecutionModeTraditional,
			executor:         "docker+machine",
			expectedExecutor: "docker+machine",
			expectedValue:    1,
		},
		"empty executor uses unknown label": {
			mode:             common.JobExecutionModeTraditional,
			executor:         "",
			expectedExecutor: "unknown",
			expectedValue:    1,
		},
	}

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			build := &common.Build{
				Runner: &common.RunnerConfig{
					Name: testName,
					RunnerCredentials: common.RunnerCredentials{
						Token: testToken,
					},
					SystemID: "testSystemID",
				},
				Job: spec.Job{
					ID: 1,
					JobInfo: spec.JobInfo{
						ProjectID: 1,
					},
				},
			}

			b := newBuildsHelper()
			b.addBuild(build)

			build.OnJobExecutionModeDispatchedFn.Call(tt.mode, tt.executor)

			ch := make(chan prometheus.Metric, 1)
			b.jobExecutionModeTotal.Collect(ch)
			m := <-ch

			var mm dto.Metric
			err := m.Write(&mm)
			require.NoError(t, err)
			assert.Equal(t, tt.expectedValue, mm.GetCounter().GetValue())

			labels := make(map[string]string)
			for _, l := range mm.GetLabel() {
				if l.Name != nil && l.Value != nil {
					labels[*l.Name] = *l.Value
				}
			}
			assert.Equal(t, string(tt.mode), labels["mode"])
			assert.Equal(t, tt.expectedExecutor, labels["executor"])
		})
	}
}

// TestPrepareStageMetrics verifies that, with the high-cardinality-metrics
// feature flag on, a stage start/end pair is observed into the stage
// duration histogram.
func TestPrepareStageMetrics(t *testing.T) {
	build := &common.Build{
		Runner: &common.RunnerConfig{
			RunnerCredentials: common.RunnerCredentials{
				Token: testToken,
			},
		},
		Job: spec.Job{
			ID: 1,
			JobInfo: spec.JobInfo{
				ProjectID: 1,
			},
		},
	}
	build.Runner.Environment = append(build.Runner.Environment, fmt.Sprintf("%s=true", featureflags.ExportHighCardinalityMetrics))

	bh := newBuildsHelper()
	bh.addBuild(build)
	bh.initializeBuildStageMetrics(build) // verify that the FF toggle will work correctly
	require.NotNil(t, bh.buildStagesStartTimes)

	bh.handleOnBuildStageStart(build, common.BuildStagePrepare)
	time.Sleep(100 * time.Millisecond)
	bh.handleOnBuildStageEnd(build, common.BuildStagePrepare)

	ch := make(chan prometheus.Metric, 1)
	bh.jobStagesDurationHistogram.Collect(ch)

	var mm dto.Metric
	_ = (<-ch).Write(&mm)

	require.NotEmpty(t, mm.Label)
	require.NotNil(t, mm.Histogram)
	require.Equal(t, int(*mm.Histogram.SampleCount), 1)
	require.GreaterOrEqual(t, *mm.Histogram.SampleSum, float64(0.1))
}

// TestPrepareStageMetricsNoFF verifies that without the feature flag no
// stage-time bookkeeping is initialized.
func TestPrepareStageMetricsNoFF(t *testing.T) {
	build := &common.Build{
		Runner: &common.RunnerConfig{
			RunnerCredentials: common.RunnerCredentials{
				Token: testToken,
			},
		},
		Job: spec.Job{
			ID: 1,
			JobInfo: spec.JobInfo{
				ProjectID: 1,
			},
		},
	}

	bh := newBuildsHelper()
	bh.addBuild(build)
	bh.initializeBuildStageMetrics(build)
	require.Nil(t, bh.buildStagesStartTimes)
}

// TestEnsureJobsTotalIsZero verifies that creating a runner counter seeds a
// zero-valued jobs_total series with the expected labels.
func TestEnsureJobsTotalIsZero(t *testing.T) {
	runner := &common.RunnerConfig{
		Name: testName,
		RunnerCredentials: common.RunnerCredentials{
			Token: testToken,
		},
		SystemID: "testSystemID",
	}

	bh := newBuildsHelper()
	bh.getRunnerCounter(runner)

	ch := make(chan prometheus.Metric, 1)
	bh.jobsTotal.Collect(ch)

	var mm dto.Metric
	err :=
(<-ch).Write(&mm) require.NoError(t, err) assert.Equal(t, float64(0), mm.GetCounter().GetValue()) labels := make(map[string]string) for _, l := range mm.GetLabel() { if l.Name != nil && l.Value != nil { labels[*l.Name] = *l.Value } } assert.Equal(t, runner.ShortDescription(), labels["runner"]) assert.Equal(t, runner.Name, labels["runner_name"]) assert.Equal(t, runner.GetSystemID(), labels["system_id"]) } ================================================ FILE: commands/config.go ================================================ package commands import ( "os" "path/filepath" "github.com/sirupsen/logrus" ) func GetDefaultConfigFile() string { return filepath.Join(getDefaultConfigDirectory(), "config.toml") } func GetDefaultCertificateDirectory() string { return filepath.Join(getDefaultConfigDirectory(), "certs") } func init() { configFile := os.Getenv("CONFIG_FILE") if configFile == "" { err := os.Setenv("CONFIG_FILE", GetDefaultConfigFile()) if err != nil { logrus.WithError(err).Fatal("Couldn't set CONFIG_FILE environment variable") } } } ================================================ FILE: commands/config_unix.go ================================================ //go:build aix || android || darwin || dragonfly || freebsd || hurd || illumos || linux || netbsd || openbsd || solaris || zos package commands import ( "os" "path/filepath" "gitlab.com/gitlab-org/gitlab-runner/helpers/homedir" ) var ROOTCONFIGDIR = "/etc/gitlab-runner" func getDefaultConfigDirectory() string { hd := homedir.New() if os.Getuid() == 0 { return ROOTCONFIGDIR } else if homeDir := hd.Get(); homeDir != "" { return filepath.Join(homeDir, ".gitlab-runner") } else if currentDir := hd.GetWDOrEmpty(); currentDir != "" { return currentDir } panic("Cannot get default config file location") } ================================================ FILE: commands/config_windows.go ================================================ package commands import ( "gitlab.com/gitlab-org/gitlab-runner/helpers/homedir" ) 
func getDefaultConfigDirectory() string { if currentDir := homedir.New().GetWDOrEmpty(); currentDir != "" { return currentDir } panic("Cannot get default config file location") } ================================================ FILE: commands/constants.go ================================================ package commands const ( osTypeLinux = "linux" osTypeDarwin = "darwin" osTypeWindows = "windows" osTypeFreeBSD = "freebsd" ) ================================================ FILE: commands/fleeting/fleeting.go ================================================ package fleeting import ( "context" "errors" "fmt" "io" "os" "strings" "github.com/sirupsen/logrus" "github.com/urfave/cli" "gitlab.com/gitlab-org/fleeting/fleeting-artifact/pkg/installer" "gitlab.com/gitlab-org/gitlab-runner/commands" "gitlab.com/gitlab-org/gitlab-runner/common" ) var osExit = os.Exit type runnerFleetingPlugin struct { RunnerName string Plugin string } func getPlugins(context *cli.Context) []runnerFleetingPlugin { config := common.NewConfig() err := config.LoadConfig(context.Parent().String("config")) if err != nil { logrus.Fatalln(err) } var results []runnerFleetingPlugin for _, runnerCfg := range config.Runners { if runnerCfg.Autoscaler == nil { continue } results = append(results, runnerFleetingPlugin{ RunnerName: runnerCfg.ShortDescription(), Plugin: runnerCfg.Autoscaler.Plugin, }) } return results } func install(clictx *cli.Context) { var exitCode int plugins := getPlugins(clictx) if len(plugins) == 0 { logrus.Warnln("No plugins to install, review your runner configuration.") } for _, plugin := range plugins { _, err := installer.LookPath(plugin.Plugin, "") if !errors.Is(err, installer.ErrPluginNotFound) && !clictx.Bool("upgrade") { continue } if err := installer.Install(context.Background(), plugin.Plugin); err != nil { exitCode = 1 fmt.Fprintf(os.Stderr, "runner: %v, plugin: %v, install/update error:: %v\n", plugin.RunnerName, plugin.Plugin, err) continue } path, _ := 
installer.LookPath(plugin.Plugin, "") fmt.Printf("runner: %v, plugin: %v, path: %v\n", plugin.RunnerName, plugin.Plugin, path) } osExit(exitCode) } func list(clictx *cli.Context) { var exitCode int for _, plugin := range getPlugins(clictx) { path, err := installer.LookPath(plugin.Plugin, "") if err != nil { exitCode = 1 fmt.Fprintf(os.Stderr, "runner: %v, plugin: %v, error: %v\n", plugin.RunnerName, plugin.Plugin, err) continue } fmt.Printf("runner: %v, plugin: %v, path: %v\n", plugin.RunnerName, plugin.Plugin, path) } osExit(exitCode) } func login(clictx *cli.Context) error { password := clictx.String("password") if clictx.Bool("password-stdin") { pass, err := io.ReadAll(os.Stdin) if err != nil { fmt.Println("reading password from stdin:", err) osExit(1) } password = strings.TrimSuffix(strings.TrimSuffix(string(pass), "\n"), "\r") } via, err := installer.Login(clictx.Args().Get(0), clictx.String("username"), password) if err != nil { return fmt.Errorf("login: %w", err) } fmt.Println("logged in via", via) return nil } func NewCommand() cli.Command { subcommands := []cli.Command{ { Name: "install", Usage: "install or update fleeting plugins", Flags: []cli.Flag{cli.BoolFlag{Name: "upgrade"}}, Action: install, }, { Name: "list", Usage: "list installed plugins", Action: list, }, { Name: "login", Usage: "login to container registry", Flags: []cli.Flag{ cli.StringFlag{Name: "username"}, cli.StringFlag{Name: "password"}, cli.BoolFlag{Name: "password-stdin", Usage: "take the password from stdin"}, }, ArgsUsage: "[server]", Action: login, }, } return common.NewCommandWithSubcommands( "fleeting", "manage fleeting plugins", common.CommanderFunc(func(ctx *cli.Context) { _ = cli.ShowAppHelp(ctx) }), false, subcommands, cli.StringFlag{Name: "config, c", EnvVar: "CONFIG_FILE", Value: commands.GetDefaultConfigFile()}, ) } ================================================ FILE: commands/fleeting/fleeting_integration_test.go ================================================ //go:build 
integration package fleeting import ( "os" "path/filepath" "testing" "github.com/stretchr/testify/require" "github.com/urfave/cli" "gitlab.com/gitlab-org/fleeting/fleeting-artifact/pkg/installer" ) func init() { osExit = func(code int) { if code == 0 { return } panic(code) } } func TestInstall(t *testing.T) { app := cli.NewApp() app.Name = "runner" app.Commands = []cli.Command{ NewCommand(), } const config = ` [[runners]] [runners.autoscaler] plugin = "aws:0.5.0" ` configPath := filepath.Join(t.TempDir(), "test.toml") require.NoError(t, os.WriteFile(configPath, []byte(config), 0o777)) // no error installing multiple times require.NoError(t, app.Run([]string{"runner", "fleeting", "-c", configPath, "install"})) require.NoError(t, app.Run([]string{"runner", "fleeting", "-c", configPath, "install"})) // ensure plugin installed require.DirExists(t, filepath.Join(installer.InstallDir(), "registry.gitlab.com/gitlab-org/fleeting/plugins/aws/0.5.0")) } ================================================ FILE: commands/health_helper.go ================================================ package commands import ( "sync" "time" "github.com/prometheus/client_golang/prometheus" "github.com/sirupsen/logrus" "gitlab.com/gitlab-org/gitlab-runner/common" ) type healthData struct { failures int lastCheck time.Time } type healthHelper struct { healthy map[string]*healthData healthyLock sync.Mutex healthCheckFailures *prometheus.CounterVec } func newHealthHelper() healthHelper { return healthHelper{ healthCheckFailures: prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "gitlab_runner_worker_health_check_failures_total", Help: "Total number of runner worker health check failures", }, []string{"runner", "runner_name", "system_id"}, ), } } func (mr *healthHelper) getHealth(id string) *healthData { if mr.healthy == nil { mr.healthy = map[string]*healthData{} } health := mr.healthy[id] if health == nil { health = &healthData{ lastCheck: time.Now(), } mr.healthy[id] = health } return health 
} func (mr *healthHelper) isHealthy(runner *common.RunnerConfig) bool { mr.healthyLock.Lock() defer mr.healthyLock.Unlock() mr.runnerHealthCheckFailures(runner).Add(0) id := runner.UniqueID() health := mr.getHealth(id) if health.failures < runner.GetUnhealthyRequestsLimit() { return true } if time.Since(health.lastCheck) > runner.GetUnhealthyInterval() { logrus.WithFields(logrus.Fields{ "unhealthy_requests": health.failures, "unhealthy_requests_limit": runner.GetUnhealthyRequestsLimit(), "unhealthy_interval": runner.GetUnhealthyInterval(), }).Warningf("Runner %q is not healthy, but check for a new job will be forced!", id) health.failures = 0 health.lastCheck = time.Now() return true } return false } func (mr *healthHelper) runnerHealthCheckFailures(runner *common.RunnerConfig) prometheus.Counter { return mr.healthCheckFailures.WithLabelValues(runner.ShortDescription(), runner.Name, runner.GetSystemID()) } func (mr *healthHelper) markHealth(runner *common.RunnerConfig, healthy bool) { mr.healthyLock.Lock() defer mr.healthyLock.Unlock() id := runner.UniqueID() health := mr.getHealth(id) if healthy { health.failures = 0 health.lastCheck = time.Now() return } mr.runnerHealthCheckFailures(runner).Inc() health.failures++ if health.failures >= runner.GetUnhealthyRequestsLimit() { logrus.WithFields(logrus.Fields{ "unhealthy_requests": health.failures, "unhealthy_requests_limit": runner.GetUnhealthyRequestsLimit(), }).Errorf( "Runner %q is unhealthy and will be disabled for %s seconds!", id, runner.GetUnhealthyInterval(), ) } } func (mr *healthHelper) Describe(ch chan<- *prometheus.Desc) { mr.healthCheckFailures.Describe(ch) } func (mr *healthHelper) Collect(ch chan<- prometheus.Metric) { mr.healthCheckFailures.Collect(ch) } ================================================ FILE: commands/helpers/archive/archive.go ================================================ package archive import ( "context" "errors" "fmt" "io" "os" ) var ( // ErrUnsupportedArchiveFormat is returned 
if an archiver or extractor format // requested has not been registered. ErrUnsupportedArchiveFormat = errors.New("unsupported archive format") ) // CompressionLevel type for specifying a compression level. type CompressionLevel int // Compression levels from fastest (low/zero compression ratio) to slowest // (high compression ratio). const ( FastestCompression CompressionLevel = -2 FastCompression CompressionLevel = -1 DefaultCompression CompressionLevel = 0 SlowCompression CompressionLevel = 1 SlowestCompression CompressionLevel = 2 ) // Format type for specifying format. type Format string // Formats supported by GitLab. const ( Raw Format = "raw" Gzip Format = "gzip" Zip Format = "zip" ZipZstd Format = "zipzstd" TarZstd Format = "tarzstd" ) var ( archivers = make(map[Format]NewArchiverFunc) extractors = make(map[Format]NewExtractorFunc) ) // Archiver is an interface for the Archive method. type Archiver interface { Archive(ctx context.Context, files map[string]os.FileInfo) error } // Extractor is an interface for the Extract method. type Extractor interface { Extract(ctx context.Context) error } // NewArchiverFunc is a function that can be registered (with Register()) and // used to instantiate a new archiver (with NewArchiver()). type NewArchiverFunc func(w io.Writer, dir string, level CompressionLevel) (Archiver, error) // NewExtractorFunc is a function that can be registered (with Register()) and // used to instantiate a new extractor (with NewExtractor()). type NewExtractorFunc func(r io.ReaderAt, size int64, dir string) (Extractor, error) // Register registers a new archiver, overriding the archiver and/or extractor // for the format provided. 
func Register( format Format, archiver NewArchiverFunc, extractor NewExtractorFunc, ) ( prevArchiver NewArchiverFunc, prevExtractor NewExtractorFunc, ) { if archiver != nil { prevArchiver = archivers[format] archivers[format] = archiver } if extractor != nil { prevExtractor = extractors[format] extractors[format] = extractor } return } // NewArchiver returns a new Archiver of the specified format. // // The archiver will ensure that files to be archived are children of the // directory provided. func NewArchiver(format Format, w io.Writer, dir string, level CompressionLevel) (Archiver, error) { fn := archivers[format] if fn == nil { return nil, fmt.Errorf("%q format: %w", format, ErrUnsupportedArchiveFormat) } return fn(w, dir, level) } // NewExtractor returns a new Extractor of the specified format. // // The extractor will extract files to the directory provided. func NewExtractor(format Format, r io.ReaderAt, size int64, dir string) (Extractor, error) { fn := extractors[format] if fn == nil { return nil, fmt.Errorf("%q format: %w", format, ErrUnsupportedArchiveFormat) } return fn(r, size, dir) } ================================================ FILE: commands/helpers/archive/archive_test.go ================================================ //go:build !integration package archive_test import ( "io" "testing" "github.com/stretchr/testify/assert" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive" _ "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive/fastzip" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive/gziplegacy" _ "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive/raw" _ "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive/tarzstd" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive/ziplegacy" ) func TestDefaultRegistration(t *testing.T) { tests := map[archive.Format]struct { hasArchiver, hasExtractor bool }{ archive.Raw: {hasArchiver: true, hasExtractor: false}, archive.Gzip: {hasArchiver: true, 
hasExtractor: false}, archive.Zip: {hasArchiver: true, hasExtractor: true}, archive.ZipZstd: {hasArchiver: true, hasExtractor: true}, archive.TarZstd: {hasArchiver: true, hasExtractor: true}, } for tn, tc := range tests { t.Run(string(tn), func(t *testing.T) { _, err := archive.NewArchiver(tn, nil, "", archive.DefaultCompression) if tc.hasArchiver { assert.NoError(t, err) } else { assert.ErrorIs(t, err, archive.ErrUnsupportedArchiveFormat) } _, err = archive.NewExtractor(tn, nil, 0, "") if tc.hasExtractor { assert.NoError(t, err) } else { assert.ErrorIs(t, err, archive.ErrUnsupportedArchiveFormat) } }) } } func TestRegister(t *testing.T) { format := archive.Format("new-format") archive.Register(format, ziplegacy.NewArchiver, ziplegacy.NewExtractor) _, err := archive.NewArchiver(format, nil, "", archive.DefaultCompression) assert.NoError(t, err) _, err = archive.NewExtractor(format, nil, 0, "") assert.NoError(t, err) } func TestRegisterOverride(t *testing.T) { existingGzipArchiver, err := gziplegacy.NewArchiver(io.Discard, "", archive.DefaultCompression) assert.NoError(t, err) existingZipArchiver, err := ziplegacy.NewArchiver(io.Discard, "", archive.DefaultCompression) assert.NoError(t, err) existingZipExtractor, err := ziplegacy.NewExtractor(nil, 0, "") assert.NoError(t, err) // assert existing archiver archiver, err := archive.NewArchiver(archive.Gzip, nil, "", archive.DefaultCompression) assert.NoError(t, err) assert.IsType(t, existingGzipArchiver, archiver) _, err = archive.NewExtractor(archive.Gzip, nil, 0, "") assert.Error(t, err) // override archive.Register(archive.Gzip, ziplegacy.NewArchiver, ziplegacy.NewExtractor) archiver, err = archive.NewArchiver(archive.Gzip, nil, "", archive.DefaultCompression) assert.NoError(t, err) assert.IsType(t, existingZipArchiver, archiver) extractor, err := archive.NewExtractor(archive.Gzip, nil, 0, "") assert.NoError(t, err) assert.IsType(t, existingZipExtractor, extractor) } ================================================ 
FILE: commands/helpers/archive/fastzip/options_test.go ================================================ //go:build !integration package fastzip import ( "fmt" "os" "path/filepath" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive" ) func TestOptionFromEnvValidation(t *testing.T) { t.Run("archiver", func(t *testing.T) { for _, option := range []string{archiverBufferSize, archiverConcurrency} { defer tempEnvOption(option, "invalid")() _, err := getArchiverOptionsFromEnvironment() assert.Error(t, err) } }) t.Run("extractor", func(t *testing.T) { for _, option := range []string{extractorConcurrency} { defer tempEnvOption(option, "invalid")() _, err := getExtractorOptionsFromEnvironment() assert.Error(t, err) } }) } func TestArchiverOptionFromEnv(t *testing.T) { tests := map[string]struct { value string err string }{ archiverStagingDir: {"/dev/null", "fastzip archiver unable to create temporary directory"}, archiverConcurrency: {"-1", "concurrency must be at least 1"}, } for option, tc := range tests { t.Run(fmt.Sprintf("%s=%s", option, tc.value), func(t *testing.T) { defer tempEnvOption(option, tc.value)() archiveTestDir(t, func(_ string, _ string, err error) { require.Error(t, err) require.Contains(t, err.Error(), tc.err) }) }) } } func TestExtractorOptionFromEnv(t *testing.T) { tests := map[string]struct { value string err string }{ extractorConcurrency: {"-1", "concurrency must be at least 1"}, } for option, tc := range tests { t.Run(fmt.Sprintf("%s=%s", option, tc.value), func(t *testing.T) { defer tempEnvOption(option, tc.value)() archiveTestDir(t, func(archiveFile string, dir string, err error) { require.NoError(t, err) f, err := os.Open(archiveFile) require.NoError(t, err) defer f.Close() fi, err := f.Stat() require.NoError(t, err) extractor, err := NewExtractor(f, fi.Size(), dir) require.NoError(t, err) err = extractor.Extract(t.Context()) require.Error(t, err) 
require.Contains(t, err.Error(), tc.err) }) }) } } func archiveTestDir(t *testing.T, fn func(string, string, error)) { dir := t.TempDir() pathname := filepath.Join(dir, "test_file") require.NoError(t, os.WriteFile(pathname, []byte("foobar"), 0o777)) fi, err := os.Stat(pathname) require.NoError(t, err) f, err := os.CreateTemp(dir, "fastzip") require.NoError(t, err) defer f.Close() archiver, err := NewArchiver(f, dir, archive.DefaultCompression) require.NoError(t, err) err = archiver.Archive(t.Context(), map[string]os.FileInfo{pathname: fi}) require.NoError(t, f.Close()) fn(f.Name(), dir, err) } func tempEnvOption(option, value string) func() { existing := os.Getenv(option) os.Setenv(option, value) return func() { os.Setenv(option, existing) } } ================================================ FILE: commands/helpers/archive/fastzip/zip_fastzip_archiver.go ================================================ package fastzip import ( "archive/zip" "context" "fmt" "io" "os" "strconv" "github.com/klauspost/compress/zstd" "github.com/saracen/fastzip" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive" ) var flateLevels = map[archive.CompressionLevel]int{ archive.FastestCompression: 0, archive.FastCompression: 1, archive.DefaultCompression: 5, archive.SlowCompression: 7, archive.SlowestCompression: 9, } var zstdLevels = map[archive.CompressionLevel]int{ archive.FastestCompression: 0, archive.FastCompression: int(zstd.SpeedFastest), archive.DefaultCompression: int(zstd.SpeedDefault), archive.SlowCompression: int(zstd.SpeedBetterCompression), archive.SlowestCompression: int(zstd.SpeedBestCompression), } func init() { archive.Register(archive.ZipZstd, NewZstdArchiver, nil) } const ( archiverConcurrency = "FASTZIP_ARCHIVER_CONCURRENCY" archiverBufferSize = "FASTZIP_ARCHIVER_BUFFER_SIZE" archiverStagingDir = "ARCHIVER_STAGING_DIR" // no prefix: use ArtifactsDownloaderCommand's env setting ) // archiver is a zip stream archiver. 
type archiver struct {
	w     io.Writer                // destination of the zip stream
	dir   string                   // base directory passed to fastzip.NewArchiver
	level archive.CompressionLevel // requested compression level
	zstd  bool                     // use the zstd zip method instead of deflate
}

// NewArchiver returns a new Zip Archiver.
func NewArchiver(w io.Writer, dir string, level archive.CompressionLevel) (archive.Archiver, error) {
	return &archiver{
		w:     w,
		dir:   dir,
		level: level,
	}, nil
}

// NewZstdArchiver returns a new Zip Archiver (with zstd compression).
func NewZstdArchiver(w io.Writer, dir string, level archive.CompressionLevel) (archive.Archiver, error) {
	return &archiver{
		w:     w,
		dir:   dir,
		level: level,
		zstd:  true,
	}, nil
}

// Archive archives all files provided.
func (a *archiver) Archive(ctx context.Context, files map[string]os.FileInfo) error {
	// Staging area for fastzip's temporary files: placed under
	// $ARCHIVER_STAGING_DIR when set, otherwise the OS temp directory.
	tmpDir, err := os.MkdirTemp(os.Getenv(archiverStagingDir), "fastzip")
	if err != nil {
		return fmt.Errorf("fastzip archiver unable to create temporary directory: %w", err)
	}
	defer os.RemoveAll(tmpDir)

	opts, err := getArchiverOptionsFromEnvironment()
	if err != nil {
		return err
	}
	opts = append(opts, fastzip.WithStageDirectory(tmpDir))

	// FastestCompression selects the Store (no compression) method; for a
	// zstd archiver the method is then overridden below.
	if a.level == archive.FastestCompression {
		opts = append(opts, fastzip.WithArchiverMethod(zip.Store))
	}
	if a.zstd {
		opts = append(opts, fastzip.WithArchiverMethod(zstd.ZipMethodWinZip))
	}

	fa, err := fastzip.NewArchiver(a.w, a.dir, opts...)
	if err != nil {
		return err
	}

	// Register a compressor tuned to the requested level, mapped through the
	// package-level zstdLevels/flateLevels tables.
	if a.level != archive.FastestCompression {
		if a.zstd {
			fa.RegisterCompressor(zstd.ZipMethodWinZip, fastzip.ZstdCompressor(zstdLevels[a.level]))
		} else {
			fa.RegisterCompressor(zip.Deflate, fastzip.FlateCompressor(flateLevels[a.level]))
		}
	}

	err = fa.Archive(ctx, files)
	// Close must always run; surface its error only when Archive succeeded.
	if cerr := fa.Close(); err == nil && cerr != nil {
		return cerr
	}
	return err
}

// getArchiverOptionsFromEnvironment builds fastzip archiver options from the
// FASTZIP_ARCHIVER_CONCURRENCY and FASTZIP_ARCHIVER_BUFFER_SIZE environment
// variables; a non-integer value is reported as an error.
func getArchiverOptionsFromEnvironment() ([]fastzip.ArchiverOption, error) {
	var opts []fastzip.ArchiverOption
	if val := os.Getenv(archiverConcurrency); val != "" {
		concurrency, err := strconv.ParseInt(val, 10, 64)
		if err != nil {
			return nil, fmt.Errorf("fastzip archiver concurrency: %w", err)
		}
		opts = append(opts, fastzip.WithArchiverConcurrency(int(concurrency)))
	}
	if val := os.Getenv(archiverBufferSize); val != "" {
		bufferSize, err := strconv.ParseInt(val, 10, 64)
		if err != nil {
			return nil, fmt.Errorf("fastzip archiver buffer size: %w", err)
		}
		opts = append(opts, fastzip.WithArchiverBufferSize(int(bufferSize)))
	}
	return opts, nil
}

================================================
FILE: commands/helpers/archive/fastzip/zip_fastzip_extractor.go
================================================
package fastzip

import (
	"context"
	"fmt"
	"io"
	"os"
	"strconv"

	"github.com/saracen/fastzip"

	"gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive"
)

const (
	// extractorConcurrency is the env var controlling extraction parallelism.
	extractorConcurrency = "FASTZIP_EXTRACTOR_CONCURRENCY"
)

// extractor is a zip stream extractor.
type extractor struct {
	r    io.ReaderAt
	size int64
	dir  string
}

// NewExtractor returns a new Zip Extractor.
func NewExtractor(r io.ReaderAt, size int64, dir string) (archive.Extractor, error) {
	return &extractor{r: r, size: size, dir: dir}, nil
}

// Extract extracts files from the reader to the directory passed to
// NewExtractor.
func (e *extractor) Extract(ctx context.Context) error {
	opts, err := getExtractorOptionsFromEnvironment()
	if err != nil {
		return err
	}

	extractor, err := fastzip.NewExtractorFromReader(e.r, e.size, e.dir, opts...)
if err != nil { return err } defer extractor.Close() return extractor.Extract(ctx) } func getExtractorOptionsFromEnvironment() ([]fastzip.ExtractorOption, error) { var opts []fastzip.ExtractorOption if val := os.Getenv(extractorConcurrency); val != "" { concurrency, err := strconv.ParseInt(val, 10, 64) if err != nil { return nil, fmt.Errorf("fastzip extractor concurrency: %w", err) } opts = append(opts, fastzip.WithExtractorConcurrency(int(concurrency))) } return opts, nil } ================================================ FILE: commands/helpers/archive/gziplegacy/gzip_legacy_archiver.go ================================================ package gziplegacy import ( "context" "io" "os" "sort" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive" "gitlab.com/gitlab-org/gitlab-runner/helpers/archives" ) func init() { archive.Register(archive.Gzip, NewArchiver, nil) } // archiver is a gzip stream archiver. type archiver struct { w io.Writer dir string } // NewArchiver returns a new Gzip Archiver. func NewArchiver(w io.Writer, dir string, level archive.CompressionLevel) (archive.Archiver, error) { return &archiver{w: w, dir: dir}, nil } // Archive archives all files as new gzip streams. func (a *archiver) Archive(ctx context.Context, files map[string]os.FileInfo) error { sorted := make([]string, 0, len(files)) for filename := range files { sorted = append(sorted, filename) } sort.Strings(sorted) return archives.CreateGzipArchive(a.w, sorted) } ================================================ FILE: commands/helpers/archive/mocks.go ================================================ // Code generated by mockery; DO NOT EDIT. // github.com/vektra/mockery // template: testify package archive import ( "context" "os" mock "github.com/stretchr/testify/mock" ) // NewMockArchiver creates a new instance of MockArchiver. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. 
// The first argument is typically a *testing.T value. func NewMockArchiver(t interface { mock.TestingT Cleanup(func()) }) *MockArchiver { mock := &MockArchiver{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockArchiver is an autogenerated mock type for the Archiver type type MockArchiver struct { mock.Mock } type MockArchiver_Expecter struct { mock *mock.Mock } func (_m *MockArchiver) EXPECT() *MockArchiver_Expecter { return &MockArchiver_Expecter{mock: &_m.Mock} } // Archive provides a mock function for the type MockArchiver func (_mock *MockArchiver) Archive(ctx context.Context, files map[string]os.FileInfo) error { ret := _mock.Called(ctx, files) if len(ret) == 0 { panic("no return value specified for Archive") } var r0 error if returnFunc, ok := ret.Get(0).(func(context.Context, map[string]os.FileInfo) error); ok { r0 = returnFunc(ctx, files) } else { r0 = ret.Error(0) } return r0 } // MockArchiver_Archive_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Archive' type MockArchiver_Archive_Call struct { *mock.Call } // Archive is a helper method to define mock.On call // - ctx context.Context // - files map[string]os.FileInfo func (_e *MockArchiver_Expecter) Archive(ctx interface{}, files interface{}) *MockArchiver_Archive_Call { return &MockArchiver_Archive_Call{Call: _e.mock.On("Archive", ctx, files)} } func (_c *MockArchiver_Archive_Call) Run(run func(ctx context.Context, files map[string]os.FileInfo)) *MockArchiver_Archive_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } var arg1 map[string]os.FileInfo if args[1] != nil { arg1 = args[1].(map[string]os.FileInfo) } run( arg0, arg1, ) }) return _c } func (_c *MockArchiver_Archive_Call) Return(err error) *MockArchiver_Archive_Call { _c.Call.Return(err) return _c } func (_c *MockArchiver_Archive_Call) RunAndReturn(run func(ctx context.Context, files 
map[string]os.FileInfo) error) *MockArchiver_Archive_Call { _c.Call.Return(run) return _c } // NewMockExtractor creates a new instance of MockExtractor. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockExtractor(t interface { mock.TestingT Cleanup(func()) }) *MockExtractor { mock := &MockExtractor{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockExtractor is an autogenerated mock type for the Extractor type type MockExtractor struct { mock.Mock } type MockExtractor_Expecter struct { mock *mock.Mock } func (_m *MockExtractor) EXPECT() *MockExtractor_Expecter { return &MockExtractor_Expecter{mock: &_m.Mock} } // Extract provides a mock function for the type MockExtractor func (_mock *MockExtractor) Extract(ctx context.Context) error { ret := _mock.Called(ctx) if len(ret) == 0 { panic("no return value specified for Extract") } var r0 error if returnFunc, ok := ret.Get(0).(func(context.Context) error); ok { r0 = returnFunc(ctx) } else { r0 = ret.Error(0) } return r0 } // MockExtractor_Extract_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Extract' type MockExtractor_Extract_Call struct { *mock.Call } // Extract is a helper method to define mock.On call // - ctx context.Context func (_e *MockExtractor_Expecter) Extract(ctx interface{}) *MockExtractor_Extract_Call { return &MockExtractor_Extract_Call{Call: _e.mock.On("Extract", ctx)} } func (_c *MockExtractor_Extract_Call) Run(run func(ctx context.Context)) *MockExtractor_Extract_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } run( arg0, ) }) return _c } func (_c *MockExtractor_Extract_Call) Return(err error) *MockExtractor_Extract_Call { _c.Call.Return(err) return _c } func (_c *MockExtractor_Extract_Call) RunAndReturn(run func(ctx 
context.Context) error) *MockExtractor_Extract_Call { _c.Call.Return(run) return _c } ================================================ FILE: commands/helpers/archive/raw/raw_archiver.go ================================================ package raw import ( "context" "errors" "io" "os" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive" ) func init() { archive.Register(archive.Raw, NewArchiver, nil) } // ErrTooManyRawFiles is returned if more than one file is passed to the // RawArchiver. var ErrTooManyRawFiles = errors.New("only one file can be sent as raw") // archiver is a raw archiver. It doesn't support compression nor multiple // files. type archiver struct { w io.Writer dir string } // NewArchiver returns a new Raw Archiver. func NewArchiver(w io.Writer, dir string, level archive.CompressionLevel) (archive.Archiver, error) { return &archiver{w: w, dir: dir}, nil } // Archive opens and copies a single file to the writer passed to // NewRawArchiver. If more than one file is passed, ErrTooManyRawFiles is // returned. 
func (a *archiver) Archive(ctx context.Context, files map[string]os.FileInfo) error {
	// A raw archive holds exactly one file; zero files is a silent no-op.
	if len(files) > 1 {
		return ErrTooManyRawFiles
	}
	for pathname := range files {
		f, err := os.Open(pathname)
		if err != nil {
			return err
		}
		defer f.Close()
		// Stream the single file's contents verbatim to the writer and
		// return immediately — the loop body runs at most once.
		_, err = io.Copy(a.w, f)
		return err
	}
	return nil
}

================================================
FILE: commands/helpers/archive/tarzstd/ops_unix.go
================================================
//go:build !windows

package tarzstd

import (
	"os"
	"runtime"
	"time"

	"golang.org/x/sys/unix"
)

// lchmod changes the mode of name without following symlinks.
// On Linux, symlink entries are skipped entirely and no flag is passed
// (NOTE(review): presumably because Fchmodat does not honor
// AT_SYMLINK_NOFOLLOW there — confirm); other unixes pass the
// no-follow flag so the link itself is chmod-ed.
func lchmod(name string, mode os.FileMode) error {
	var flags int
	if runtime.GOOS == "linux" {
		if mode&os.ModeSymlink != 0 {
			return nil
		}
	} else {
		flags = unix.AT_SYMLINK_NOFOLLOW
	}
	err := unix.Fchmodat(unix.AT_FDCWD, name, uint32(mode), flags)
	if err != nil {
		return &os.PathError{Op: "lchmod", Path: name, Err: err}
	}
	return nil
}

// lchtimes sets access and modification times via Lutimes, which does not
// follow symlinks.
func lchtimes(name string, mode os.FileMode, atime, mtime time.Time) error {
	// NOTE(review): on z/OS the mode is re-applied before setting times —
	// looks like a platform quirk; confirm before changing.
	if runtime.GOOS == "zos" {
		if err := lchmod(name, mode); err != nil {
			return err
		}
	}
	at := unix.NsecToTimeval(atime.UnixNano())
	mt := unix.NsecToTimeval(mtime.UnixNano())
	tv := [2]unix.Timeval{at, mt}
	err := unix.Lutimes(name, tv[:])
	if err != nil {
		return &os.PathError{Op: "lchtimes", Path: name, Err: err}
	}
	return nil
}

// lchown changes ownership of name without following symlinks.
func lchown(name string, uid, gid int) error {
	return os.Lchown(name, uid, gid)
}

================================================
FILE: commands/helpers/archive/tarzstd/ops_windows.go
================================================
//go:build windows

package tarzstd

import (
	"os"
	"time"
)

// lchmod changes the mode of name; symlink entries are left untouched
// (Windows has no lchmod equivalent).
func lchmod(name string, mode os.FileMode) error {
	if mode&os.ModeSymlink != 0 {
		return nil
	}
	return os.Chmod(name, mode)
}

// lchtimes sets access/modification times; symlink entries are skipped.
func lchtimes(name string, mode os.FileMode, atime, mtime time.Time) error {
	if mode&os.ModeSymlink != 0 {
		return nil
	}
	return os.Chtimes(name, atime, mtime)
}

// lchown is a no-op on Windows, which has no POSIX uid/gid ownership.
func lchown(name string, uid, gid int) error {
	return nil
}

================================================
FILE:
commands/helpers/archive/tarzstd/tarzstd_archiver.go ================================================ package tarzstd import ( "archive/tar" "context" "fmt" "io" "os" "path/filepath" "sort" "strings" "github.com/klauspost/compress/zstd" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive" ) func init() { archive.Register(archive.TarZstd, NewArchiver, NewExtractor) } const irregularModes = os.ModeSocket | os.ModeDevice | os.ModeCharDevice | os.ModeNamedPipe var levels = map[archive.CompressionLevel]int{ archive.FastestCompression: int(zstd.SpeedFastest), archive.FastCompression: int(zstd.SpeedFastest), archive.DefaultCompression: int(zstd.SpeedDefault), archive.SlowCompression: int(zstd.SpeedBetterCompression), archive.SlowestCompression: int(zstd.SpeedBestCompression), } // archiver is a tar+zstd stream archiver. type archiver struct { w io.Writer dir string level archive.CompressionLevel } // NewArchiver returns a new Tar+zstd Archiver. func NewArchiver(w io.Writer, dir string, level archive.CompressionLevel) (archive.Archiver, error) { return &archiver{w: w, dir: dir, level: level}, nil } // Archive archives all files. 
// //nolint:gocognit func (a *archiver) Archive(ctx context.Context, files map[string]os.FileInfo) error { sorted := make([]string, 0, len(files)) for filename := range files { sorted = append(sorted, filename) } sort.Strings(sorted) zw, err := zstd.NewWriter(a.w, zstd.WithEncoderLevel(zstd.EncoderLevel(levels[a.level]))) if err != nil { return err } defer zw.Close() tw := tar.NewWriter(zw) defer tw.Close() for _, name := range sorted { fi := files[name] if fi.Mode()&irregularModes != 0 { continue } path, err := filepath.Abs(name) if err != nil { return err } if !strings.HasPrefix(path, a.dir+string(filepath.Separator)) && path != a.dir { return fmt.Errorf("%s cannot be archived from outside of chroot (%s)", name, a.dir) } rel, err := filepath.Rel(a.dir, path) if err != nil { return err } if ctx.Err() != nil { return ctx.Err() } var link string if fi.Mode()&os.ModeSymlink != 0 { link, err = os.Readlink(path) if err != nil { return err } } hdr, err := tar.FileInfoHeader(fi, link) if err != nil { return err } hdr.Name = rel if fi.IsDir() { hdr.Name += "/" } if err := tw.WriteHeader(hdr); err != nil { return err } if !fi.Mode().IsRegular() { continue } f, err := os.Open(path) if err != nil { return err } if _, err = io.Copy(tw, f); err != nil { f.Close() return err } f.Close() } if err := tw.Close(); err != nil { return err } return zw.Close() } ================================================ FILE: commands/helpers/archive/tarzstd/tarzstd_extractor.go ================================================ package tarzstd import ( "archive/tar" "context" "fmt" "io" "os" "path/filepath" "strings" "time" "github.com/klauspost/compress/zstd" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive" ) // extractor is a tar+zstd stream extractor. type extractor struct { r io.ReaderAt size int64 dir string } // NewExtractor returns a new tar+zstd extractor. 
// NewExtractor returns a new tar+zstd extractor that reads a zstd-compressed
// tar stream of size bytes from r and extracts it into dir.
func NewExtractor(r io.ReaderAt, size int64, dir string) (archive.Extractor, error) {
	return &extractor{r: r, size: size, dir: dir}, nil
}

// Extract extracts files from the reader to the directory passed to
// NewExtractor.
//
// Irregular entries (sockets, devices, pipes) are skipped and any entry that
// would resolve outside the extraction directory is rejected. Symlink
// creation and directory metadata updates are deferred until all regular
// files have been written — presumably so extraction never writes through a
// symlink and so directory timestamps aren't clobbered by writing children;
// confirm before relying on this ordering elsewhere.
//
//nolint:gocognit
func (e *extractor) Extract(ctx context.Context) error {
	zr, err := zstd.NewReader(io.NewSectionReader(e.r, 0, e.size), zstd.WithDecoderLowmem(true))
	if err != nil {
		return err
	}
	defer zr.Close()

	tr := tar.NewReader(zr)

	// deferred collects symlink and directory headers to be processed after
	// the main pass over the archive.
	deferred := map[string]*tar.Header{}
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return err
		}

		fi := hdr.FileInfo()
		if fi.Mode()&irregularModes != 0 {
			// Sockets, devices and pipes are not extracted.
			continue
		}

		var path string
		path, err = filepath.Abs(filepath.Join(e.dir, hdr.Name))
		if err != nil {
			return err
		}
		// Chroot check: the resolved path must stay within e.dir.
		if !strings.HasPrefix(path, e.dir+string(filepath.Separator)) && path != e.dir {
			return fmt.Errorf("%s cannot be extracted outside of chroot (%s)", path, e.dir)
		}

		// Ensure parent directories exist even when the archive has no
		// explicit entries for them. Final modes are applied later via
		// updateFileMetadata (subject to umask here).
		if err := os.MkdirAll(filepath.Dir(path), 0777); err != nil {
			return err
		}

		// Honour cancellation between entries.
		if ctx.Err() != nil {
			return ctx.Err()
		}

		switch {
		case fi.Mode()&os.ModeSymlink != 0:
			// Symlinks are created in the deferred pass.
			deferred[path] = hdr
			continue

		case fi.Mode().IsDir():
			// Create the directory now, but defer its metadata update.
			deferred[path] = hdr
			err := os.Mkdir(path, 0777)
			if err != nil && !os.IsExist(err) {
				return err
			}

		case fi.Mode().IsRegular():
			f, err := os.Create(path)
			if err != nil {
				return err
			}
			if _, err := io.Copy(f, tr); err != nil {
				f.Close()
				return err
			}
			if err := f.Close(); err != nil {
				return err
			}
			if err := e.updateFileMetadata(path, hdr); err != nil {
				return err
			}
		}
	}

	// Second pass: create symlinks and apply symlink/directory metadata.
	for path, hdr := range deferred {
		fi := hdr.FileInfo()
		if fi.Mode()&os.ModeSymlink == 0 && !fi.Mode().IsDir() {
			continue
		}
		if fi.Mode()&os.ModeSymlink != 0 {
			if err := os.Symlink(hdr.Linkname, path); err != nil {
				return err
			}
		}
		if err := e.updateFileMetadata(path, hdr); err != nil {
			return err
		}
	}

	return nil
}

// updateFileMetadata applies the tar header's times, mode and ownership to
// path without following symlinks. lchtimes/lchmod/lchown are
// platform-specific helpers (see ops_unix.go); the chown error is
// deliberately ignored — presumably because it fails for non-root users.
func (e *extractor) updateFileMetadata(path string, hdr *tar.Header) error {
	fi := hdr.FileInfo()
	if err := lchtimes(path, fi.Mode(), time.Now(), fi.ModTime()); err != nil {
		return err
	}
	if err := lchmod(path, fi.Mode()); err != nil {
		return err
	}
	_ = lchown(path, hdr.Uid, hdr.Gid)
	return nil
}
// NewExtractor returns a new Zip Extractor.
func NewExtractor(r io.ReaderAt, size int64, dir string) (archive.Extractor, error) {
	return &extractor{r: r, size: size, dir: dir}, nil
}

// Extract extracts files from the reader to the directory passed to
// NewExtractor.
//
// NOTE(review): ctx and e.dir are unused here; the legacy implementation
// delegates to archives.ExtractZipArchive, which appears to extract relative
// to the process working directory — confirm against callers.
func (e *extractor) Extract(ctx context.Context) error {
	reader, err := zip.NewReader(e.r, e.size)
	if err != nil {
		return err
	}

	return archives.ExtractZipArchive(reader)
}
archive.Register(archive.ZipZstd, nil, fastzip.NewExtractor) } } // GetCompressionLevel converts the compression level name to compression level type // https://docs.gitlab.com/ci/runners/configure_runners/#artifact-and-cache-settings func GetCompressionLevel(name string) archive.CompressionLevel { switch name { case "fastest": return archive.FastestCompression case "fast": return archive.FastCompression case "slow": return archive.SlowCompression case "slowest": return archive.SlowestCompression case "default", "": return archive.DefaultCompression } logrus.Warningf("compression level %q is invalid, falling back to default", name) return archive.DefaultCompression } ================================================ FILE: commands/helpers/archiver_test.go ================================================ //go:build !integration package helpers import ( "bytes" "io/fs" "os" "path/filepath" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive" ) func TestCompressionLevel(t *testing.T) { tests := map[string]archive.CompressionLevel{ "fastest": archive.FastestCompression, "fast": archive.FastCompression, "slow": archive.SlowCompression, "slowest": archive.SlowestCompression, "default": archive.DefaultCompression, "": archive.DefaultCompression, "invalid": archive.DefaultCompression, } for name, level := range tests { t.Run(name, func(t *testing.T) { assert.Equal(t, level, GetCompressionLevel(name)) }) } } func TestArchiver(t *testing.T) { small := []byte("12345678") large := bytes.Repeat([]byte("198273qhnjbqwdjbqwe2109u3abcdef3"), 1024*1024) originalDir, _ := os.Getwd() defer func() { _ = os.Chdir(originalDir) }() OnEachArchiver(t, func(t *testing.T, format archive.Format) { dir := t.TempDir() buf := new(bytes.Buffer) require.NoError(t, os.WriteFile(filepath.Join(dir, "small"), small, 0777)) require.NoError(t, os.WriteFile(filepath.Join(dir, "large"), large, 0777)) archiver, err 
// TestArchiver round-trips two files (one tiny, one multi-megabyte) through
// every registered archiver/extractor pair and verifies the extracted
// contents match byte-for-byte.
func TestArchiver(t *testing.T) {
	small := []byte("12345678")
	large := bytes.Repeat([]byte("198273qhnjbqwdjbqwe2109u3abcdef3"), 1024*1024)

	originalDir, _ := os.Getwd()
	defer func() {
		_ = os.Chdir(originalDir)
	}()

	OnEachArchiver(t, func(t *testing.T, format archive.Format) {
		dir := t.TempDir()
		buf := new(bytes.Buffer)

		require.NoError(t, os.WriteFile(filepath.Join(dir, "small"), small, 0777))
		require.NoError(t, os.WriteFile(filepath.Join(dir, "large"), large, 0777))

		archiver, err := archive.NewArchiver(format, buf, dir, archive.DefaultCompression)
		require.NoError(t, err)

		// Collect the two regular files as the archive's input set.
		files := make(map[string]fs.FileInfo)
		_ = filepath.Walk(dir, func(path string, info fs.FileInfo, err error) error {
			if info.IsDir() {
				return nil
			}
			files[path] = info
			return nil
		})
		assert.Equal(t, 2, len(files))

		require.NoError(t, archiver.Archive(t.Context(), files))
		input := buf.Bytes()

		out := t.TempDir()

		// hack: legacy archiver require being in the correct working dir
		_ = os.Chdir(out)

		// for Windows: change directory on exit so that we're not "using" the directory we're removing
		defer func() {
			_ = os.Chdir(originalDir)
		}()

		extractor, err := archive.NewExtractor(format, bytes.NewReader(input), int64(len(input)), out)
		require.NoError(t, err)
		require.NoError(t, extractor.Extract(t.Context()))

		smallEq, err := os.ReadFile(filepath.Join(out, "small"))
		require.NoError(t, err)
		assert.Equal(t, small, smallEq)

		largeEq, err := os.ReadFile(filepath.Join(out, "large"))
		require.NoError(t, err)
		assert.Equal(t, large, largeEq)
	})
}

// TestZipArchiveExtract archives with the Zip format under each registered
// zip archiver and verifies every registered zip extractor can round-trip
// the result ("fastzip" variants included via the trailing argument).
func TestZipArchiveExtract(t *testing.T) {
	small := []byte("12345678")
	large := bytes.Repeat([]byte("198273qhnjbqwdjbqwe2109u3abcdef3"), 1024*1024)

	OnEachZipArchiver(t, func(t *testing.T) {
		dir := t.TempDir()
		buf := new(bytes.Buffer)

		require.NoError(t, os.WriteFile(filepath.Join(dir, "small"), small, 0777))
		require.NoError(t, os.WriteFile(filepath.Join(dir, "large"), large, 0777))

		archiver, err := archive.NewArchiver(archive.Zip, buf, dir, archive.DefaultCompression)
		require.NoError(t, err)

		files := make(map[string]fs.FileInfo)
		_ = filepath.Walk(dir, func(path string, info fs.FileInfo, err error) error {
			if info.IsDir() {
				return nil
			}
			files[path] = info
			return nil
		})
		assert.Equal(t, 2, len(files))

		require.NoError(t, archiver.Archive(t.Context(), files))
		input := buf.Bytes()

		OnEachZipExtractor(t, func(t *testing.T) {
			out := t.TempDir()

			extractor, err := archive.NewExtractor(archive.Zip, bytes.NewReader(input), int64(len(input)), out)
			require.NoError(t, err)
			require.NoError(t, extractor.Extract(t.Context()))

			smallEq, err := os.ReadFile(filepath.Join(out, "small"))
			require.NoError(t, err)
			assert.Equal(t, small, smallEq)

			largeEq, err := os.ReadFile(filepath.Join(out, "large"))
			require.NoError(t, err)
			assert.Equal(t, large, largeEq)
		}, "fastzip")
	}, "fastzip")
}
// generateStatementToFile builds an in-toto attestation statement for the
// named artifact and writes it, pretty-printed via protojson, to
// "<artifactName>-metadata.json" inside opts.artifactsWd. It returns the
// path of the written file.
func (g *artifactStatementGenerator) generateStatementToFile(opts generateStatementOptions) (string, error) {
	start, end, err := g.parseTimings()
	if err != nil {
		return "", err
	}

	// Only SLSA provenance v1 is supported; anything else warns and the
	// statement is still produced in the default (v1) shape.
	if g.SLSAProvenanceVersion != slsaProvenanceVersion1 {
		logrus.Warnf("Unknown SLSA provenance version %s, defaulting to %s", g.SLSAProvenanceVersion, defaultSLSAProvenanceVersion)
	}

	subjects, err := g.generateSubjects(opts.files)
	if err != nil {
		return "", err
	}

	provenance, err := g.generateSLSAv1Predicate(opts.jobID, start, end)
	if err != nil {
		return "", err
	}

	// The provenance proto is round-tripped through JSON into a generic
	// structpb.Struct because the statement's Predicate field requires one.
	predicateJSON, err := protojson.Marshal(provenance)
	if err != nil {
		return "", err
	}

	predicate := &structpb.Struct{}
	if err := protojson.Unmarshal(predicateJSON, predicate); err != nil {
		return "", err
	}

	statement := &ita_v1.Statement{
		Type:          in_toto.StatementInTotoV01,
		PredicateType: slsa_v1.PredicateSLSAProvenance,
		Subject:       subjects,
		Predicate:     predicate,
	}

	b, err := protojson.MarshalOptions{Multiline: true, Indent: " "}.Marshal(statement)
	if err != nil {
		return "", err
	}

	file := filepath.Join(opts.artifactsWd, fmt.Sprintf(artifactsStatementFormat, opts.artifactName))
	err = os.WriteFile(file, b, 0o644)
	return file, err
}

// generateSLSAv1Predicate assembles the SLSA v1 provenance predicate
// describing this job: build definition (type, parameters, resolved repo
// dependency) and run details (builder identity plus start/end timestamps).
func (g *artifactStatementGenerator) generateSLSAv1Predicate(jobId int64, start time.Time, end time.Time) (*prov_v1.Provenance, error) {
	externalParams, err := g.externalParams(g.JobName, g.RepoURL)
	if err != nil {
		return nil, err
	}
	internalParams, err := g.internalParams(jobId)
	if err != nil {
		return nil, err
	}

	return &prov_v1.Provenance{
		BuildDefinition: &prov_v1.BuildDefinition{
			BuildType:          fmt.Sprintf(attestationTypeFormat, g.version()),
			ExternalParameters: externalParams,
			InternalParameters: internalParams,
			ResolvedDependencies: []*ita_v1.ResourceDescriptor{{
				Uri:    g.RepoURL,
				Digest: map[string]string{"sha256": g.RepoDigest},
			}},
		},
		RunDetails: &prov_v1.RunDetails{
			Builder: &prov_v1.Builder{
				Id: fmt.Sprintf(attestationRunnerIDFormat, g.RepoURL, g.RunnerID),
				Version: map[string]string{
					"gitlab-runner": g.version(),
				},
			},
			Metadata: &prov_v1.BuildMetadata{
				InvocationId: fmt.Sprint(jobId),
				StartedOn:    timestamppb.New(start),
				FinishedOn:   timestamppb.New(end),
			},
		},
	}, nil
}

// externalParams returns the user-visible parameters of the build: each
// configured metadata parameter name (with an empty value), plus the job
// name as entryPoint and the repository URL as source.
func (g *artifactStatementGenerator) externalParams(jobName, repoURL string) (*structpb.Struct, error) {
	paramsMap := make(map[string]any, len(g.Parameters))
	for _, param := range g.Parameters {
		paramsMap[param] = ""
	}
	paramsMap["entryPoint"] = jobName
	paramsMap["source"] = repoURL

	params, err := structpb.NewStruct(paramsMap)
	if err != nil {
		return nil, err
	}
	return params, nil
}

// internalParams returns runner-side build parameters: runner name,
// executor, architecture and the job ID (as a string).
func (g *artifactStatementGenerator) internalParams(jobId int64) (*structpb.Struct, error) {
	return structpb.NewStruct(map[string]any{
		"name":         g.RunnerName,
		"executor":     g.ExecutorName,
		"architecture": common.AppVersion.Architecture,
		"job":          strconv.FormatInt(jobId, 10),
	})
}

// version returns the runner release version when it looks like a tagged
// release ("v" prefix), otherwise the git revision.
func (g *artifactStatementGenerator) version() string {
	if strings.HasPrefix(common.AppVersion.Version, "v") {
		return common.AppVersion.Version
	}

	return common.AppVersion.Revision
}

// parseTimings parses the RFC3339 started/ended timestamps supplied on the
// command line.
func (g *artifactStatementGenerator) parseTimings() (time.Time, time.Time, error) {
	startedAt, err := time.Parse(time.RFC3339, g.StartedAtRFC3339)
	if err != nil {
		return time.Time{}, time.Time{}, err
	}

	endedAt, err := time.Parse(time.RFC3339, g.EndedAtRFC3339)
	if err != nil {
		return time.Time{}, time.Time{}, err
	}

	return startedAt, endedAt, nil
}

// generateSubjects computes a sha256 subject descriptor for every regular
// file in files; non-regular entries (directories, symlinks, ...) are
// skipped. The hash and read buffer are reused across files.
//
// NOTE(review): iterating the files map makes the subject order
// nondeterministic between runs — confirm whether consumers of the metadata
// file care about ordering.
func (g *artifactStatementGenerator) generateSubjects(files map[string]os.FileInfo) ([]*ita_v1.ResourceDescriptor, error) {
	subjects := make([]*ita_v1.ResourceDescriptor, 0, len(files))

	h := sha256.New()
	br := bufio.NewReader(nil)
	subjectGeneratorFunc := func(file string) (*ita_v1.ResourceDescriptor, error) {
		f, err := os.Open(file)
		if err != nil {
			return &ita_v1.ResourceDescriptor{}, err
		}
		defer f.Close()

		br.Reset(f)
		h.Reset()
		if _, err := io.Copy(h, br); err != nil {
			return &ita_v1.ResourceDescriptor{}, err
		}

		return &ita_v1.ResourceDescriptor{
			Name:   file,
			Digest: map[string]string{"sha256": hex.EncodeToString(h.Sum(nil))},
		}, nil
	}

	for file, fi := range files {
		if !fi.Mode().IsRegular() {
			continue
		}

		subject, err := subjectGeneratorFunc(file)
		if err != nil {
			return nil, err
		}
		subjects = append(subjects, subject)
	}

	return subjects, nil
}
// fileInfo is a minimal fs.FileInfo stub used to feed the statement
// generator without touching the real filesystem metadata.
type fileInfo struct {
	name string
	mode fs.FileMode
}

func (fi fileInfo) Name() string       { return fi.name }
func (fi fileInfo) Size() int64        { return 0 }
func (fi fileInfo) Mode() fs.FileMode  { return fi.mode }
func (fi fileInfo) ModTime() time.Time { return time.Now() }
func (fi fileInfo) IsDir() bool        { return fi.mode.IsDir() }
func (fi fileInfo) Sys() any           { return nil }

// TestGenerateMetadataToFile builds expected in-toto statements alongside
// the generator's real output and compares the serialized JSON, covering
// versioned/unversioned runner builds, missing subject files, non-regular
// files and empty parameter lists.
func TestGenerateMetadataToFile(t *testing.T) {
	tmpDir := t.TempDir()
	tmpFile, err := os.CreateTemp(tmpDir, "")
	require.NoError(t, err)
	_, err = tmpFile.WriteString("testdata")
	require.NoError(t, err)
	require.NoError(t, tmpFile.Close())

	// Expected sha256 of the subject file's contents.
	sha := sha256.New()
	sha.Write([]byte("testdata"))
	checksum := sha.Sum(nil)

	// First format the time to RFC3339 and then parse it to get the correct precision
	startedAtRFC3339 := time.Now().Format(time.RFC3339)
	startedAt, err := time.Parse(time.RFC3339, startedAtRFC3339)
	require.NoError(t, err)

	endedAtRFC3339 := time.Now().Add(time.Minute).Format(time.RFC3339)
	endedAt, err := time.Parse(time.RFC3339, endedAtRFC3339)
	require.NoError(t, err)

	// testsStatementV1 mirrors the production statement assembly so the
	// serialized output can be compared byte-for-byte.
	var testsStatementV1 = func(
		version string,
		g *artifactStatementGenerator,
		opts generateStatementOptions,
	) *ita_v1.Statement {
		externalParams, err := g.externalParams(g.JobName, g.RepoURL)
		require.NoError(t, err)
		internalParams, err := g.internalParams(opts.jobID)
		require.NoError(t, err)

		provenance := &prov_v1.Provenance{
			BuildDefinition: &prov_v1.BuildDefinition{
				BuildType:          fmt.Sprintf(attestationTypeFormat, version),
				ExternalParameters: externalParams,
				InternalParameters: internalParams,
				ResolvedDependencies: []*ita_v1.ResourceDescriptor{{
					Uri:    g.RepoURL,
					Digest: map[string]string{"sha256": g.RepoDigest},
				}},
			},
			RunDetails: &prov_v1.RunDetails{
				Builder: &prov_v1.Builder{
					Id: fmt.Sprintf(attestationRunnerIDFormat, g.RepoURL, g.RunnerID),
					Version: map[string]string{
						"gitlab-runner": version,
					},
				},
				Metadata: &prov_v1.BuildMetadata{
					InvocationId: fmt.Sprint(opts.jobID),
					StartedOn:    timestamppb.New(startedAt),
					FinishedOn:   timestamppb.New(endedAt),
				},
			},
		}

		predicateJSON, err := protojson.Marshal(provenance)
		require.NoError(t, err)

		predicate := &structpb.Struct{}
		err = protojson.Unmarshal(predicateJSON, predicate)
		require.NoError(t, err)

		return &ita_v1.Statement{
			Type:          in_toto.StatementInTotoV01,
			PredicateType: slsa_v1.PredicateSLSAProvenance,
			Subject: []*ita_v1.ResourceDescriptor{
				{
					Name:   tmpFile.Name(),
					Digest: map[string]string{"sha256": hex.EncodeToString(checksum)},
				},
			},
			Predicate: predicate,
		}
	}

	var testStatement = func(
		version string,
		g *artifactStatementGenerator,
		opts generateStatementOptions) any {
		switch g.SLSAProvenanceVersion {
		case slsaProvenanceVersion1:
			return testsStatementV1(version, g, opts)
		default:
			panic("unreachable, invalid statement version")
		}
	}

	// setVersion overrides the global runner version and returns a restore
	// function.
	var setVersion = func(version string) (string, func()) {
		originalVersion := common.AppVersion.Version
		common.AppVersion.Version = version
		return version, func() {
			common.AppVersion.Version = originalVersion
		}
	}

	var newGenerator = func(slsaVersion string) *artifactStatementGenerator {
		return &artifactStatementGenerator{
			RunnerID:              1001,
			RepoURL:               "testurl",
			RepoDigest:            "testdigest",
			JobName:               "testjobname",
			ExecutorName:          "testexecutorname",
			RunnerName:            "testrunnername",
			Parameters:            []string{"testparam"},
			StartedAtRFC3339:      startedAtRFC3339,
			EndedAtRFC3339:        endedAtRFC3339,
			SLSAProvenanceVersion: slsaVersion,
		}
	}

	tests := map[string]struct {
		opts          generateStatementOptions
		newGenerator  func(slsaVersion string) *artifactStatementGenerator
		expected      func(*artifactStatementGenerator, generateStatementOptions) (any, func())
		expectedError error
	}{
		"basic": {
			newGenerator: newGenerator,
			opts: generateStatementOptions{
				artifactName: "artifact-name",
				files:        map[string]os.FileInfo{tmpFile.Name(): fileInfo{name: tmpFile.Name()}},
				artifactsWd:  tmpDir,
				jobID:        1000,
			},
			expected: func(g *artifactStatementGenerator, opts generateStatementOptions) (any, func()) {
				version, cleanup := setVersion("v1.0.0")
				return testStatement(version, g, opts), cleanup
			},
		},
		"basic version isn't prefixed so use REVISION": {
			newGenerator: newGenerator,
			opts: generateStatementOptions{
				artifactName: "artifact-name",
				files:        map[string]os.FileInfo{tmpFile.Name(): fileInfo{name: tmpFile.Name()}},
				artifactsWd:  tmpDir,
				jobID:        1000,
			},
			expected: func(g *artifactStatementGenerator, opts generateStatementOptions) (any, func()) {
				return testStatement(common.AppVersion.Revision, g, opts), func() {}
			},
		},
		"files subject doesn't exist": {
			newGenerator: newGenerator,
			opts: generateStatementOptions{
				artifactName: "artifact-name",
				files: map[string]os.FileInfo{
					tmpFile.Name(): fileInfo{name: tmpFile.Name()},
					"nonexisting":  fileInfo{name: "nonexisting"},
				},
				artifactsWd: tmpDir,
				jobID:       1000,
			},
			expectedError: os.ErrNotExist,
		},
		"non-regular file": {
			newGenerator: newGenerator,
			opts: generateStatementOptions{
				artifactName: "artifact-name",
				files: map[string]os.FileInfo{
					tmpFile.Name(): fileInfo{name: tmpFile.Name()},
					"dir":          fileInfo{name: "im-a-dir", mode: fs.ModeDir}},
				artifactsWd: tmpDir,
				jobID:       1000,
			},
			expected: func(g *artifactStatementGenerator, opts generateStatementOptions) (any, func()) {
				return testStatement(common.AppVersion.Revision, g, opts), func() {}
			},
		},
		"no parameters": {
			newGenerator: func(v string) *artifactStatementGenerator {
				g := newGenerator(v)
				g.Parameters = nil
				return g
			},
			opts: generateStatementOptions{
				artifactName: "artifact-name",
				files:        map[string]os.FileInfo{tmpFile.Name(): fileInfo{name: tmpFile.Name()}},
				artifactsWd:  tmpDir,
				jobID:        1000,
			},
			expected: func(g *artifactStatementGenerator, opts generateStatementOptions) (any, func()) {
				return testStatement(common.AppVersion.Revision, g, opts), func() {}
			},
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			for _, v := range []string{slsaProvenanceVersion1} {
				t.Run(v, func(t *testing.T) {
					g := tt.newGenerator(v)

					var expected any
					if tt.expected != nil {
						var cleanup func()
						expected, cleanup = tt.expected(g, tt.opts)
						defer cleanup()
					}

					f, err := g.generateStatementToFile(tt.opts)
					if tt.expectedError == nil {
						require.NoError(t, err)
					} else {
						assert.Empty(t, f)
						assert.ErrorIs(t, err, tt.expectedError)
						return
					}

					filename := filepath.Base(f)
					assert.Equal(t, fmt.Sprintf(artifactsStatementFormat, tt.opts.artifactName), filename)

					file, err := os.Open(f)
					require.NoError(t, err)
					defer file.Close()

					b, err := io.ReadAll(file)
					require.NoError(t, err)

					indented, err := protojson.MarshalOptions{Multiline: true, Indent: " "}.Marshal(expected.(*ita_v1.Statement))
					require.NoError(t, err)

					assert.Equal(t, string(indented), string(b))
					assert.Contains(t, string(indented), startedAtRFC3339)
					assert.Contains(t, string(indented), endedAtRFC3339)
				})
			}
		})
	}
}
// TestGeneratePredicateV1 verifies that the SLSA v1 predicate's BuildType
// embeds the (temporarily overridden) runner version.
func TestGeneratePredicateV1(t *testing.T) {
	gen := &artifactStatementGenerator{
		RunnerID:              1001,
		RepoURL:               "testurl",
		RepoDigest:            "testdigest",
		JobName:               "testjobname",
		ExecutorName:          "testexecutorname",
		RunnerName:            "testrunnername",
		Parameters:            []string{"testparam"},
		SLSAProvenanceVersion: slsaProvenanceVersion1,
	}
	startTime := time.Now()
	endTime := startTime.Add(time.Minute)

	// Override the global version for the duration of the test; restore on
	// exit so other tests aren't affected.
	originalVersion := common.AppVersion.Version
	testVersion := "vTest"
	common.AppVersion.Version = testVersion
	defer func() {
		common.AppVersion.Version = originalVersion
	}()

	actualPredicate, err := gen.generateSLSAv1Predicate(10001, startTime, endTime)
	require.NoError(t, err)

	expectedBuildType := fmt.Sprintf(attestationTypeFormat, testVersion)
	assert.Equal(t, expectedBuildType, actualPredicate.BuildDefinition.BuildType)
}
// directDownloadFlag returns a pointer to true when direct download should
// be requested for this attempt, or nil when the query parameter should be
// omitted entirely.
func (c *ArtifactsDownloaderCommand) directDownloadFlag(retry int) *bool {
	// We want to send `?direct_download=true`
	// Use direct download only on a first attempt
	if c.DirectDownload && retry == 0 {
		return &c.DirectDownload
	}

	// We don't want to send `?direct_download=false`
	return nil
}

// download fetches the job's artifacts archive into file, with progress
// metering, and maps the network-level download state onto sentinel errors
// (os.ErrNotExist, os.ErrPermission, os.ErrInvalid). A DownloadFailed state
// is wrapped in retryableErr so the retry helper attempts it again.
func (c *ArtifactsDownloaderCommand) download(file string, retry int) error {
	artifactsFile, err := os.Create(file)
	if err != nil {
		return fmt.Errorf("creating target file: %w", err)
	}

	writer := meter.NewWriter(
		artifactsFile,
		c.TransferMeterFrequency,
		meter.LabelledRateFormat(os.Stdout, "Downloading artifacts", meter.UnknownTotalSize),
	)
	// writer.Close() closes the underlying file; caller owns the writer and closes it once on return
	defer func() {
		_ = writer.Close()
	}()

	switch c.network.DownloadArtifacts(c.JobCredentials, writer, c.directDownloadFlag(retry)) {
	case common.DownloadSucceeded:
		return nil
	case common.DownloadNotFound:
		return os.ErrNotExist
	case common.DownloadForbidden, common.DownloadUnauthorized:
		return os.ErrPermission
	case common.DownloadFailed:
		return retryableErr{err: os.ErrInvalid}
	default:
		return os.ErrInvalid
	}
}

// Execute implements the artifacts-downloader CLI command: validate the job
// credentials, download the artifacts archive to a temporary staging file
// (retrying retryable failures), sniff the archive format from its magic
// bytes, and extract it into the current working directory. Any failure is
// fatal (logrus.Fatalln).
func (c *ArtifactsDownloaderCommand) Execute(cliContext *cli.Context) {
	log.SetRunnerFormatter()

	wd, err := os.Getwd()
	if err != nil {
		logrus.Fatalln("Unable to get working directory")
	}

	// Warn about each missing argument individually before failing once.
	if c.URL == "" {
		logrus.Warningln("Missing URL (--url)")
	}
	if c.Token == "" {
		logrus.Warningln("Missing runner credentials (--token)")
	}
	if c.ID <= 0 {
		logrus.Warningln("Missing build ID (--id)")
	}
	if c.ID <= 0 || c.Token == "" || c.URL == "" {
		logrus.Fatalln("Incomplete arguments")
	}

	// Create temporary file
	file, err := os.CreateTemp(c.StagingDir, "artifacts")
	if err != nil {
		logrus.Fatalln(err)
	}
	_ = file.Close()
	defer func() {
		_ = os.Remove(file.Name())
	}()

	// Download artifacts file
	err = c.doRetry(func(retry int) error {
		return c.download(file.Name(), retry)
	})
	if err != nil {
		logrus.Fatalln(err)
	}

	f, size, format, err := openArchive(file.Name())
	if err != nil {
		logrus.Fatalln(err)
	}
	defer f.Close()

	extractor, err := archive.NewExtractor(format, f, size, wd)
	if err != nil {
		logrus.Fatalln(err)
	}

	// Extract artifacts file
	err = extractor.Extract(context.Background())
	if err != nil {
		logrus.Fatalln(err)
	}
}
err = extractor.Extract(context.Background()) if err != nil { logrus.Fatalln(err) } } var ( zstMagic = []byte{0x28, 0xB5, 0x2F, 0xFD} gzipMagic = []byte{0x1F, 0x8B} ) func openArchive(filename string) (*os.File, int64, archive.Format, error) { format := archive.Zip f, err := os.Open(filename) if err != nil { return nil, 0, format, err } var magic [4]byte _, _ = f.Read(magic[:]) _, _ = f.Seek(0, io.SeekStart) switch { case bytes.HasPrefix(magic[:], zstMagic): format = archive.TarZstd case bytes.HasPrefix(magic[:], gzipMagic): format = archive.Gzip } fi, err := f.Stat() if err != nil { f.Close() return nil, 0, format, err } return f, fi.Size(), format, nil } ================================================ FILE: commands/helpers/artifacts_downloader_test.go ================================================ //go:build !integration package helpers import ( "archive/zip" "bytes" "net/http" "net/http/httptest" "os" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/helpers" ) var downloaderCredentials = common.JobCredentials{ ID: 1000, Token: "test", URL: "test", } func TestArtifactsDownloaderRequirements(t *testing.T) { removeHook := helpers.MakeFatalToPanic() defer removeHook() cmd := ArtifactsDownloaderCommand{} assert.Panics(t, func() { cmd.Execute(nil) }) } func TestArtifactsDownloader(t *testing.T) { testCases := map[string]struct { downloadState common.DownloadState directDownload bool stagingDir string expectedSuccess bool expectedDownloadCalled int expectedDirectDownloadCalled int }{ "download not found": { downloadState: common.DownloadNotFound, expectedSuccess: false, expectedDownloadCalled: 1, }, "download forbidden": { downloadState: common.DownloadForbidden, expectedSuccess: false, expectedDownloadCalled: 1, }, "download unauthorized": { downloadState: common.DownloadUnauthorized, expectedSuccess: false, 
// TestArtifactsDownloader drives the downloader command against a stub
// network for every terminal download state, asserting the retry/direct-
// download call counts and whether the archived file appears on disk.
func TestArtifactsDownloader(t *testing.T) {
	testCases := map[string]struct {
		downloadState                common.DownloadState
		directDownload               bool
		stagingDir                   string
		expectedSuccess              bool
		expectedDownloadCalled       int
		expectedDirectDownloadCalled int
	}{
		"download not found": {
			downloadState:          common.DownloadNotFound,
			expectedSuccess:        false,
			expectedDownloadCalled: 1,
		},
		"download forbidden": {
			downloadState:          common.DownloadForbidden,
			expectedSuccess:        false,
			expectedDownloadCalled: 1,
		},
		"download unauthorized": {
			downloadState:          common.DownloadUnauthorized,
			expectedSuccess:        false,
			expectedDownloadCalled: 1,
		},
		"retries are called": {
			downloadState:          common.DownloadFailed,
			expectedSuccess:        false,
			expectedDownloadCalled: 3,
		},
		"first try is always direct download": {
			downloadState:                common.DownloadFailed,
			directDownload:               true,
			expectedSuccess:              false,
			expectedDirectDownloadCalled: 1,
			expectedDownloadCalled:       3,
		},
		"downloads artifact without direct download if requested": {
			downloadState:                common.DownloadSucceeded,
			directDownload:               false,
			expectedSuccess:              true,
			expectedDirectDownloadCalled: 0,
			expectedDownloadCalled:       1,
		},
		"downloads artifact with direct download if requested": {
			downloadState:                common.DownloadSucceeded,
			directDownload:               true,
			expectedSuccess:              true,
			expectedDirectDownloadCalled: 1,
			expectedDownloadCalled:       1,
		},
		"setting invalid staging directory": {
			downloadState: common.DownloadSucceeded,
			stagingDir:    "/dev/null",
		},
	}

	removeHook := helpers.MakeFatalToPanic()
	defer removeHook()

	// ensure clean state
	os.Remove(artifactsTestArchivedFile)

	for testName, testCase := range testCases {
		OnEachZipArchiver(t, func(t *testing.T) {
			t.Run(testName, func(t *testing.T) {
				network := &testNetwork{
					downloadState: testCase.downloadState,
				}
				cmd := ArtifactsDownloaderCommand{
					JobCredentials: downloaderCredentials,
					DirectDownload: testCase.directDownload,
					network:        network,
					retryHelper: retryHelper{
						Retry: 2,
					},
					StagingDir: testCase.stagingDir,
				}

				// file is cleaned after running test
				defer os.Remove(artifactsTestArchivedFile)

				// Fatal log calls are converted to panics by MakeFatalToPanic,
				// so failure paths are asserted via Panics.
				if testCase.expectedSuccess {
					require.NotPanics(t, func() {
						cmd.Execute(nil)
					})

					assert.FileExists(t, artifactsTestArchivedFile)
				} else {
					require.Panics(t, func() {
						cmd.Execute(nil)
					})
				}

				assert.Equal(t, testCase.expectedDirectDownloadCalled, network.directDownloadCalled)
				assert.Equal(t, testCase.expectedDownloadCalled, network.downloadCalled)
			})
		})
	}
}
// Some version of urfave have a bug that causes it to balk when the value of an
// argument starts with a `-`. This test is here to ensure we don't up/down
// grade to version of urfave with this bug.
// See https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29448 and
// https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29193
func Test_URFavArgParsing(t *testing.T) {
	app := cli.NewApp()
	app.Name = "gitlab-runner-helper"
	app.Usage = "a GitLab Runner Helper"
	app.Version = common.AppVersion.ShortLine()
	app.Commands = []cli.Command{
		NewArtifactsDownloaderCommand(),
	}

	// A token whose leading '-' would trip the buggy urfave versions.
	jobToken := "-Abajdbajdbajb"

	defer os.Remove("foo.txt")

	// Stub server asserts the token survived argument parsing and serves a
	// small zip archive back.
	s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		assert.Equal(t, jobToken, r.Header.Get("Job-Token"))
		w.WriteHeader(http.StatusOK)
		zw := zip.NewWriter(w)
		defer zw.Close()
		w1, err := zw.Create("foo.txt")
		require.NoError(t, err)
		_, err = w1.Write(bytes.Repeat([]byte("198273qhnjbqwdjbqwe2109u3abcdef3"), 1024*1024))
		require.NoError(t, err)
	}))
	defer s.Close()

	args := []string{
		"gitlab-runner-helper",
		"artifacts-downloader",
		"--url", s.URL,
		"--token", jobToken,
		"--id", "12345",
	}

	err := app.Run(args)
	assert.NoError(t, err)
	if err != nil {
		assert.NotContains(t, err.Error(), "WARNING: Missing build ID (--id)")
		assert.NotContains(t, err.Error(), "FATAL: Incomplete arguments ")
	}
}
uploadName string uploadType string uploadedFiles []string } func (m *testNetwork) DownloadArtifacts( config common.JobCredentials, artifactsFile io.WriteCloser, directDownload *bool, ) common.DownloadState { m.downloadCalled++ if directDownload != nil && *directDownload { m.directDownloadCalled++ } if m.downloadState == common.DownloadSucceeded { defer func() { _ = artifactsFile.Close() }() archive := zip.NewWriter(artifactsFile) _, _ = archive.Create(artifactsTestArchivedFile) _ = archive.Close() } return m.downloadState } func (m *testNetwork) consumeZipUpload(reader io.Reader) common.UploadState { var buffer bytes.Buffer _, _ = io.Copy(&buffer, reader) archive, err := zip.NewReader(bytes.NewReader(buffer.Bytes()), int64(buffer.Len())) if err != nil { logrus.Warningln(err) return common.UploadForbidden } for _, file := range archive.File { m.uploadedFiles = append(m.uploadedFiles, file.Name) } m.uploadFormat = spec.ArtifactFormatZip return m.uploadState } func (m *testNetwork) consumeGzipUpload(reader io.Reader) common.UploadState { var buffer bytes.Buffer _, _ = io.Copy(&buffer, reader) stream := bytes.NewReader(buffer.Bytes()) gz, err := gzip.NewReader(stream) gz.Multistream(false) if err != nil { logrus.Warningln("Invalid gzip stream") return common.UploadForbidden } // Read multiple streams for { _, err = io.Copy(io.Discard, gz) if err != nil { logrus.Warningln("Invalid gzip stream") return common.UploadForbidden } m.uploadedFiles = append(m.uploadedFiles, gz.Name) if gz.Reset(stream) == io.EOF { break } gz.Multistream(false) } m.uploadFormat = spec.ArtifactFormatGzip return m.uploadState } func (m *testNetwork) consumeRawUpload(reader io.Reader) common.UploadState { _, err := io.Copy(io.Discard, reader) if err != nil { return common.UploadFailed } m.uploadedFiles = append(m.uploadedFiles, "raw") m.uploadFormat = spec.ArtifactFormatRaw return m.uploadState } func (m *testNetwork) UploadRawArtifacts( config common.JobCredentials, bodyProvider 
common.ContentProvider, options common.ArtifactsOptions, ) (common.UploadState, string) { m.uploadCalled++ if bodyProvider == nil { return m.uploadState, "" } reader, err := bodyProvider.GetReader() if err != nil { return common.UploadFailed, err.Error() } if m.uploadState == common.UploadSucceeded { m.uploadType = options.Type m.uploadName = options.BaseName switch options.Format { case spec.ArtifactFormatZip, spec.ArtifactFormatDefault: return m.consumeZipUpload(reader), "" case spec.ArtifactFormatGzip: return m.consumeGzipUpload(reader), "" case spec.ArtifactFormatRaw: return m.consumeRawUpload(reader), "" default: return common.UploadForbidden, "" } } return m.uploadState, "" } func writeTestFile(t *testing.T, fileName string) { err := os.WriteFile(fileName, nil, 0o600) require.NoError(t, err, "Writing file:", fileName) } func removeTestFile(t *testing.T, fileName string) { err := os.Remove(fileName) require.NoError(t, err, "Removing file:", fileName) } ================================================ FILE: commands/helpers/artifacts_uploader.go ================================================ package helpers import ( "context" "errors" "fmt" "io" "net/url" "os" "path/filepath" "time" "github.com/sirupsen/logrus" "github.com/urfave/cli" "mvdan.cc/sh/v3/shell" "gitlab.com/gitlab-org/gitlab-runner/commands" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/meter" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers/retry" "gitlab.com/gitlab-org/gitlab-runner/log" "gitlab.com/gitlab-org/gitlab-runner/network" ) const ( DefaultUploadName = "default" defaultTries = 3 serviceUnavailableTries = 6 ) var ( errServiceUnavailable = errors.New("service unavailable") errTooLarge = errors.New("too large") ) type ArtifactsUploaderCommand struct { common.JobCredentials fileArchiver meter.TransferMeterCommand 
artifactStatementGenerator newNetwork func() common.Network Name string `long:"name" description:"The name of the archive"` ExpireIn string `long:"expire-in" description:"When to expire artifacts"` Format spec.ArtifactFormat `long:"artifact-format" description:"Format of generated artifacts"` Type string `long:"artifact-type" description:"Type of generated artifacts"` CompressionLevel string `long:"compression-level" env:"ARTIFACT_COMPRESSION_LEVEL" description:"Compression level (fastest, fast, default, slow, slowest)"` Timeout time.Duration `long:"timeout" description:"Timeout for the upload operation"` ResponseHeaderTimeout time.Duration `long:"response-header-timeout" description:"Timeout for response headers"` CiDebugTrace bool `long:"ci-debug-trace" env:"CI_DEBUG_TRACE" description:"enable debug trace logging"` } func NewArtifactsUploaderCommand() cli.Command { cmd := &ArtifactsUploaderCommand{ Name: "artifacts", Timeout: common.DefaultArtifactUploadTimeout, ResponseHeaderTimeout: common.DefaultArtifactResponseHeaderTimeout, } cmd.newNetwork = func() common.Network { return network.NewGitLabClient( network.WithCertificateDirectory(commands.GetDefaultCertificateDirectory()), network.WithHttpClientOptions(network.HttpClientOptions{ Timeout: &cmd.Timeout, ResponseHeaderTimeout: &cmd.ResponseHeaderTimeout, }), ) } return common.NewCommand( "artifacts-uploader", "create and upload build artifacts (internal)", cmd, ) } func (c *ArtifactsUploaderCommand) artifactFilename(name string, format spec.ArtifactFormat) string { name = filepath.Base(name) if name == "" || name == "." { name = DefaultUploadName } switch format { case spec.ArtifactFormatZip, spec.ArtifactFormatZipZstd: return name + ".zip" case spec.ArtifactFormatGzip: return name + ".gz" case spec.ArtifactFormatTarZstd: return name + ".tar.zst" } return name } // createBodyProvider returns the artifact name and the stream provider for the request body. 
func (c *ArtifactsUploaderCommand) createBodyProvider() (string, common.ContentProvider) { if len(c.files) == 0 { return "", nil } format := c.Format if format == spec.ArtifactFormatDefault { format = spec.ArtifactFormatZip } filename := c.artifactFilename(c.Name, format) // Create a StreamProvider that doesn't know its content length in advance streamProvider := common.StreamProvider{ ReaderFactory: func() (io.ReadCloser, error) { pr, pw := io.Pipe() archiver, archiveErr := archive.NewArchiver(archive.Format(format), pw, c.wd, GetCompressionLevel(c.CompressionLevel)) if archiveErr != nil { pr.CloseWithError(archiveErr) return nil, archiveErr } // Start a new Goroutine to create the archive for this attempt go func() { archiveErr := archiver.Archive(context.Background(), c.files) pw.CloseWithError(archiveErr) }() meteredReader := meter.NewReader( pr, c.TransferMeterFrequency, meter.LabelledRateFormat(os.Stdout, "Uploading artifacts", meter.UnknownTotalSize), ) return meteredReader, nil }, } return filename, streamProvider } func (c *ArtifactsUploaderCommand) Run() error { artifactsName, bodyProvider := c.createBodyProvider() if bodyProvider == nil { logrus.Errorln("No files to upload") return nil } // Create the archive options := common.ArtifactsOptions{ BaseName: artifactsName, ExpireIn: c.ExpireIn, Format: c.Format, Type: c.Type, LogResponseDetails: c.CiDebugTrace, } // Upload the data resp, location := c.newNetwork().UploadRawArtifacts(c.JobCredentials, bodyProvider, options) switch resp { case common.UploadSucceeded: return nil case common.UploadRedirected: return c.handleRedirect(location) case common.UploadForbidden: return os.ErrPermission case common.UploadTooLarge: return errTooLarge case common.UploadFailed: return retryableErr{err: os.ErrInvalid} case common.UploadServiceUnavailable: return retryableErr{err: errServiceUnavailable} default: return os.ErrInvalid } } func (c *ArtifactsUploaderCommand) handleRedirect(location string) error { newURL, err := 
url.Parse(location) if err != nil { return retryableErr{err: fmt.Errorf("parsing new location URL: %w", err)} } newURL.RawQuery = "" newURL.Path = "" c.JobCredentials.URL = newURL.String() logrus.WithField("location", location). WithField("new-url", c.JobCredentials.URL). Info("Upload request redirected") return retryableErr{err: fmt.Errorf("request redirected")} } func (c *ArtifactsUploaderCommand) shouldRetry(tries int, err error) bool { var errAs retryableErr if !errors.As(err, &errAs) { return false } maxTries := defaultTries if errors.Is(errAs, errServiceUnavailable) { maxTries = serviceUnavailableTries } if tries >= maxTries { return false } return true } func (c *ArtifactsUploaderCommand) Execute(*cli.Context) { log.SetRunnerFormatter() c.normalizeArgs() // Enumerate files err := c.enumerate() if err != nil { logrus.Fatalln(err) } if c.GenerateArtifactsMetadata { logrus.Infof("Generating artifacts statement") metadataFile, err := c.generateStatementToFile(generateStatementOptions{ artifactName: c.Name, files: c.files, artifactsWd: c.wd, jobID: c.ID, }) if err != nil { logrus.Fatalln(err) } c.process(metadataFile) } // If the upload fails, exit with a non-zero exit code to indicate an issue? if err := retry.WithFn(c, c.Run).Run(); err != nil { logrus.Fatalln(err) } } func (c *ArtifactsUploaderCommand) NewRetry() *retry.Retry { return retry. New(). WithCheck(c.shouldRetry). 
WithLogrus(logrus.WithField("context", "artifacts-uploader")) } func (c *ArtifactsUploaderCommand) normalizeArgs() { if c.URL == "" || c.Token == "" { logrus.Fatalln("Missing runner credentials") } if c.ID <= 0 { logrus.Fatalln("Missing build ID") } if name, err := shell.Expand(c.Name, nil); err != nil { logrus.Warnf("invalid artifact name: %v", err) } else { c.Name = name } for idx := range c.Paths { if path, err := shell.Expand(c.Paths[idx], nil); err != nil { logrus.Warnf("invalid path %q: %v", path, err) } else { c.Paths[idx] = path } } for idx := range c.Exclude { if path, err := shell.Expand(c.Exclude[idx], nil); err != nil { logrus.Warnf("invalid path %q: %v", path, err) } else { c.Exclude[idx] = path } } } ================================================ FILE: commands/helpers/artifacts_uploader_integration_test.go ================================================ //go:build integration package helpers import ( "bytes" "context" "fmt" "io/fs" "net/http" "net/http/httptest" "os" "path/filepath" "strings" "testing" "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive/fastzip" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers" "gitlab.com/gitlab-org/gitlab-runner/network" ) func TestArchiveUploadExpandArgs(t *testing.T) { srv := httptest.NewServer(nil) t.Cleanup(srv.Close) t.Setenv("expand", "expanded") cmd := &ArtifactsUploaderCommand{ Name: "artifact $expand", JobCredentials: common.JobCredentials{ ID: 12345, Token: "token", URL: srv.URL, }, } cmd.Paths = []string{"unexpanded", "path/${expand}/${expand:1:3}"} cmd.Exclude = []string{"unexpanded", "path/$expand/${foo:-bar}"} cmd.Execute(&cli.Context{}) assert.Equal(t, "artifact expanded", cmd.Name) assert.Equal(t, []string{"unexpanded", "path/expanded/xpa"}, cmd.Paths) assert.Equal(t, 
[]string{"unexpanded", "path/expanded/bar"}, cmd.Exclude) } func TestArchiveUploadRedirect(t *testing.T) { finalRequestReceived := false finalServer := httptest.NewServer( assertRequestPathAndMethod(t, "final", finalServerHandler(t, &finalRequestReceived, "")), ) defer finalServer.Close() redirectingServer := httptest.NewServer( assertRequestPathAndMethod(t, "redirection", redirectingServerHandler(finalServer.URL)), ) defer redirectingServer.Close() cmd := &ArtifactsUploaderCommand{ JobCredentials: common.JobCredentials{ ID: 12345, Token: "token", URL: redirectingServer.URL, }, Name: "artifacts", Format: spec.ArtifactFormatZip, CompressionLevel: "fastest", newNetwork: func() common.Network { return network.NewGitLabClient() }, fileArchiver: fileArchiver{ Paths: []string{ filepath.Join(".", "testdata", "test-artifacts"), }, }, } defer helpers.MakeFatalToPanic()() assert.NotPanics(t, func() { cmd.Execute(&cli.Context{}) }, "expected command not to log fatal") assert.True(t, finalRequestReceived) } func TestArchiveUploadLogging(t *testing.T) { requestReceived := false resBody := `{"message": "some message", "debug": {"some": "data from proxy or elsewhere"}}` tests := map[string]struct { ciDebugTrace bool verify func(t *testing.T, logs string) }{ "with response logging": { ciDebugTrace: true, verify: func(t *testing.T, logs string) { assert.Contains(t, logs, resBody, "expected the raw body to be logged") assert.Contains(t, logs, "header[X-Test-Blupp]", "expected the custom response header to be logged") assert.Contains(t, logs, "[Blapp]", "expected the custom response header value to be logged") }, }, "without response logging": { verify: func(t *testing.T, logs string) { assert.NotContains(t, logs, resBody, "expected the raw body not to be logged") assert.NotContains(t, logs, "header[", "expected no header to be logged") }, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { srv := httptest.NewServer( assertRequestPathAndMethod(t, "final", 
finalServerHandler(t, &requestReceived, resBody)), ) t.Cleanup(srv.Close) t.Cleanup(helpers.MakeFatalToPanic()) logger := logrus.StandardLogger() orgLogOutput := logger.Out t.Cleanup(func() { logger.SetOutput(orgLogOutput) }) logBuffer := &bytes.Buffer{} logger.SetOutput(logBuffer) cmd := &ArtifactsUploaderCommand{ CiDebugTrace: test.ciDebugTrace, JobCredentials: common.JobCredentials{ ID: 12345, Token: "token", URL: srv.URL, }, Name: "artifacts", Format: spec.ArtifactFormatZip, CompressionLevel: "fastest", newNetwork: func() common.Network { return network.NewGitLabClient() }, fileArchiver: fileArchiver{ Paths: []string{ filepath.Join(".", "testdata", "test-artifacts"), }, }, } assert.NotPanics(t, func() { cmd.Execute(&cli.Context{}) }, "expected command not to log fatal") assert.True(t, requestReceived, "expected to receive the upload") test.verify(t, logBuffer.String()) }) } } func assertRequestPathAndMethod(t *testing.T, handlerName string, handler http.HandlerFunc) http.HandlerFunc { return func(rw http.ResponseWriter, r *http.Request) { assert.Equal(t, http.MethodPost, r.Method) assert.Equal(t, "/api/v4/jobs/12345/artifacts", r.URL.Path, "server handler: %s", handlerName) assert.NotEqual(t, "/api/v4/jobs/12345/jobs/12345/artifacts", r.URL.Path, "server handler: %s", handlerName) handler(rw, r) } } func redirectingServerHandler(finalServerURL string) http.HandlerFunc { return func(rw http.ResponseWriter, r *http.Request) { rw.Header().Set("Location", fmt.Sprintf("%s%s", finalServerURL, r.RequestURI)) rw.WriteHeader(http.StatusTemporaryRedirect) } } func finalServerHandler(t *testing.T, finalRequestReceived *bool, resBody string) http.HandlerFunc { return func(rw http.ResponseWriter, r *http.Request) { dir := t.TempDir() receiveFile(t, r, dir) err := filepath.Walk(dir, func(path string, info fs.FileInfo, err error) error { if err != nil { return err } if info.IsDir() { return nil } fileName := info.Name() fileContentBytes, err := os.ReadFile(path) if err != nil 
{ return err } assert.Equal(t, fileName, strings.TrimSpace(string(fileContentBytes))) return nil }) assert.NoError(t, err) *finalRequestReceived = true rw.Header().Set("Content-Type", "application/json") rw.Header().Set("X-Test-Blupp", "Blapp") rw.WriteHeader(http.StatusCreated) fmt.Fprint(rw, resBody) } } func receiveFile(t *testing.T, r *http.Request, targetDir string) { err := r.ParseMultipartForm(1024) require.NoError(t, err) formFiles := r.MultipartForm.File["file"] require.Len(t, formFiles, 1) formFile := formFiles[0] assert.Equal(t, "artifacts.zip", formFile.Filename) f, err := formFile.Open() require.NoError(t, err) defer func() { _ = f.Close() }() extractor, err := fastzip.NewExtractor(f, formFile.Size, targetDir) require.NoError(t, err) err = extractor.Extract(context.Background()) require.NoError(t, err) } ================================================ FILE: commands/helpers/artifacts_uploader_test.go ================================================ //go:build !integration package helpers import ( "errors" "io" "os" "testing" "time" "github.com/stretchr/testify/assert" mock "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers" "gitlab.com/gitlab-org/gitlab-runner/network" ) var UploaderCredentials = common.JobCredentials{ ID: 1000, Token: "test", URL: "test", } // Create a function that returns a Network interface with injected test behavior func createTestNewNetwork(testNet *testNetwork) func() common.Network { return func() common.Network { return testNet } } func TestArtifactsUploaderRequirements(t *testing.T) { removeHook := helpers.MakeFatalToPanic() defer removeHook() cmd := ArtifactsUploaderCommand{} assert.Panics(t, func() { cmd.Execute(nil) }) } func TestArtifactsUploaderTooLarge(t *testing.T) { 
testNet := &testNetwork{ uploadState: common.UploadTooLarge, } cmd := ArtifactsUploaderCommand{ JobCredentials: UploaderCredentials, newNetwork: createTestNewNetwork(testNet), fileArchiver: fileArchiver{ Paths: []string{artifactsTestArchivedFile}, }, } writeTestFile(t, artifactsTestArchivedFile) defer os.Remove(artifactsTestArchivedFile) removeHook := helpers.MakeFatalToPanic() defer removeHook() assert.Panics(t, func() { cmd.Execute(nil) }) assert.Equal(t, 1, testNet.uploadCalled) } func TestArtifactsUploaderForbidden(t *testing.T) { testNet := &testNetwork{ uploadState: common.UploadForbidden, } cmd := ArtifactsUploaderCommand{ JobCredentials: UploaderCredentials, newNetwork: createTestNewNetwork(testNet), fileArchiver: fileArchiver{ Paths: []string{artifactsTestArchivedFile}, }, } writeTestFile(t, artifactsTestArchivedFile) defer os.Remove(artifactsTestArchivedFile) removeHook := helpers.MakeFatalToPanic() defer removeHook() assert.Panics(t, func() { cmd.Execute(nil) }) assert.Equal(t, 1, testNet.uploadCalled) } func TestArtifactsUploaderRetry(t *testing.T) { OnEachZipArchiver(t, func(t *testing.T) { testNet := &testNetwork{ uploadState: common.UploadFailed, } cmd := ArtifactsUploaderCommand{ JobCredentials: UploaderCredentials, newNetwork: createTestNewNetwork(testNet), fileArchiver: fileArchiver{ Paths: []string{artifactsTestArchivedFile}, }, } writeTestFile(t, artifactsTestArchivedFile) defer os.Remove(artifactsTestArchivedFile) removeHook := helpers.MakeFatalToPanic() defer removeHook() assert.Panics(t, func() { cmd.Execute(nil) }) assert.Equal(t, defaultTries, testNet.uploadCalled) }) } func TestArtifactsUploaderDefaultSucceeded(t *testing.T) { OnEachZipArchiver(t, func(t *testing.T) { testNet := &testNetwork{ uploadState: common.UploadSucceeded, } cmd := ArtifactsUploaderCommand{ JobCredentials: UploaderCredentials, newNetwork: createTestNewNetwork(testNet), fileArchiver: fileArchiver{ Paths: []string{artifactsTestArchivedFile}, }, } writeTestFile(t, 
artifactsTestArchivedFile) defer os.Remove(artifactsTestArchivedFile) cmd.Execute(nil) assert.Equal(t, 1, testNet.uploadCalled) assert.Equal(t, spec.ArtifactFormatZip, testNet.uploadFormat) assert.Equal(t, DefaultUploadName+".zip", testNet.uploadName) assert.Empty(t, testNet.uploadType) }) } func TestArtifactsUploaderZipSucceeded(t *testing.T) { OnEachZipArchiver(t, func(t *testing.T) { testNet := &testNetwork{ uploadState: common.UploadSucceeded, } cmd := ArtifactsUploaderCommand{ JobCredentials: UploaderCredentials, Format: spec.ArtifactFormatZip, Name: "my-release", Type: "my-type", newNetwork: createTestNewNetwork(testNet), fileArchiver: fileArchiver{ Paths: []string{artifactsTestArchivedFile}, }, } writeTestFile(t, artifactsTestArchivedFile) defer os.Remove(artifactsTestArchivedFile) cmd.Execute(nil) assert.Equal(t, 1, testNet.uploadCalled) assert.Equal(t, spec.ArtifactFormatZip, testNet.uploadFormat) assert.Equal(t, "my-release.zip", testNet.uploadName) assert.Equal(t, "my-type", testNet.uploadType) assert.Contains(t, testNet.uploadedFiles, artifactsTestArchivedFile) }) } func TestArtifactsUploaderGzipSendsMultipleFiles(t *testing.T) { testNet := &testNetwork{ uploadState: common.UploadSucceeded, } cmd := ArtifactsUploaderCommand{ JobCredentials: UploaderCredentials, Format: spec.ArtifactFormatGzip, Name: "junit.xml", Type: "junit", newNetwork: createTestNewNetwork(testNet), fileArchiver: fileArchiver{ Paths: []string{artifactsTestArchivedFile, artifactsTestArchivedFile2}, }, } writeTestFile(t, artifactsTestArchivedFile) defer os.Remove(artifactsTestArchivedFile) writeTestFile(t, artifactsTestArchivedFile2) defer os.Remove(artifactsTestArchivedFile) cmd.Execute(nil) assert.Equal(t, 1, testNet.uploadCalled) assert.Equal(t, "junit.xml.gz", testNet.uploadName) assert.Equal(t, spec.ArtifactFormatGzip, testNet.uploadFormat) assert.Equal(t, "junit", testNet.uploadType) assert.Contains(t, testNet.uploadedFiles, artifactsTestArchivedFile) assert.Contains(t, 
testNet.uploadedFiles, artifactsTestArchivedFile2) } func TestArtifactsUploaderRawSucceeded(t *testing.T) { testNet := &testNetwork{ uploadState: common.UploadSucceeded, } cmd := ArtifactsUploaderCommand{ JobCredentials: UploaderCredentials, Format: spec.ArtifactFormatRaw, Name: "my-release", Type: "my-type", newNetwork: createTestNewNetwork(testNet), fileArchiver: fileArchiver{ Paths: []string{artifactsTestArchivedFile}, }, } writeTestFile(t, artifactsTestArchivedFile) defer os.Remove(artifactsTestArchivedFile) cmd.Execute(nil) assert.Equal(t, 1, testNet.uploadCalled) assert.Equal(t, spec.ArtifactFormatRaw, testNet.uploadFormat) assert.Equal(t, "my-release", testNet.uploadName) assert.Equal(t, "my-type", testNet.uploadType) assert.Contains(t, testNet.uploadedFiles, "raw") } func TestArtifactsUploaderRawDoesNotSendMultipleFiles(t *testing.T) { testNet := &testNetwork{ uploadState: common.UploadSucceeded, } cmd := ArtifactsUploaderCommand{ JobCredentials: UploaderCredentials, Format: spec.ArtifactFormatRaw, Name: "junit.xml", Type: "junit", newNetwork: createTestNewNetwork(testNet), fileArchiver: fileArchiver{ Paths: []string{artifactsTestArchivedFile, artifactsTestArchivedFile2}, }, } writeTestFile(t, artifactsTestArchivedFile) defer os.Remove(artifactsTestArchivedFile) writeTestFile(t, artifactsTestArchivedFile2) defer os.Remove(artifactsTestArchivedFile2) removeHook := helpers.MakeFatalToPanic() defer removeHook() assert.Panics(t, func() { cmd.Execute(nil) }) } func TestArtifactsUploaderNoFilesDoNotGenerateError(t *testing.T) { testNet := &testNetwork{ uploadState: common.UploadSucceeded, } cmd := ArtifactsUploaderCommand{ JobCredentials: UploaderCredentials, newNetwork: createTestNewNetwork(testNet), fileArchiver: fileArchiver{}, } removeHook := helpers.MakeFatalToPanic() defer removeHook() assert.NotPanics(t, func() { cmd.Execute(nil) }) } func TestArtifactsUploaderServiceUnavailable(t *testing.T) { testNet := &testNetwork{ uploadState: 
common.UploadServiceUnavailable, } cmd := ArtifactsUploaderCommand{ JobCredentials: UploaderCredentials, newNetwork: createTestNewNetwork(testNet), fileArchiver: fileArchiver{ Paths: []string{artifactsTestArchivedFile}, }, } writeTestFile(t, artifactsTestArchivedFile) defer os.Remove(artifactsTestArchivedFile) removeHook := helpers.MakeFatalToPanic() defer removeHook() assert.Panics(t, func() { cmd.Execute(nil) }) assert.Equal(t, serviceUnavailableTries, testNet.uploadCalled) } func TestArtifactsExcludedPaths(t *testing.T) { testNet := &testNetwork{ uploadState: common.UploadSucceeded, } cmd := ArtifactsUploaderCommand{ JobCredentials: UploaderCredentials, newNetwork: createTestNewNetwork(testNet), Format: spec.ArtifactFormatRaw, fileArchiver: fileArchiver{ Paths: []string{artifactsTestArchivedFile}, Exclude: []string{"something/**"}, }, } writeTestFile(t, artifactsTestArchivedFile) defer os.Remove(artifactsTestArchivedFile) cmd.Execute(nil) assert.Equal(t, 1, testNet.uploadCalled) } func TestFileArchiverCompressionLevel(t *testing.T) { writeTestFile(t, artifactsTestArchivedFile) defer os.Remove(artifactsTestArchivedFile) testNet := &testNetwork{ uploadState: common.UploadSucceeded, } for _, expectedLevel := range []string{"fastest", "fast", "default", "slow", "slowest"} { t.Run(expectedLevel, func(t *testing.T) { mockArchiver := archive.NewMockArchiver(t) // Save previous archiver and restore it after test to prevent // goroutine assertions from affecting subsequent tests prevArchiver, _ := archive.Register( "zip", func(w io.Writer, dir string, level archive.CompressionLevel) (archive.Archiver, error) { assert.Equal(t, GetCompressionLevel(expectedLevel), level) return mockArchiver, nil }, nil, ) defer func() { archive.Register("zip", prevArchiver, nil) }() mockArchiver.On("Archive", mock.Anything, mock.Anything).Return(nil) cmd := ArtifactsUploaderCommand{ JobCredentials: UploaderCredentials, newNetwork: createTestNewNetwork(testNet), Format: 
spec.ArtifactFormatZip, fileArchiver: fileArchiver{ Paths: []string{artifactsTestArchivedFile}, }, CompressionLevel: expectedLevel, } assert.NoError(t, cmd.enumerate()) _, bodyProvider := cmd.createBodyProvider() r, err := bodyProvider.GetReader() require.NoError(t, err) defer r.Close() _, _ = io.Copy(io.Discard, r) }) } } func TestArtifactUploaderCommandShouldRetry(t *testing.T) { tests := map[string]struct { err error tries int expectedShouldRetry bool }{ "no error, first try": { err: nil, tries: 1, expectedShouldRetry: false, }, "random error, first try": { err: errors.New("err"), tries: 1, expectedShouldRetry: false, }, "retryable error, first try": { err: retryableErr{}, tries: 1, expectedShouldRetry: true, }, "retryable error, max tries": { err: retryableErr{}, tries: defaultTries, expectedShouldRetry: false, }, "retryable error, over max tries limit": { err: retryableErr{}, tries: defaultTries + 10, expectedShouldRetry: false, }, "retryable error, before reaching service unavailable tries": { err: retryableErr{err: errServiceUnavailable}, tries: serviceUnavailableTries - 1, expectedShouldRetry: true, }, "retryable error service unavailable, max tries": { err: retryableErr{err: errServiceUnavailable}, tries: serviceUnavailableTries, expectedShouldRetry: false, }, "retryable error service unavailable, over max errors limit": { err: retryableErr{err: errServiceUnavailable}, tries: serviceUnavailableTries + 10, expectedShouldRetry: false, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { r := ArtifactsUploaderCommand{} assert.Equal(t, tt.expectedShouldRetry, r.shouldRetry(tt.tries, tt.err)) }) } } func TestNewArtifactsUploaderCommandDefaultTimeouts(t *testing.T) { cmd := NewArtifactsUploaderCommand() var capturedTimeout, capturedResponseHeaderTimeout time.Duration cmd.Action = func(c *cli.Context) { capturedTimeout = c.Duration("timeout") capturedResponseHeaderTimeout = c.Duration("response-header-timeout") } app := cli.NewApp() app.Commands = 
[]cli.Command{cmd} err := app.Run([]string{"app", "artifacts-uploader", "--url", "https://example.com", "--token", "test-token", "--id", "1", }) require.NoError(t, err) assert.Equal(t, common.DefaultArtifactUploadTimeout, capturedTimeout) assert.Equal(t, common.DefaultArtifactResponseHeaderTimeout, capturedResponseHeaderTimeout) } type timeoutTestFixture struct { timeout time.Duration responseHeaderTimeout time.Duration mockNetwork *testNetwork executeCommand bool expectedError bool } func (f *timeoutTestFixture) setupCommand() *ArtifactsUploaderCommand { cmd := &ArtifactsUploaderCommand{ JobCredentials: UploaderCredentials, Timeout: f.timeout, ResponseHeaderTimeout: f.responseHeaderTimeout, fileArchiver: fileArchiver{ Paths: []string{artifactsTestArchivedFile}, }, } if f.mockNetwork != nil { cmd.newNetwork = createTestNewNetwork(f.mockNetwork) } else { // Use real network client creation to test timeout value propagation cmd.newNetwork = func() common.Network { return network.NewGitLabClient( network.WithHttpClientOptions(network.HttpClientOptions{ Timeout: &cmd.Timeout, ResponseHeaderTimeout: &cmd.ResponseHeaderTimeout, }), ) } } return cmd } func TestArtifactsUploaderCommandTimeouts(t *testing.T) { tests := map[string]struct { fixture *timeoutTestFixture expectedTimeout time.Duration expectedResponseHeaderTimeout time.Duration expectedUploadCalled int }{ "uses timeout values when creating network client": { fixture: &timeoutTestFixture{ timeout: time.Hour, responseHeaderTimeout: 10 * time.Minute, executeCommand: false, }, expectedTimeout: time.Hour, expectedResponseHeaderTimeout: 10 * time.Minute, }, "zero timeout values work": { fixture: &timeoutTestFixture{ timeout: 0, responseHeaderTimeout: 0, executeCommand: false, }, expectedTimeout: 0, expectedResponseHeaderTimeout: 0, }, "timeout values passed to network client when no injected network": { fixture: &timeoutTestFixture{ timeout: time.Minute, responseHeaderTimeout: 30 * time.Second, executeCommand: true, 
expectedError: true, }, expectedTimeout: time.Minute, expectedResponseHeaderTimeout: 30 * time.Second, }, "injected network takes precedence over timeout values": { fixture: &timeoutTestFixture{ timeout: time.Hour, responseHeaderTimeout: 10 * time.Minute, mockNetwork: &testNetwork{ uploadState: common.UploadSucceeded, }, executeCommand: true, expectedError: false, }, expectedTimeout: time.Hour, expectedResponseHeaderTimeout: 10 * time.Minute, expectedUploadCalled: 1, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { writeTestFile(t, artifactsTestArchivedFile) defer os.Remove(artifactsTestArchivedFile) cmd := tt.fixture.setupCommand() // Verify timeout values are set correctly assert.Equal(t, tt.expectedTimeout, cmd.Timeout) assert.Equal(t, tt.expectedResponseHeaderTimeout, cmd.ResponseHeaderTimeout) // Execute command if required by the test case if tt.fixture.executeCommand { err := cmd.enumerate() require.NoError(t, err) err = cmd.Run() if tt.fixture.expectedError { assert.Error(t, err) } else { assert.NoError(t, err) } if tt.fixture.mockNetwork != nil { assert.Equal(t, tt.expectedUploadCalled, tt.fixture.mockNetwork.uploadCalled) } } }) } } ================================================ FILE: commands/helpers/cache_archiver.go ================================================ package helpers import ( "bufio" "context" "encoding/json" "errors" "fmt" "io" "io/fs" "net/http" "net/url" "os" "path/filepath" "strings" "time" "github.com/sirupsen/logrus" "github.com/urfave/cli" "gocloud.dev/blob" _ "gocloud.dev/blob/azureblob" // Needed to register the Azure driver _ "gocloud.dev/blob/s3blob" // Needed to register the AWS S3 driver "mvdan.cc/sh/v3/shell" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/meter" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" url_helpers 
"gitlab.com/gitlab-org/gitlab-runner/helpers/url" "gitlab.com/gitlab-org/gitlab-runner/log" ) type CacheArchiverCommand struct { fileArchiver retryHelper meter.TransferMeterCommand File string `long:"file" description:"The path to file"` AlternateFile string `long:"alternate-file" description:"(temporary) Alternate local cache file path (e.g. unhashed name) to rename to --file if --file does not exist"` URL string `long:"url" description:"URL of remote cache resource (pre-signed URL)"` CheckURL string `long:"check-url" description:"(temporary) Pre-signed HEAD URL to check whether the primary cache object already exists"` GoCloudURL string `long:"gocloud-url" description:"Go Cloud URL of remote cache resource (requires credentials)"` Timeout int `long:"timeout" description:"Overall timeout for cache uploading request (in minutes)"` Headers []string `long:"header" description:"HTTP headers to send with PUT request (in form of 'key:value')"` Metadata metadata `long:"metadata" env:"CACHE_METADATA" description:"Metadata for the cache artifact (JSON encoded key-value-pairs, e.g. '{\"foo\":\"bar\",\"blerp\":\"blip\"}')"` CompressionLevel string `long:"compression-level" env:"CACHE_COMPRESSION_LEVEL" description:"Compression level (fastest, fast, default, slow, slowest)"` CompressionFormat string `long:"compression-format" env:"CACHE_COMPRESSION_FORMAT" description:"Compression format (zip, tarzstd)"` MaxUploadedArchiveSize int64 `long:"max-uploaded-archive-size" env:"CACHE_MAX_UPLOADED_ARCHIVE_SIZE" description:"Limit the size of the cache archive being uploaded to cloud storage, in bytes."` EnvFile string `long:"env-file" description:"Filename containing environment variables to read"` // Transfer options (all backends: presigned S3, GoCloud S3/Azure/GCS). 
TransferBufferSize int `long:"transfer-buffer-size" env:"CACHE_TRANSFER_BUFFER_SIZE" description:"Buffer size in bytes for streaming cache upload/download (default 4 MiB)"` ChunkSize int `long:"chunk-size" env:"CACHE_CHUNK_SIZE" description:"Part/chunk size in bytes for GoCloud upload when FF_USE_PARALLEL_CACHE_TRANSFER is enabled (default 16 MiB)"` Concurrency int `long:"concurrency" env:"CACHE_CONCURRENCY" description:"Concurrent parts for GoCloud multipart upload when FF_USE_PARALLEL_CACHE_TRANSFER is enabled (default 16; otherwise 1)"` client *CacheClient mux *blob.URLMux } func NewCacheArchiverCommand() cli.Command { return common.NewCommand( "cache-archiver", "create and upload cache artifacts (internal)", &CacheArchiverCommand{ retryHelper: retryHelper{ Retry: 2, RetryTime: time.Second, }, TransferBufferSize: defaultCacheTransferBufferSize, ChunkSize: defaultCacheChunkSize, Concurrency: defaultCacheConcurrency, }, ) } type metadata map[string]string func (m *metadata) UnmarshalFlag(raw string) error { return json.Unmarshal([]byte(raw), m) } func (c *CacheArchiverCommand) getClient() *CacheClient { if c.client == nil { c.client = NewCacheClient(c.Timeout) } return c.client } func (c *CacheArchiverCommand) upload(_ int) error { file, err := os.Open(c.File) if err != nil { return err } defer func() { _ = file.Close() }() fi, err := file.Stat() if err != nil { return err } rc := meter.NewReader( file, c.TransferMeterFrequency, meter.LabelledRateFormat(os.Stdout, "Uploading cache", fi.Size()), ) defer rc.Close() if c.GoCloudURL != "" { logrus.Infoln("Using GoCloud URL for cache upload") return c.handleGoCloudURL(rc) } logrus.Infoln("Using presigned URL for cache upload") return c.handlePresignedURL(fi, rc) } func (c *CacheArchiverCommand) handlePresignedURL(fi os.FileInfo, file io.ReadCloser) error { logrus.Infoln("Uploading", filepath.Base(c.File), "to", url_helpers.CleanURL(c.URL)) // Use a buffered body so the HTTP client reads in larger chunks (improves S3 
upload throughput). body := struct { io.Reader io.Closer }{bufio.NewReaderSize(file, c.TransferBufferSize), file} req, err := http.NewRequest(http.MethodPut, c.URL, body) if err != nil { return retryableErr{err: err} } c.setHeaders(req, fi) req.ContentLength = fi.Size() resp, err := c.getClient().Do(req) if err != nil { return retryableErr{err: err} } defer func() { _ = resp.Body.Close() }() return retryOnServerError(resp) } func (c *CacheArchiverCommand) handleGoCloudURL(file io.Reader) error { logrus.Infoln("Uploading", filepath.Base(c.File), "to", url_helpers.CleanURL(c.GoCloudURL)) if c.mux == nil { c.mux = blob.DefaultURLMux() } ctx, cancelWrite := context.WithCancel(context.Background()) defer cancelWrite() u, err := url.Parse(c.GoCloudURL) if err != nil { return err } err = loadEnvFile(c.EnvFile) if err != nil { return err } objectName := strings.TrimLeft(u.Path, "/") if objectName == "" { return fmt.Errorf("no object name provided") } b, err := c.mux.OpenBucket(ctx, c.GoCloudURL) if err != nil { return err } defer b.Close() writerOpts := &blob.WriterOptions{ Metadata: c.Metadata, BufferSize: c.ChunkSize, MaxConcurrency: c.Concurrency, } ffLogger := logrus.WithField("name", featureflags.UseParallelCacheTransfer) if !featureflags.IsOn(ffLogger, os.Getenv(featureflags.UseParallelCacheTransfer)) { writerOpts.MaxConcurrency = 1 } writer, err := b.NewWriter(ctx, objectName, writerOpts) if err != nil { return err } buf := make([]byte, c.TransferBufferSize) if _, err = io.CopyBuffer(writer, file, buf); err != nil { cancelWrite() if writerErr := writer.Close(); writerErr != nil { logrus.WithError(writerErr).Error("error closing Go cloud upload after copy failure") } return err } if err := writer.Close(); err != nil { return err } return nil } func (c *CacheArchiverCommand) createZipFile(filename string) (int64, error) { err := os.MkdirAll(filepath.Dir(filename), 0o700) if err != nil { return 0, err } f, err := os.CreateTemp(filepath.Dir(filename), "archive_") if err 
!= nil { return 0, err } defer os.Remove(f.Name()) defer f.Close() logrus.Debugln("Temporary file:", f.Name()) switch strings.ToLower(c.CompressionFormat) { case string(spec.ArtifactFormatTarZstd): c.CompressionFormat = string(spec.ArtifactFormatTarZstd) default: c.CompressionFormat = string(spec.ArtifactFormatZip) } archiver, err := archive.NewArchiver(archive.Format(c.CompressionFormat), f, c.wd, GetCompressionLevel(c.CompressionLevel)) if err != nil { return 0, err } // Create archive err = archiver.Archive(context.Background(), c.files) if err != nil { return 0, err } info, err := f.Stat() if err != nil { return 0, err } err = f.Close() if err != nil { return 0, err } return info.Size(), os.Rename(f.Name(), filename) } func (c *CacheArchiverCommand) tryRenameAlternateFile() { if c.AlternateFile == "" || c.AlternateFile == c.File { return } _, err := os.Stat(c.File) if err == nil { logrus.Debugln("Primary cache file already exists locally, skipping rename from alternate") return } if !errors.Is(err, fs.ErrNotExist) { logrus.WithError(err).Warningln("Failed to stat primary cache file") return } if _, err := os.Stat(c.AlternateFile); err != nil { logrus.Debugln("Alternate cache file not found locally, nothing to rename") return } if err := os.MkdirAll(filepath.Dir(c.File), 0o700); err != nil { logrus.WithError(err).Warningln("Failed to create directory for cache file rename") return } if err := os.Rename(c.AlternateFile, c.File); err != nil { logrus.WithError(err).Warningln("Failed to rename alternate cache file to primary") return } logrus.Infoln("Renamed alternate cache file to primary") } func (c *CacheArchiverCommand) Execute(*cli.Context) { log.SetRunnerFormatter() c.normalizeArgs() c.tryRenameAlternateFile() if err := validateCacheTransferTuning(c.TransferBufferSize, c.ChunkSize, c.Concurrency); err != nil { logrus.Fatalln(err) } // Enumerate files err := c.enumerate() if err != nil { logrus.Fatalln(err) } // Skip upload if no files were found if 
len(c.files) == 0 { logrus.Warningln("No files to cache.") return } // Check if list of files changed if !c.isFileChanged(c.File) { if c.AlternateFile != c.File { // AlternateFile is set (FF_HASH_CACHE_KEYS compatibility mode): the primary // archive may have been downloaded from the alternate URL by the extractor, // meaning the primary remote URL does not yet have an object. Upload the // existing archive to ensure the primary URL is populated. // This handles both transition directions: // FF false→true: primary=hashed, alternate=unhashed // FF true→false: primary=unhashed, alternate=hashed c.uploadExistingArchiveIfNeeded() return } logrus.Infoln("Archive is up to date!") return } // Create archive size, err := c.createZipFile(c.File) if err != nil { logrus.Fatalln(err) } err = writeCacheMetadataFile(c.File, c.Metadata) if err != nil { logrus.Fatalln(err) } c.uploadArchiveIfNeeded(size) } func (c *CacheArchiverCommand) normalizeArgs() { if c.File == "" { logrus.Fatalln("Missing --file") } if c.TransferBufferSize == 0 { c.TransferBufferSize = defaultCacheTransferBufferSize } if c.ChunkSize == 0 { c.ChunkSize = defaultCacheChunkSize } if c.Concurrency == 0 { c.Concurrency = defaultCacheConcurrency } for idx := range c.Paths { if path, err := shell.Expand(c.Paths[idx], nil); err != nil { logrus.Warnf("invalid path %q: %v", path, err) } else { c.Paths[idx] = path } } for idx := range c.Exclude { if path, err := shell.Expand(c.Exclude[idx], nil); err != nil { logrus.Warnf("invalid path %q: %v", path, err) } else { c.Exclude[idx] = path } } } // uploadExistingArchiveIfNeeded uploads the local cache archive to the primary remote URL // if the archive exists locally and the primary remote does not yet have an object. 
func (c *CacheArchiverCommand) uploadExistingArchiveIfNeeded() {
	// Missing local archive: nothing to upload; error intentionally swallowed.
	fi, err := os.Stat(c.File)
	if err != nil {
		return
	}
	if c.primaryRemoteExists() {
		logrus.Infoln("Primary cache already exists remotely, skipping upload")
	} else {
		logrus.Infoln("Primary cache does not exist remotely, uploading existing archive")
		c.uploadArchiveIfNeeded(fi.Size())
	}
}

// primaryRemoteExists reports whether the primary remote cache object already exists.
// Returns true only when the object is confirmed present; returns false on any error or absence.
func (c *CacheArchiverCommand) primaryRemoteExists() bool {
	// GoCloud takes precedence over the pre-signed CheckURL when both are set.
	if c.GoCloudURL != "" {
		return c.primaryGoCloudExists()
	}
	if c.CheckURL != "" {
		return c.primaryPresignedExists()
	}
	// No way to check: treat as absent so the caller uploads.
	return false
}

// primaryPresignedExists issues a HEAD request against the pre-signed CheckURL;
// only a 200 response counts as "exists".
func (c *CacheArchiverCommand) primaryPresignedExists() bool {
	resp, err := c.getClient().Head(c.CheckURL)
	if err != nil {
		logrus.WithError(err).Warningln("Failed to check primary cache existence via HEAD request, assuming absent")
		return false
	}
	defer func() { _ = resp.Body.Close() }()
	exists := resp.StatusCode == http.StatusOK
	logrus.WithField("status", resp.StatusCode).Debugln("Primary cache HEAD request completed")
	return exists
}

// primaryGoCloudExists checks object existence via blob.Attributes on the
// GoCloud bucket. Any failure (env file, URL parse, bucket open) yields false.
func (c *CacheArchiverCommand) primaryGoCloudExists() bool {
	if c.mux == nil {
		c.mux = blob.DefaultURLMux()
	}
	ctx := context.Background()
	// NOTE(review): setup errors below return false silently (no log); this
	// matches the documented "false on any error" contract but makes
	// misconfiguration hard to diagnose — consider warn-level logging.
	if err := loadEnvFile(c.EnvFile); err != nil {
		return false
	}
	u, err := url.Parse(c.GoCloudURL)
	if err != nil {
		return false
	}
	objectName := strings.TrimLeft(u.Path, "/")
	if objectName == "" {
		return false
	}
	b, err := c.mux.OpenBucket(ctx, c.GoCloudURL)
	if err != nil {
		return false
	}
	defer b.Close()
	_, err = b.Attributes(ctx, objectName)
	if err != nil {
		logrus.WithField("object", objectName).Debugln("Primary cache object not found in remote storage")
		return false
	}
	logrus.WithField("object", objectName).Debugln("Primary cache object found in remote storage")
	return true
}

// uploadArchiveIfNeeded uploads the archive (with retries) unless no remote
// URL is configured or the archive exceeds the configured size limit, in which
// case the cache is kept local only. Upload failure is fatal to the helper.
func (c *CacheArchiverCommand) uploadArchiveIfNeeded(size int64) {
	if c.URL == "" && c.GoCloudURL == "" {
		logrus.Infoln(
			"No URL provided, cache will not be uploaded to shared cache server. " +
				"Cache will be stored only locally.")
		return
	}
	// A zero MaxUploadedArchiveSize means "no limit".
	if c.MaxUploadedArchiveSize != 0 && size > c.MaxUploadedArchiveSize {
		logrus.Infoln(fmt.Sprintf("Cache archive size (%d) is too big (Limit is set to %d). "+
			"Cache will be stored only locally.", size, c.MaxUploadedArchiveSize))
		return
	}
	err := c.doRetry(c.upload)
	if err != nil {
		logrus.Fatalln(err)
	}
}

// setHeaders applies the user-supplied --header values (trimmed) and then the
// defaults: octet-stream Content-Type (unless overridden) and Last-Modified.
func (c *CacheArchiverCommand) setHeaders(req *http.Request, fi os.FileInfo) {
	for k, v := range split(c.Headers) {
		req.Header.Set(strings.TrimSpace(k), strings.TrimSpace(v))
	}

	// Set default headers. But don't override custom Content-Type.
	if req.Header.Get(common.ContentType) == "" {
		req.Header.Set(common.ContentType, "application/octet-stream")
	}

	req.Header.Set("Last-Modified", fi.ModTime().UTC().Format(http.TimeFormat))
}

// split parses "key:value" strings into a map, silently dropping entries with
// no colon. Values containing further colons are kept intact (Cut splits once).
// Whitespace is NOT trimmed here; callers (setHeaders) trim as needed.
func split(raw []string) map[string]string {
	const sep = ":"
	data := make(map[string]string, len(raw))
	for _, s := range raw {
		k, v, ok := strings.Cut(s, sep)
		if !ok {
			continue
		}
		data[k] = v
	}
	return data
}

================================================
FILE: commands/helpers/cache_archiver_integration_test.go
================================================
//go:build integration

package helpers_test

import (
	"bytes"
	"context"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"net/url"
	"os"
	"testing"
	"time"

	"github.com/sirupsen/logrus"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
	"github.com/urfave/cli"
	"gocloud.dev/blob"
	"gocloud.dev/blob/fileblob"

	"gitlab.com/gitlab-org/gitlab-runner/commands/helpers"
	"gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive"
	"gitlab.com/gitlab-org/gitlab-runner/common"
	testHelpers "gitlab.com/gitlab-org/gitlab-runner/helpers"
)

// Shared fixture file names used across the cache archiver integration tests.
const (
	cacheArchiverArchive           = "archive.zip"
	cacheArchiverMetadata          = "metadata.json"
	cacheArchiverTestArchivedFile  = "archive_file"
	cacheExtractorTestArchivedFile = "archive_file"
)

func
TestCacheArchiveLocalMetadata(t *testing.T) { tests := map[string]struct { metaArgs map[string]string expectedLocalMetadata string }{ "no metadata": { expectedLocalMetadata: "{}", }, "single metadata": { metaArgs: map[string]string{"foo": "bar:baz"}, expectedLocalMetadata: `{"foo":"bar:baz"}`, }, "multiple metadata": { metaArgs: map[string]string{"Foo": "some Foo", "bAr": "some Bar"}, expectedLocalMetadata: `{"bar":"some Bar","foo":"some Foo"}`, }, "weird metadata": { metaArgs: map[string]string{"foo": ` - bla - bla - some: {random: thing} - \x63\xb3 - bla`}, expectedLocalMetadata: `{"foo":"\n- bla\n- bla\n- some: {random: thing}\n- \\x63\\xb3\n- bla"}`, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { writeTestFile(t, cacheArchiverTestArchivedFile) defer os.Remove(cacheArchiverTestArchivedFile) srv := httptest.NewServer(http.HandlerFunc(testCacheUploadHandler)) t.Cleanup(func() { srv.Close() require.NoError(t, os.Remove(cacheArchiverArchive)) require.NoError(t, os.Remove(cacheArchiverMetadata)) }) cmd := helpers.CacheArchiverCommand{ File: cacheArchiverArchive, URL: srv.URL + "/cache.zip", Metadata: test.metaArgs, Timeout: 0, } cmd.Paths = []string{cacheArchiverTestArchivedFile} cmd.Execute(&cli.Context{}) require.FileExists(t, cacheArchiverMetadata) content, err := os.ReadFile(cacheArchiverMetadata) require.NoError(t, err, "reading local metadata file") require.Equal(t, test.expectedLocalMetadata, string(content), "wrong local metadata") }) } } func TestCacheArchiverUploadExpandArgs(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(testCacheUploadHandler)) defer srv.Close() defer os.Remove(cacheArchiverArchive) defer os.Remove(cacheArchiverMetadata) t.Setenv("expand", "expanded") cmd := helpers.CacheArchiverCommand{ File: cacheArchiverArchive, URL: srv.URL + "/cache.zip", Timeout: 0, } cmd.Paths = []string{"unexpanded", "path/${expand}/${expand:1:3}"} cmd.Exclude = []string{"unexpanded", "path/$expand/${foo:-bar}"} 
cmd.Execute(&cli.Context{}) assert.Equal(t, []string{"unexpanded", "path/expanded/xpa"}, cmd.Paths) assert.Equal(t, []string{"unexpanded", "path/expanded/bar"}, cmd.Exclude) } func TestCacheArchiverIsUpToDate(t *testing.T) { helpers.OnEachZipArchiver(t, func(t *testing.T) { writeTestFile(t, cacheArchiverTestArchivedFile) defer os.Remove(cacheArchiverTestArchivedFile) defer os.Remove(cacheArchiverArchive) cmd := helpers.NewCacheArchiverCommandForTest(cacheArchiverArchive, []string{cacheArchiverTestArchivedFile}) cmd.Execute(nil) fi, _ := os.Stat(cacheArchiverArchive) cmd.Execute(nil) fi2, _ := os.Stat(cacheArchiverArchive) assert.Equal(t, fi.ModTime(), fi2.ModTime(), "archive is up to date") // We need to wait one second, since the FS doesn't save milliseconds time.Sleep(time.Second) err := os.Chtimes(cacheArchiverTestArchivedFile, time.Now(), time.Now()) assert.NoError(t, err) cmd.Execute(nil) fi3, _ := os.Stat(cacheArchiverArchive) assert.NotEqual(t, fi.ModTime(), fi3.ModTime(), "archive should get updated") }) } func TestCacheArchiverForIfNoFileDefined(t *testing.T) { removeHook := testHelpers.MakeFatalToPanic() defer removeHook() cmd := helpers.CacheArchiverCommand{} assert.Panics(t, func() { cmd.Execute(nil) }) } func TestCacheArchiverRemoteServerNotFound(t *testing.T) { writeTestFile(t, cacheArchiverTestArchivedFile) defer os.Remove(cacheArchiverTestArchivedFile) ts := httptest.NewServer(http.HandlerFunc(testCacheUploadHandler)) defer ts.Close() removeHook := testHelpers.MakeFatalToPanic() defer removeHook() defer os.Remove(cacheArchiverArchive) defer os.Remove(cacheArchiverMetadata) cmd := helpers.CacheArchiverCommand{ File: cacheArchiverArchive, URL: ts.URL + "/invalid-file.zip", Timeout: 0, } cmd.Paths = []string{cacheArchiverTestArchivedFile} assert.Panics(t, func() { cmd.Execute(nil) }) } func TestCacheArchiverRemoteServer(t *testing.T) { writeTestFile(t, cacheArchiverTestArchivedFile) defer os.Remove(cacheArchiverTestArchivedFile) ts := 
httptest.NewServer(http.HandlerFunc(testCacheUploadHandler)) defer ts.Close() removeHook := testHelpers.MakeFatalToPanic() defer removeHook() defer os.Remove(cacheArchiverArchive) defer os.Remove(cacheArchiverMetadata) cmd := helpers.CacheArchiverCommand{ File: cacheArchiverArchive, URL: ts.URL + "/cache.zip", Timeout: 0, } cmd.Paths = []string{cacheArchiverTestArchivedFile} assert.NotPanics(t, func() { cmd.Execute(nil) }) } func TestCacheArchiverGoCloudRemoteServer(t *testing.T) { writeTestFile(t, cacheArchiverTestArchivedFile) defer os.Remove(cacheArchiverTestArchivedFile) mux, bucketDir := setupGoCloudFileBucket(t, "testblob") objectName := "path/to/cache.zip" removeHook := testHelpers.MakeFatalToPanic() defer removeHook() defer os.Remove(cacheArchiverArchive) defer os.Remove(cacheArchiverMetadata) cmd := helpers.CacheArchiverCommand{ File: cacheArchiverArchive, GoCloudURL: fmt.Sprintf("testblob://bucket/%s", objectName), Metadata: map[string]string{"foo": "some foo", "bar": "some bar"}, Timeout: 0, } cmd.Paths = []string{cacheArchiverTestArchivedFile} helpers.SetCacheArchiverCommandMux(&cmd, mux) assert.NotPanics(t, func() { cmd.Execute(nil) }) attrs := goCloudObjectAttributes(t, bucketDir, objectName) assert.Equal(t, map[string]string{ "foo": "some foo", "bar": "some bar", }, attrs.Metadata, "wrong blob metadata") } func TestCacheArchiverRemoteServerWithHeaders(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(testCacheUploadWithCustomHeaders)) defer ts.Close() removeHook := testHelpers.MakeFatalToPanic() defer removeHook() defer os.Remove(cacheArchiverArchive) defer os.Remove(cacheArchiverMetadata) cmd := helpers.CacheArchiverCommand{ File: cacheArchiverArchive, URL: ts.URL + "/cache.zip", Headers: []string{"Content-Type: application/zip", "x-ms-blob-type: BlockBlob "}, Timeout: 0, } assert.NotPanics(t, func() { cmd.Execute(nil) }) } func TestCacheArchiverRemoteServerTimedOut(t *testing.T) { writeTestFile(t, cacheArchiverTestArchivedFile) defer 
os.Remove(cacheArchiverTestArchivedFile) ts := httptest.NewServer(http.HandlerFunc(testCacheUploadHandler)) defer ts.Close() output := logrus.StandardLogger().Out var buf bytes.Buffer logrus.SetOutput(&buf) defer logrus.SetOutput(output) removeHook := testHelpers.MakeFatalToPanic() defer removeHook() defer os.Remove(cacheArchiverArchive) defer os.Remove(cacheArchiverMetadata) cmd := helpers.CacheArchiverCommand{ File: cacheArchiverArchive, URL: ts.URL + "/timeout", } cmd.Paths = []string{cacheArchiverTestArchivedFile} helpers.SetCacheArchiverCommandClientTimeout(&cmd, 1*time.Millisecond) assert.Panics(t, func() { cmd.Execute(nil) }) assert.Contains(t, buf.String(), "Client.Timeout") } func TestCacheArchiverRemoteServerFailOnInvalidServer(t *testing.T) { writeTestFile(t, cacheArchiverTestArchivedFile) defer os.Remove(cacheArchiverTestArchivedFile) removeHook := testHelpers.MakeFatalToPanic() defer removeHook() defer os.Remove(cacheArchiverArchive) defer os.Remove(cacheArchiverMetadata) cmd := helpers.CacheArchiverCommand{ File: cacheArchiverArchive, URL: "http://localhost:65333/cache.zip", Timeout: 0, } cmd.Paths = []string{cacheArchiverTestArchivedFile} assert.Panics(t, func() { cmd.Execute(nil) }) } func TestCacheArchiverCompressionLevel(t *testing.T) { writeTestFile(t, cacheArchiverTestArchivedFile) defer os.Remove(cacheArchiverTestArchivedFile) for _, expectedLevel := range []string{"fastest", "fast", "default", "slow", "slowest"} { t.Run(expectedLevel, func(t *testing.T) { mockArchiver := archive.NewMockArchiver(t) prevArchiver, _ := archive.Register( "zip", func(w io.Writer, dir string, level archive.CompressionLevel) (archive.Archiver, error) { assert.Equal(t, helpers.GetCompressionLevel(expectedLevel), level) return mockArchiver, nil }, nil, ) defer archive.Register( "zip", prevArchiver, nil, ) mockArchiver.On("Archive", mock.Anything, mock.Anything).Return(nil) defer os.Remove(cacheArchiverArchive) defer os.Remove(cacheArchiverMetadata) cmd := 
helpers.NewCacheArchiverCommandForTest(cacheArchiverArchive, []string{cacheArchiverTestArchivedFile}) cmd.CompressionLevel = expectedLevel cmd.Execute(nil) }) } } type dirOpener struct { tmpDir string } func (o *dirOpener) OpenBucketURL(_ context.Context, u *url.URL) (*blob.Bucket, error) { return fileblob.OpenBucket(o.tmpDir, nil) } func setupGoCloudFileBucket(t *testing.T, scheme string) (m *blob.URLMux, bucketDir string) { tmpDir := t.TempDir() mux := new(blob.URLMux) fake := &dirOpener{tmpDir: tmpDir} mux.RegisterBucket(scheme, fake) return mux, tmpDir } // goCloudObjectAttributes pulls the attributes of a blob. It fails the test if the blob does not exist or the // attributes can't be retrieved func goCloudObjectAttributes(t *testing.T, bucketDir string, objectName string) *blob.Attributes { bucket, err := fileblob.OpenBucket(bucketDir, nil) require.NoError(t, err, "opening bucket") ctx, cancel := context.WithCancel(context.Background()) defer cancel() exists, err := bucket.Exists(ctx, objectName) require.NoError(t, err, "querying blob existence") require.True(t, exists, "blob does not exist") attr, err := bucket.Attributes(ctx, objectName) require.NoError(t, err, "getting blob attributes") return attr } func testCacheBaseUploadHandler(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodPut { http.Error(w, "405 Method not allowed", http.StatusMethodNotAllowed) return } if r.URL.Path != "/cache.zip" { if r.URL.Path == "/timeout" { time.Sleep(50 * time.Millisecond) } http.NotFound(w, r) return } } func testCacheUploadHandler(w http.ResponseWriter, r *http.Request) { testCacheBaseUploadHandler(w, r) if r.Header.Get(common.ContentType) != "application/octet-stream" { http.Error(w, "500 Wrong Content-Type header", http.StatusInternalServerError) return } if r.Header.Get("Last-Modified") == "" { http.Error(w, "500 Missing Last-Modified header", http.StatusInternalServerError) return } } func testCacheUploadWithCustomHeaders(w http.ResponseWriter, r 
*http.Request) { testCacheBaseUploadHandler(w, r) if r.Header.Get(common.ContentType) != "application/zip" { http.Error(w, "500 Wrong Content-Type header", http.StatusInternalServerError) } if r.Header.Get("x-ms-blob-type") != "BlockBlob" { http.Error(w, "500 Wrong x-ms-blob-type header", http.StatusInternalServerError) } if r.Header.Get("Last-Modified") == "" { http.Error(w, "500 Expected Last-Modified header included", http.StatusInternalServerError) } } func writeTestFile(t *testing.T, fileName string) { err := os.WriteFile(fileName, nil, 0600) require.NoError(t, err, "Writing file:", fileName) } func TestCacheArchiverUploadedSize(t *testing.T) { // Pre-compute the actual archive size to avoid hardcoding an implementation-specific value. require.NoError(t, os.WriteFile(cacheArchiverTestArchivedFile, []byte("test content for cache"), 0600)) sizeCmd := helpers.NewCacheArchiverCommandForTest(cacheArchiverArchive, []string{cacheArchiverTestArchivedFile}) sizeCmd.Execute(nil) fi, err := os.Stat(cacheArchiverArchive) require.NoError(t, err, "measuring archive size") archiveSize := int(fi.Size()) os.Remove(cacheArchiverTestArchivedFile) os.Remove(cacheArchiverArchive) os.Remove(cacheArchiverMetadata) tests := map[string]struct { limit int exceeded bool }{ "no-limit": {limit: 0, exceeded: false}, "above-limit": {limit: 100, exceeded: true}, "equal-limit": {limit: archiveSize, exceeded: false}, "below-limit": {limit: archiveSize + 100, exceeded: false}, } for tn, tc := range tests { t.Run(tn, func(t *testing.T) { err := os.WriteFile(cacheArchiverTestArchivedFile, []byte("test content for cache"), 0600) require.NoError(t, err) defer os.Remove(cacheArchiverTestArchivedFile) defer logrus.SetOutput(logrus.StandardLogger().Out) defer testHelpers.MakeFatalToPanic()() var buf bytes.Buffer logrus.SetOutput(&buf) ts := httptest.NewServer(http.HandlerFunc(testCacheBaseUploadHandler)) defer ts.Close() defer os.Remove(cacheArchiverArchive) defer os.Remove(cacheArchiverMetadata) cmd 
:= helpers.CacheArchiverCommand{ File: cacheArchiverArchive, MaxUploadedArchiveSize: int64(tc.limit), URL: ts.URL + "/cache.zip", Timeout: 0, } cmd.Paths = []string{cacheArchiverTestArchivedFile} assert.NotPanics(t, func() { cmd.Execute(nil) }) if tc.exceeded { require.Contains(t, buf.String(), "too big") } else { require.NotContains(t, buf.String(), "too big") } }) } } func TestCacheArchiverSkipsEmptyCache(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(testCacheUploadHandler)) defer ts.Close() defer logrus.SetOutput(logrus.StandardLogger().Out) var buf bytes.Buffer logrus.SetOutput(&buf) defer os.Remove(cacheArchiverArchive) defer os.Remove(cacheArchiverMetadata) cmd := helpers.CacheArchiverCommand{ File: cacheArchiverArchive, URL: ts.URL + "/cache.zip", Timeout: 0, } cmd.Paths = []string{"/nonexistent/path/that/does/not/exist"} assert.NotPanics(t, func() { cmd.Execute(nil) }) assert.Contains(t, buf.String(), "No files to cache") _, err := os.Stat(cacheArchiverArchive) assert.Error(t, err, "archive file should not be created for empty cache") assert.True(t, os.IsNotExist(err), "archive file should not exist") _, err = os.Stat(cacheArchiverMetadata) assert.Error(t, err, "metadata file should not be created for empty cache") assert.True(t, os.IsNotExist(err), "metadata file should not exist") } ================================================ FILE: commands/helpers/cache_archiver_test.go ================================================ //go:build !integration package helpers import ( "net/http" "net/http/httptest" "os" "path/filepath" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestUploadExistingArchiveIfNeeded(t *testing.T) { tests := map[string]struct { setupFile bool provideCheckURL bool headStatus int expectUpload bool }{ "local file missing": { setupFile: false, expectUpload: false, }, "file exists, remote exists": { setupFile: true, provideCheckURL: true, headStatus: http.StatusOK, expectUpload: false, 
}, "file exists, remote missing": { setupFile: true, provideCheckURL: true, headStatus: http.StatusNotFound, expectUpload: true, }, "file exists, no check URL": { setupFile: true, expectUpload: true, }, } for name, tc := range tests { t.Run(name, func(t *testing.T) { tmpDir := t.TempDir() primaryFile := filepath.Join(tmpDir, "cache.zip") uploaded := false srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.Method { case http.MethodHead: w.WriteHeader(tc.headStatus) case http.MethodPut: uploaded = true w.WriteHeader(http.StatusOK) } })) defer srv.Close() if tc.setupFile { require.NoError(t, os.WriteFile(primaryFile, []byte("cache content"), 0o600)) } cmd := &CacheArchiverCommand{ File: primaryFile, URL: srv.URL + "/upload", } if tc.provideCheckURL { cmd.CheckURL = srv.URL + "/check" } cmd.uploadExistingArchiveIfNeeded() assert.Equal(t, tc.expectUpload, uploaded) }) } } func TestTryRenameAlternateFile(t *testing.T) { tests := map[string]struct { setupAlternate bool setupPrimary bool noAlternateSet bool // pass empty string as AlternateFile sameAsPrimary bool // AlternateFile == File primaryInSubdir bool // primary lives in a subdirectory that doesn't exist yet expectRename bool }{ "no alternate file set": { noAlternateSet: true, expectRename: false, }, "alternate same as primary": { sameAsPrimary: true, expectRename: false, }, "primary exists, alternate exists": { setupPrimary: true, setupAlternate: true, expectRename: false, }, "primary missing, alternate missing": { setupAlternate: false, expectRename: false, }, "primary missing, alternate exists": { setupAlternate: true, expectRename: true, }, "primary missing, alternate exists, primary dir missing": { setupAlternate: true, primaryInSubdir: true, expectRename: true, }, } for name, tc := range tests { t.Run(name, func(t *testing.T) { tmpDir := t.TempDir() primaryFile := filepath.Join(tmpDir, "cache.zip") if tc.primaryInSubdir { primaryFile = filepath.Join(tmpDir, 
"newsubdir", "cache.zip") } alternateFile := filepath.Join(tmpDir, "old-cache.zip") switch { case tc.noAlternateSet: alternateFile = "" case tc.sameAsPrimary: alternateFile = primaryFile } if tc.setupPrimary { require.NoError(t, os.WriteFile(primaryFile, []byte("primary"), 0o600)) } if tc.setupAlternate { require.NoError(t, os.WriteFile(alternateFile, []byte("alternate"), 0o600)) } cmd := &CacheArchiverCommand{ File: primaryFile, AlternateFile: alternateFile, } cmd.tryRenameAlternateFile() if tc.expectRename { assert.FileExists(t, primaryFile, "primary file should exist after rename") assert.NoFileExists(t, alternateFile, "alternate file should be gone after rename") content, err := os.ReadFile(primaryFile) require.NoError(t, err) assert.Equal(t, "alternate", string(content), "primary file should contain former alternate content") } else { if tc.setupPrimary { content, err := os.ReadFile(primaryFile) require.NoError(t, err) assert.Equal(t, "primary", string(content), "primary file should be unchanged") } if tc.setupAlternate && alternateFile != primaryFile { assert.FileExists(t, alternateFile, "alternate file should be untouched") } } }) } } ================================================ FILE: commands/helpers/cache_client.go ================================================ package helpers import ( "net" "net/http" "time" "gitlab.com/gitlab-org/gitlab-runner/common" ) type CacheClient struct { http.Client } func (c *CacheClient) prepareClient(timeout int) { if timeout > 0 { c.Timeout = time.Duration(timeout) * time.Minute } else { c.Timeout = time.Duration(common.DefaultCacheRequestTimeout) * time.Minute } } func (c *CacheClient) prepareTransport() { c.Transport = &http.Transport{ Proxy: http.ProxyFromEnvironment, DialContext: (&net.Dialer{ Timeout: 30 * time.Second, KeepAlive: 30 * time.Second, }).DialContext, IdleConnTimeout: 90 * time.Second, TLSHandshakeTimeout: 10 * time.Second, ExpectContinueTimeout: 10 * time.Second, ResponseHeaderTimeout: 30 * 
time.Second,
		// Compression is pointless for already-compressed cache archives and
		// would distort Content-Length handling.
		DisableCompression: true,
	}
}

// NewCacheClient builds an HTTP client for cache transfers with the given
// overall timeout in minutes (0 selects the default) and a tuned transport.
func NewCacheClient(timeout int) *CacheClient {
	client := &CacheClient{}
	client.prepareClient(timeout)
	client.prepareTransport()
	return client
}

================================================
FILE: commands/helpers/cache_defaults.go
================================================
package helpers

import "fmt"

// Default sizes for cache-extractor and cache-archiver transfer tuning (overridden by CLI / env).
const (
	defaultCacheTransferBufferSize = 4 * 1024 * 1024  // 4 MiB
	defaultCacheChunkSize          = 16 * 1024 * 1024 // 16 MiB
	defaultCacheConcurrency        = 16

	// logFieldHTTPETag is the structured log key for the HTTP ETag header (snake_case). Not defined in labkit/fields yet.
	logFieldHTTPETag = "etag"
)

// validateCacheTransferTuning checks values after normalize* maps 0 to defaults.
// Negative sizes bypass normalization and must be rejected so allocation and blob options do not panic or misbehave.
func validateCacheTransferTuning(transferBufferSize, chunkSize, concurrency int) error {
	// Buffer size must be strictly positive: it sizes a []byte allocation.
	if transferBufferSize <= 0 {
		return fmt.Errorf("invalid cache transfer buffer size %d (CACHE_TRANSFER_BUFFER_SIZE / --transfer-buffer-size): must be positive; use 0 for default %d bytes", transferBufferSize, defaultCacheTransferBufferSize)
	}
	// Chunk size / concurrency of 0 are tolerated: the blob layer applies its own defaults.
	if chunkSize < 0 {
		return fmt.Errorf("invalid cache chunk size %d (CACHE_CHUNK_SIZE / --chunk-size): must be non-negative; use 0 for default %d bytes", chunkSize, defaultCacheChunkSize)
	}
	if concurrency < 0 {
		return fmt.Errorf("invalid cache concurrency %d (CACHE_CONCURRENCY / --concurrency): must be non-negative", concurrency)
	}
	return nil
}

================================================
FILE: commands/helpers/cache_defaults_test.go
================================================
//go:build !integration

package helpers

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func TestValidateCacheTransferTuning(t *testing.T) {
	t.Parallel()

	require.NoError(t, validateCacheTransferTuning(
		defaultCacheTransferBufferSize, defaultCacheChunkSize, defaultCacheConcurrency,
	))
	// Zero chunk size / concurrency are valid ("use default"); buffer size must be > 0.
	require.NoError(t, validateCacheTransferTuning(1, 0, 0))

	err := validateCacheTransferTuning(0, defaultCacheChunkSize, defaultCacheConcurrency)
	require.Error(t, err)
	require.Contains(t, err.Error(), "transfer buffer size")

	err = validateCacheTransferTuning(-1, defaultCacheChunkSize, defaultCacheConcurrency)
	require.Error(t, err)
	require.Contains(t, err.Error(), "transfer buffer size")

	err = validateCacheTransferTuning(defaultCacheTransferBufferSize, -1, defaultCacheConcurrency)
	require.Error(t, err)
	require.Contains(t, err.Error(), "chunk size")

	err = validateCacheTransferTuning(defaultCacheTransferBufferSize, defaultCacheChunkSize, -1)
	require.Error(t, err)
	require.Contains(t, err.Error(), "concurrency")
}

================================================
FILE: commands/helpers/cache_env.go
================================================
package helpers

import (
	"fmt"
	"os"

	"github.com/joho/godotenv"
)

// loadEnvFile reads a dotenv-format file and exports each entry into the
// process environment (used to supply cloud credentials to blob drivers).
// An empty filename is a no-op.
func loadEnvFile(filename string) error {
	if filename == "" {
		return nil
	}

	env, err := godotenv.Read(filename)
	if err != nil {
		return fmt.Errorf("failed to read env file: %w", err)
	}

	for key, value := range env {
		if err := os.Setenv(key, value); err != nil {
			return fmt.Errorf("failed to set environment variable %s: %w", key, err)
		}
	}

	return nil
}

================================================
FILE: commands/helpers/cache_env_test.go
================================================
//go:build !integration

package helpers

import (
	"os"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestLoadEnvFile(t *testing.T) {
	tmpfile, err := os.CreateTemp("", "test.env")
	assert.NoError(t, err)
	defer os.Remove(tmpfile.Name())

	_, err = tmpfile.WriteString("TEST_KEY1=TEST_VALUE1\nTEST_KEY2=TEST_VALUE2")
	assert.NoError(t, err)
	tmpfile.Close()

	tests := map[string]struct {
		envFile     string
		expectError bool
		setup       func()
		check       func(*testing.T)
	}{
		"empty env file": {
			envFile: "",
			expectError:
false, }, "missing env file": { envFile: "non_existent_file.env", expectError: true, }, "successful env file load": { envFile: tmpfile.Name(), expectError: false, check: func(t *testing.T) { assert.Equal(t, "TEST_VALUE1", os.Getenv("TEST_KEY1")) assert.Equal(t, "TEST_VALUE2", os.Getenv("TEST_KEY2")) }, }, } for name, tc := range tests { t.Run(name, func(t *testing.T) { if tc.setup != nil { tc.setup() } originalEnv := os.Environ() defer func() { os.Clearenv() for _, envVar := range originalEnv { parts := strings.SplitN(envVar, "=", 2) os.Setenv(parts[0], parts[1]) } }() err := loadEnvFile(tc.envFile) if tc.expectError { assert.Error(t, err) } else { assert.NoError(t, err) } if tc.check != nil { tc.check(t) } }) } } ================================================ FILE: commands/helpers/cache_extractor.go ================================================ package helpers import ( "context" "fmt" "io" "net/http" "net/url" "os" "path/filepath" "strconv" "strings" "time" "github.com/sirupsen/logrus" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/meter" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" "gitlab.com/gitlab-org/gitlab-runner/helpers/transfer" url_helpers "gitlab.com/gitlab-org/gitlab-runner/helpers/url" "gitlab.com/gitlab-org/gitlab-runner/log" "gocloud.dev/blob" _ "gocloud.dev/blob/azureblob" // Needed to register the Azure driver _ "gocloud.dev/blob/s3blob" // Needed to register the AWS S3 driver "gocloud.dev/gcerrors" ) type CacheExtractorCommand struct { retryHelper meter.TransferMeterCommand File string `long:"file" description:"The file containing your cache artifacts"` URL string `long:"url" description:"URL of remote cache resource"` GoCloudURL string `long:"gocloud-url" description:"Go Cloud URL of remote cache resource (requires credentials)"` Timeout int `long:"timeout" description:"Overall timeout for 
cache downloading request (in minutes)"` EnvFile string `long:"env-file" description:"Filename containing environment variables to read"` // Transfer options (all backends: presigned S3, GoCloud S3/Azure/GCS). TransferBufferSize int `long:"transfer-buffer-size" env:"CACHE_TRANSFER_BUFFER_SIZE" description:"Buffer size in bytes for streaming cache download (default 4 MiB)"` // Parallel download (presigned or GoCloud) requires FF_USE_PARALLEL_CACHE_TRANSFER. Concurrency > 1 for parallel. ChunkSize int `long:"chunk-size" env:"CACHE_CHUNK_SIZE" description:"Chunk size in bytes for parallel cache download when FF_USE_PARALLEL_CACHE_TRANSFER is enabled (default 16 MiB; 0 falls back to default)"` Concurrency int `long:"concurrency" env:"CACHE_CONCURRENCY" description:"Concurrent chunks for parallel cache transfer when FF_USE_PARALLEL_CACHE_TRANSFER is enabled (default 16; 0 or 1 = sequential for download)"` client *CacheClient mux *blob.URLMux } func NewCacheExtractorCommand() cli.Command { return common.NewCommand( "cache-extractor", "download and extract cache artifacts (internal)", &CacheExtractorCommand{ retryHelper: retryHelper{ Retry: 2, RetryTime: time.Second, }, TransferBufferSize: defaultCacheTransferBufferSize, ChunkSize: defaultCacheChunkSize, Concurrency: defaultCacheConcurrency, }, ) } // normalizeExtractorArgs applies defaults for transfer buffer and chunk size when unset (0), matching // CacheArchiverCommand.normalizeArgs for those fields. Concurrency is intentionally not normalized to the // default here: 0 or 1 mean sequential download (see presignedParallelDownloadEligible). 
func (c *CacheExtractorCommand) normalizeExtractorArgs() { if c.TransferBufferSize == 0 { c.TransferBufferSize = defaultCacheTransferBufferSize } if c.ChunkSize == 0 { c.ChunkSize = defaultCacheChunkSize } } func (c *CacheExtractorCommand) getClient() *CacheClient { if c.client == nil { c.client = NewCacheClient(c.Timeout) } return c.client } func checkIfUpToDate(path string, resp *http.Response) (bool, time.Time) { date, _ := time.Parse(http.TimeFormat, resp.Header.Get("Last-Modified")) return isLocalCacheFileUpToDate(path, date), date } func isLocalCacheFileUpToDate(path string, date time.Time) bool { fi, _ := os.Lstat(path) return fi != nil && !date.After(fi.ModTime()) } func getRemoteCacheSize(resp *http.Response) int64 { length, _ := strconv.Atoi(resp.Header.Get("Content-Length")) if length <= 0 { return meter.UnknownTotalSize } return int64(length) } func (c *CacheExtractorCommand) download(_ int) error { err := os.MkdirAll(filepath.Dir(c.File), 0o700) if err != nil { return err } if c.GoCloudURL != "" { logrus.Infoln("Using GoCloud URL for cache download") return c.handleGoCloudURL() } logrus.Infoln("Using presigned URL for cache download") return c.handlePresignedURL() } func (c *CacheExtractorCommand) getCache() (*http.Response, error) { resp, err := c.getClient().Get(c.URL) if err != nil { return nil, retryableErr{err: err} } if resp.StatusCode == http.StatusNotFound { _ = resp.Body.Close() return nil, os.ErrNotExist } return resp, retryOnServerError(resp) } // goCloudURLSchemeAssumesRangeSupport reports whether the Go CDK blob driver for this URL scheme // is expected to support NewRangeReader without a per-download probe (S3, GCS, Azure Blob). // Custom or test schemes (e.g. fileblob behind a custom name) still use gocloudSupportsRange. 
func goCloudURLSchemeAssumesRangeSupport(scheme string) bool { switch strings.ToLower(scheme) { case "s3", "gs", "azblob": return true default: return false } } // gocloudSupportsRange probes the bucket with a single-byte range read; success = supported. func (c *CacheExtractorCommand) gocloudSupportsRange(ctx context.Context, b *blob.Bucket, objectName string) bool { rr, err := b.NewRangeReader(ctx, objectName, 0, 1, nil) if err != nil { return false } _ = rr.Close() return true } func (c *CacheExtractorCommand) gocloudParallelRangeSupported(ctx context.Context, scheme string, b *blob.Bucket, objectName string) bool { if goCloudURLSchemeAssumesRangeSupport(scheme) { return true } return c.gocloudSupportsRange(ctx, b, objectName) } func (c *CacheExtractorCommand) handlePresignedURL() error { if c.presignedParallelDownloadEligible() { done, err := c.tryPresignedParallelDownload() if err != nil { return err } if done { return nil } } return c.downloadPresignedSequential() } func (c *CacheExtractorCommand) presignedParallelDownloadEligible() bool { logger := logrus.WithField("name", featureflags.UseParallelCacheTransfer) return featureflags.IsOn(logger, os.Getenv(featureflags.UseParallelCacheTransfer)) && c.Concurrency > 1 } // tryPresignedParallelDownload uses a single-byte Range GET (not HEAD: presigned S3 URLs are typically // signed for GET only). A 206 response yields Content-Length/Content-Range for parallel chunk GETs. // It returns done=true when the download path finished (including up-to-date short-circuit or parallel // download); err propagates parallel download failures. done=false, err=nil means fall back to a full GET. 
func (c *CacheExtractorCommand) tryPresignedParallelDownload() (done bool, err error) {
	req, reqErr := http.NewRequest(http.MethodGet, c.URL, nil)
	if reqErr != nil {
		// An unparsable URL fails in the sequential path too; let that path
		// produce the error and retry handling.
		return false, nil
	}
	// Single-byte probe: a 206 answer proves Range support and carries the
	// total object size in Content-Range.
	req.Header.Set("Range", "bytes=0-0")

	resp, doErr := c.getClient().Do(req)
	if doErr != nil || resp == nil {
		// Transport-level failure: fall back to the sequential download.
		return false, nil
	}

	if resp.StatusCode != http.StatusPartialContent {
		if resp.StatusCode == http.StatusOK {
			logrus.Infoln("Presigned URL did not honor Range request, using sequential download")
		}
		_ = resp.Body.Close()
		return false, nil
	}

	// Total size comes from "Content-Range: bytes 0-0/<total>".
	contentLength, ok := transfer.ParseContentRangeTotal(resp.Header.Get("Content-Range"))
	if !ok {
		_ = resp.Body.Close()
		return false, nil
	}

	date, _ := time.Parse(http.TimeFormat, resp.Header.Get("Last-Modified"))
	if isLocalCacheFileUpToDate(c.File, date) {
		// Drain the (bounded) probe body before closing so the transport can
		// reuse the connection.
		_, _ = io.Copy(io.Discard, io.LimitReader(resp.Body, transfer.RangeProbeBodyMaxDiscard))
		_ = resp.Body.Close()
		logrus.Infoln(filepath.Base(c.File), "is up to date")
		return true, nil
	}

	chunkSize := c.effectiveParallelChunkSize()
	if contentLength <= int64(chunkSize) {
		// Object fits in a single chunk: parallelism buys nothing, fall back
		// to the sequential path.
		_, _ = io.Copy(io.Discard, io.LimitReader(resp.Body, transfer.RangeProbeBodyMaxDiscard))
		_ = resp.Body.Close()
		return false, nil
	}

	_, _ = io.Copy(io.Discard, io.LimitReader(resp.Body, transfer.RangeProbeBodyMaxDiscard))
	_ = resp.Body.Close()

	cleanedURL := url_helpers.CleanURL(c.URL)
	err = c.downloadParallel(contentLength, date, resp.Header.Get("ETag"), cleanedURL,
		headersToCacheMetadata(resp.Header), c.presignedRangeFetchChunk())
	return true, err
}

// downloadPresignedSequential performs the classic single-stream GET of the
// presigned URL and streams the body to disk via downloadAndSaveCache.
func (c *CacheExtractorCommand) downloadPresignedSequential() error {
	resp, err := c.getCache()
	if err != nil {
		return err
	}
	defer func() { _ = resp.Body.Close() }()

	upToDate, date := checkIfUpToDate(c.File, resp)
	if upToDate {
		logrus.Infoln(filepath.Base(c.File), "is up to date")
		return nil
	}

	etag := resp.Header.Get("ETag")
	cleanedURL := url_helpers.CleanURL(c.URL)
	contentLength := getRemoteCacheSize(resp)

	return c.downloadAndSaveCache(resp.Body, date, etag, cleanedURL, contentLength,
		headersToCacheMetadata(resp.Header))
}

// effectiveParallelChunkSize returns ChunkSize, falling back to the default
// when the configured value is zero or negative.
func (c *CacheExtractorCommand) effectiveParallelChunkSize() int {
	if c.ChunkSize <= 0 {
		return defaultCacheChunkSize
	}
	return c.ChunkSize
}

// presignedRangeFetchChunk returns a fetcher issuing
// "Range: bytes=offset-(offset+length-1)" GETs against the presigned URL;
// downloadParallel calls it once per chunk and closes the returned body.
func (c *CacheExtractorCommand) presignedRangeFetchChunk() func(offset, length int64) (io.ReadCloser, error) {
	return func(offset, length int64) (io.ReadCloser, error) {
		req, err := http.NewRequest(http.MethodGet, c.URL, nil)
		if err != nil {
			return nil, err
		}
		req.Header.Set("Range", fmt.Sprintf("bytes=%d-%d", offset, offset+length-1))

		resp, err := c.getClient().Do(req)
		if err != nil {
			return nil, err
		}
		// NOTE(review): 200 is accepted alongside 206; a 200 means the server
		// ignored Range — assumes the parallel reader consumes only `length`
		// bytes of the body, confirm in transfer.ParallelRangeDownload.
		if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusPartialContent {
			_ = resp.Body.Close()
			return nil, fmt.Errorf("range request failed: %s", resp.Status)
		}
		return resp.Body, nil
	}
}

// handleGoCloudURL downloads the cache object through the Go CDK blob layer,
// choosing parallel range reads when the feature flag, concurrency setting,
// and backend capabilities allow, and a sequential reader otherwise.
//nolint:gocognit // setup and parallel vs sequential branches
func (c *CacheExtractorCommand) handleGoCloudURL() error {
	if c.mux == nil {
		c.mux = blob.DefaultURLMux()
	}

	ctx, cancelWrite := context.WithCancel(context.Background())
	defer cancelWrite()

	u, err := url.Parse(c.GoCloudURL)
	if err != nil {
		return err
	}

	// Credentials for the blob driver may arrive via an env file.
	err = loadEnvFile(c.EnvFile)
	if err != nil {
		return err
	}

	objectName := strings.TrimLeft(u.Path, "/")
	if objectName == "" {
		return fmt.Errorf("no object name provided")
	}

	b, err := c.mux.OpenBucket(ctx, c.GoCloudURL)
	if err != nil {
		return err
	}
	defer b.Close()

	attrs, err := b.Attributes(ctx, objectName)
	if err != nil {
		// Ignore 404 errors
		if gcerrors.Code(err) == gcerrors.NotFound {
			return nil
		}

		// GoCloud returns the Unknown code at the moment when Forbidden is returned until
		// https://github.com/google/go-cloud/pull/3663 is merged.
		if u.Scheme == "s3" && strings.Contains(err.Error(), "StatusCode: 403") {
			return fmt.Errorf("%w: This 403 is expected if the file doesn't exist. See the behavior of HeadObject without s3::ListBucket permissions (https://docs.aws.amazon.com/AmazonS3/latest/API/API_HeadObject.html).", err)
		}

		return err
	}

	if isLocalCacheFileUpToDate(c.File, attrs.ModTime) {
		logrus.Infoln(filepath.Base(c.File), "is up to date")
		return nil
	}

	cleanedURL := url_helpers.CleanURL(c.GoCloudURL)

	// Use parallel range reads when FF_USE_PARALLEL_CACHE_TRANSFER is enabled, Concurrency > 1, and backend supports range.
	logger := logrus.WithField("name", featureflags.UseParallelCacheTransfer)
	if featureflags.IsOn(logger, os.Getenv(featureflags.UseParallelCacheTransfer)) && c.Concurrency > 1 && attrs.Size > 0 {
		//nolint:nestif
		if c.gocloudParallelRangeSupported(ctx, u.Scheme, b, objectName) {
			// Only objects larger than one chunk benefit from parallelism;
			// smaller ones fall through to the sequential reader below.
			if attrs.Size > int64(c.effectiveParallelChunkSize()) {
				fetchChunk := func(offset, length int64) (io.ReadCloser, error) {
					return b.NewRangeReader(ctx, objectName, offset, length, nil)
				}
				return c.downloadParallel(attrs.Size, attrs.ModTime, attrs.ETag, cleanedURL, attrs.Metadata, fetchChunk)
			}
		} else {
			logrus.Infoln("GoCloud backend does not support range reads, using sequential download")
		}
	}

	reader, err := b.NewReader(ctx, objectName, nil)
	if err != nil {
		return err
	}
	defer reader.Close()

	return c.downloadAndSaveCache(reader, attrs.ModTime, attrs.ETag, cleanedURL, attrs.Size, attrs.Metadata)
}

// downloadParallel writes content via concurrent range fetches using WriteAt at chunk offsets
// (bounded memory); the meter counts bytes via WriteAt. fetchChunk returns a reader for the
// given byte range; caller closes it.
func (c *CacheExtractorCommand) downloadParallel(contentLength int64, modTime time.Time, etag, cleanedURL string, metadata map[string]string, fetchChunk func(offset, length int64) (io.ReadCloser, error)) error { //nolint:gocognit file, err := os.CreateTemp(filepath.Dir(c.File), "cache") if err != nil { return err } tmpName := file.Name() defer func() { _ = os.Remove(tmpName) }() name := strings.TrimSuffix(filepath.Base(c.File), filepath.Ext(c.File)) if etag != "" { logrus.WithField(logFieldHTTPETag, etag).Infoln("Downloading", name, "from", cleanedURL, "(parallel)") } else { logrus.Infoln("Downloading", name, "from", cleanedURL, "(parallel)") } writer := meter.NewWriter( file, c.TransferMeterFrequency, meter.LabelledRateFormat(os.Stdout, "Downloading cache", contentLength), ) // writer.Close() closes the underlying file; we must not call file.Close() and we close writer only once (on each exit path below) chunkSize := int64(c.effectiveParallelChunkSize()) concurrency := c.Concurrency if concurrency < 1 { concurrency = 1 } destAt, ok := writer.(io.WriterAt) if !ok { _ = writer.Close() return fmt.Errorf("parallel cache download requires destination that implements io.WriterAt") } err = transfer.ParallelRangeDownload(contentLength, chunkSize, concurrency, destAt, fetchChunk) if err != nil { _ = writer.Close() return retryableErr{err: err} } if err := writer.Close(); err != nil { return err } // file is closed by writer.Close(); do not call file.Close() if err := os.Chtimes(tmpName, time.Now(), modTime); err != nil { return err } if err := os.Rename(tmpName, c.File); err != nil { return fmt.Errorf("renaming: %w", err) } return writeCacheMetadataFile(c.File, metadata) } func (c *CacheExtractorCommand) downloadAndSaveCache(reader io.Reader, date time.Time, etag, cleanedURL string, contentLength int64, metadata map[string]string) error { file, err := os.CreateTemp(filepath.Dir(c.File), "cache") if err != nil { return err } tmpName := file.Name() defer func() { _ = 
os.Remove(tmpName) }() // For legacy purposes, caches written to disk use the extension `.zip` // even when a different compression format is used. To avoid confusion, // we avoid the extension name in logs. name := strings.TrimSuffix(filepath.Base(c.File), filepath.Ext(c.File)) if etag != "" { logrus.WithField(logFieldHTTPETag, etag).Infoln("Downloading", name, "from", cleanedURL) } else { logrus.Infoln("Downloading", name, "from", cleanedURL) } writer := meter.NewWriter( file, c.TransferMeterFrequency, meter.LabelledRateFormat(os.Stdout, "Downloading cache", contentLength), ) // writer.Close() closes the underlying file; close writer only once per exit path (same as downloadParallel). buf := make([]byte, c.TransferBufferSize) _, err = io.CopyBuffer(writer, reader, buf) if err != nil { _ = writer.Close() return retryableErr{err: err} } err = os.Chtimes(tmpName, time.Now(), date) if err != nil { _ = writer.Close() return err } if err := writer.Close(); err != nil { return err } if err := os.Rename(tmpName, c.File); err != nil { return fmt.Errorf("renaming: %w", err) } return writeCacheMetadataFile(c.File, metadata) } func (c *CacheExtractorCommand) Execute(cliContext *cli.Context) { log.SetRunnerFormatter() c.normalizeExtractorArgs() if err := validateCacheTransferTuning(c.TransferBufferSize, c.ChunkSize, c.Concurrency); err != nil { logrus.Fatalln(err) } wd, err := os.Getwd() if err != nil { logrus.Fatalln("Unable to get working directory") } if c.File == "" { warningln("Missing cache file") } if c.URL != "" || c.GoCloudURL != "" { err := c.doRetry(c.download) if err != nil { warningln(err) } } else { logrus.Infoln( "No URL provided, cache will not be downloaded from shared cache server. 
" + "Instead a local version of cache will be extracted.") } f, size, format, err := openArchive(c.File) if os.IsNotExist(err) { warningln("Cache file does not exist") } if err != nil { logrus.Fatalln(err) } defer f.Close() extractor, err := archive.NewExtractor(format, f, size, wd) if err != nil { logrus.Fatalln(err) } err = extractor.Extract(context.Background()) if err != nil { logrus.Fatalln(err) } } func warningln(args interface{}) { logrus.Warningln(args) logrus.Exit(1) } ================================================ FILE: commands/helpers/cache_extractor_test.go ================================================ //go:build !integration package helpers import ( "archive/zip" "bytes" "context" "encoding/json" "fmt" "io" "net/http" "net/http/httptest" "net/url" "os" "path" "strconv" "strings" "testing" "time" "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gocloud.dev/blob" "gocloud.dev/blob/fileblob" "gitlab.com/gitlab-org/gitlab-runner/helpers" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" ) const ( cacheExtractorArchive = "archive.zip" cacheExtractorMetadata = "metadata.json" cacheExtractorTestArchivedFile = "archive_file" cacheExtractorTestFile = "test_file" ) type dirOpener struct { tmpDir string } func (o *dirOpener) OpenBucketURL(_ context.Context, u *url.URL) (*blob.Bucket, error) { return fileblob.OpenBucket(o.tmpDir, nil) } func setupGoCloudFileBucket(t *testing.T, scheme string) (m *blob.URLMux, bucketDir string) { tmpDir := t.TempDir() mux := new(blob.URLMux) fake := &dirOpener{tmpDir: tmpDir} mux.RegisterBucket(scheme, fake) return mux, tmpDir } func writeZipFile(t *testing.T, filename string) { var buf bytes.Buffer zipWriter := zip.NewWriter(&buf) f, err := zipWriter.Create(cacheExtractorTestArchivedFile) require.NoError(t, err) _, err = io.WriteString(f, "This is a test.") require.NoError(t, err) err = zipWriter.Close() require.NoError(t, err) outFile, err := 
os.Create(filename) require.NoError(t, err) defer outFile.Close() _, err = buf.WriteTo(outFile) if err != nil { require.NoError(t, err) } } func writeZipFileAndMetadata(t *testing.T, filename string) { writeZipFile(t, filename) attrFile := filename + ".attrs" json, err := json.Marshal(map[string]any{ "user.metadata": map[string]string{ "foo": "some foo", "blank": "", }, }) require.NoError(t, err, "marshaling blob attributes") err = os.WriteFile(attrFile, json, 0640) require.NoError(t, err, "writing blob attributes sidecar file") } func TestCacheExtractorValidArchive(t *testing.T) { expectedContents := bytes.Repeat([]byte("198273qhnjbqwdjbqwe2109u3abcdef3"), 1024*1024) OnEachZipExtractor(t, func(t *testing.T) { file, err := os.Create(cacheExtractorArchive) assert.NoError(t, err) defer file.Close() defer os.Remove(file.Name()) defer os.Remove(cacheExtractorTestArchivedFile) defer os.Remove(cacheExtractorTestFile) archive := zip.NewWriter(file) _, err = archive.Create(cacheExtractorTestArchivedFile) require.NoError(t, err) w, err := archive.Create(cacheExtractorTestFile) require.NoError(t, err) _, err = w.Write(expectedContents) require.NoError(t, err) archive.Close() _, err = os.Stat(cacheExtractorTestArchivedFile) require.Error(t, err) cmd := CacheExtractorCommand{ File: cacheExtractorArchive, } assert.NotPanics(t, func() { cmd.Execute(nil) }) _, err = os.Stat(cacheExtractorTestArchivedFile) assert.NoError(t, err) contents, err := os.ReadFile(cacheExtractorTestFile) assert.NoError(t, err) assert.Equal(t, expectedContents, contents) }) } func TestCacheExtractorForInvalidArchive(t *testing.T) { OnEachZipExtractor(t, func(t *testing.T) { removeHook := helpers.MakeFatalToPanic() defer removeHook() writeTestFile(t, cacheExtractorArchive) defer os.Remove(cacheExtractorArchive) cmd := CacheExtractorCommand{ File: cacheExtractorArchive, } assert.Panics(t, func() { cmd.Execute(nil) }) }) } func TestCacheExtractorForIfNoFileDefined(t *testing.T) { removeHook := 
helpers.MakeWarningToPanic() defer removeHook() cmd := CacheExtractorCommand{} assert.Panics(t, func() { cmd.Execute(nil) }) } func TestCacheExtractorForNotExistingFile(t *testing.T) { removeHook := helpers.MakeWarningToPanic() defer removeHook() cmd := CacheExtractorCommand{ File: "/../../../test.zip", } assert.Panics(t, func() { cmd.Execute(nil) }) } func testServeCacheWithETag(w http.ResponseWriter, r *http.Request) { w.Header().Set("ETag", "some-etag") testServeCache(w, r) } func testServeCache(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodGet { http.Error(w, "408 Method not allowed", http.StatusRequestTimeout) return } if r.URL.Path != "/cache.zip" { if r.URL.Path == "/timeout" { time.Sleep(50 * time.Millisecond) } http.NotFound(w, r) return } w.Header().Set("Last-Modified", time.Now().UTC().Format(http.TimeFormat)) w.Header().Set("x-fakeCloud-meta-foo", "some foo") w.Header().Set("x-random", "ignored") w.Header().Set("x-fakeClound-meta-blank", "") archive := zip.NewWriter(w) _, _ = archive.Create(cacheExtractorTestArchivedFile) archive.Close() } func TestCacheExtractorRemoteServerNotFound(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(testServeCache)) defer ts.Close() removeHook := helpers.MakeWarningToPanic() defer removeHook() cmd := CacheExtractorCommand{ File: "non-existing-test.zip", URL: ts.URL + "/invalid-file.zip", Timeout: 0, } assert.Panics(t, func() { cmd.Execute(nil) }) _, err := os.Stat(cacheExtractorTestArchivedFile) assert.Error(t, err) } func TestCacheExtractorRemoteServerTimedOut(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(testServeCache)) defer ts.Close() output := logrus.StandardLogger().Out var buf bytes.Buffer logrus.SetOutput(&buf) defer logrus.SetOutput(output) removeHook := helpers.MakeWarningToPanic() defer removeHook() cmd := CacheExtractorCommand{ File: "non-existing-test.zip", URL: ts.URL + "/timeout", } cmd.getClient().Timeout = 1 * time.Millisecond assert.Panics(t, func() { 
cmd.Execute(nil) }) assert.Contains(t, buf.String(), "Client.Timeout") _, err := os.Stat(cacheExtractorTestArchivedFile) assert.Error(t, err) } func TestCacheExtractorRemoteServer(t *testing.T) { testCases := map[string]struct { handler http.Handler goCloudURL bool }{ "no ETag": { handler: http.HandlerFunc(testServeCache), }, "ETag": { handler: http.HandlerFunc(testServeCacheWithETag), }, "GoCloud URL": { goCloudURL: true, }, } for tn, tc := range testCases { t.Run(tn, func(t *testing.T) { cdTempDir(t) removeHook := helpers.MakeWarningToPanic() t.Cleanup(removeHook) cmd := CacheExtractorCommand{ File: cacheExtractorArchive, Timeout: 0, } if tc.goCloudURL { mux, tmpDir := setupGoCloudFileBucket(t, "testblob") cmd.mux = mux cmd.GoCloudURL = fmt.Sprintf("testblob://bucket/%s", cacheExtractorArchive) testFile := path.Join(tmpDir, cacheExtractorArchive) writeZipFileAndMetadata(t, testFile) } else { ts := httptest.NewServer(tc.handler) t.Cleanup(ts.Close) cmd.URL = ts.URL + "/cache.zip" } assert.NotPanics(t, func() { cmd.Execute(nil) }) assert.FileExists(t, cacheExtractorTestArchivedFile, "cache file does not exist") err := os.Chtimes(cacheExtractorArchive, time.Now().Add(time.Hour), time.Now().Add(time.Hour)) assert.NoError(t, err) assert.FileExists(t, cacheExtractorMetadata, "cache metadata does not exist") data, err := os.ReadFile(cacheExtractorMetadata) assert.NoError(t, err, "reading cache metadata content") assert.Equal(t, `{"blank":"","foo":"some foo"}`, string(data), "unexpected cache metadata content") assert.NotPanics(t, func() { cmd.Execute(nil) }, "archive is up to date") }) } } func TestCacheExtractorRemoteServerFailOnInvalidServer(t *testing.T) { removeHook := helpers.MakeWarningToPanic() t.Cleanup(removeHook) cmd := CacheExtractorCommand{ File: cacheExtractorArchive, URL: "http://localhost:65333/cache.zip", Timeout: 0, } assert.Panics(t, func() { cmd.Execute(nil) }) _, err := os.Stat(cacheExtractorTestArchivedFile) assert.Error(t, err) } func 
TestIsLocalCacheFileUpToDate(t *testing.T) { tmpDir := t.TempDir() cacheFile := path.Join(tmpDir, "cache-file") // Create cache file err := os.WriteFile(cacheFile, []byte("test content"), 0644) require.NoError(t, err) // Set a specific modification time modTime := time.Now() err = os.Chtimes(cacheFile, modTime, modTime) require.NoError(t, err) // Test when remote file is older (cache is up to date) result := isLocalCacheFileUpToDate(cacheFile, modTime.Add(-1*time.Hour)) require.True(t, result, "Cache should be up to date when remote file is older") // Test when remote file is newer (cache is outdated) result = isLocalCacheFileUpToDate(cacheFile, modTime.Add(1*time.Hour)) require.False(t, result, "Cache should be outdated when remote file is newer") } // cdTempDir creates a temp dir and changes into it; after the test this directory is cleaned up automatically. func cdTempDir(t *testing.T) { t.Helper() pwd, err := os.Getwd() require.NoError(t, err, "getting current PWD") d := t.TempDir() require.NoError(t, os.Chdir(d), "changing into temp dir") t.Cleanup(func() { require.NoError(t, os.Chdir(pwd), "changing back into previous PWD") }) } // parallelTestZipBytes returns the same archive bytes as writeZipFile (small zip > default parallel chunk for tests that set a tiny ChunkSize). 
func parallelTestZipBytes(t *testing.T) []byte { t.Helper() var buf bytes.Buffer zipWriter := zip.NewWriter(&buf) f, err := zipWriter.Create(cacheExtractorTestArchivedFile) require.NoError(t, err) _, err = io.WriteString(f, "This is a test.") require.NoError(t, err) require.NoError(t, zipWriter.Close()) return buf.Bytes() } func parseBytesRangeHeader(h string) (start, end int64, ok bool) { const prefix = "bytes=" if !strings.HasPrefix(h, prefix) { return 0, 0, false } h = h[len(prefix):] i := strings.IndexByte(h, '-') if i < 0 { return 0, 0, false } start, err1 := strconv.ParseInt(h[:i], 10, 64) end, err2 := strconv.ParseInt(h[i+1:], 10, 64) if err1 != nil || err2 != nil { return 0, 0, false } return start, end, true } // testParallelPresignedCacheHandler serves a fixed payload with 206 + Content-Range for every Range GET (probe + chunk fetches). func testParallelPresignedCacheHandler(t *testing.T, payload []byte, lastModified string) http.HandlerFunc { t.Helper() return func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodGet || r.URL.Path != "/cache.zip" { http.NotFound(w, r) return } rangeHdr := r.Header.Get("Range") start, end, ok := parseBytesRangeHeader(rangeHdr) if !ok { http.Error(w, "missing or invalid Range", http.StatusBadRequest) return } if start < 0 || end < start || start >= int64(len(payload)) { http.Error(w, "range not satisfiable", http.StatusRequestedRangeNotSatisfiable) return } if end >= int64(len(payload)) { end = int64(len(payload)) - 1 } seg := payload[start : end+1] w.Header().Set("Content-Type", "application/zip") w.Header().Set("Last-Modified", lastModified) w.Header().Set("x-fakeCloud-meta-foo", "some foo") w.Header().Set("x-fakeCloud-meta-blank", "") w.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", start, end, len(payload))) w.WriteHeader(http.StatusPartialContent) _, _ = w.Write(seg) } } // TestCacheExtractorPresignedParallelTransfer exercises tryPresignedParallelDownload: Range probe, parallel chunk 
GETs, and WriteAt via the meter writer. func TestCacheExtractorPresignedParallelTransfer(t *testing.T) { t.Setenv(featureflags.UseParallelCacheTransfer, "true") cdTempDir(t) removeHook := helpers.MakeWarningToPanic() t.Cleanup(removeHook) payload := parallelTestZipBytes(t) require.Greater(t, len(payload), 32, "payload should span multiple parallel chunks") lm := time.Date(2020, 5, 1, 12, 0, 0, 0, time.UTC).Format(http.TimeFormat) ts := httptest.NewServer(testParallelPresignedCacheHandler(t, payload, lm)) t.Cleanup(ts.Close) cmd := CacheExtractorCommand{ File: cacheExtractorArchive, URL: ts.URL + "/cache.zip", Timeout: 0, ChunkSize: 7, Concurrency: 4, // TransferMeterFrequency left at 0 so meter.NewWriter returns *os.File (io.WriterAt) for parallel download. } assert.NotPanics(t, func() { cmd.Execute(nil) }) assert.FileExists(t, cacheExtractorTestArchivedFile) assert.FileExists(t, cacheExtractorMetadata) data, err := os.ReadFile(cacheExtractorMetadata) require.NoError(t, err) assert.JSONEq(t, `{"blank":"","foo":"some foo"}`, string(data)) got, err := os.ReadFile(cacheExtractorArchive) require.NoError(t, err) assert.Equal(t, payload, got) } // TestCacheExtractorGoCloudParallelTransfer exercises handleGoCloudURL with FF_USE_PARALLEL_CACHE_TRANSFER, range probe, and parallel NewRangeReader + WriteAt. 
func TestCacheExtractorGoCloudParallelTransfer(t *testing.T) { t.Setenv(featureflags.UseParallelCacheTransfer, "true") cdTempDir(t) removeHook := helpers.MakeWarningToPanic() t.Cleanup(removeHook) mux, tmpDir := setupGoCloudFileBucket(t, "testblob") testFile := path.Join(tmpDir, cacheExtractorArchive) writeZipFileAndMetadata(t, testFile) info, err := os.Stat(testFile) require.NoError(t, err) require.Greater(t, info.Size(), int64(32), "object should be larger than test chunk size") cmd := CacheExtractorCommand{ File: cacheExtractorArchive, GoCloudURL: fmt.Sprintf("testblob://bucket/%s", cacheExtractorArchive), Timeout: 0, mux: mux, ChunkSize: 32, Concurrency: 4, } assert.NotPanics(t, func() { cmd.Execute(nil) }) assert.FileExists(t, cacheExtractorTestArchivedFile) assert.FileExists(t, cacheExtractorMetadata) data, err := os.ReadFile(cacheExtractorMetadata) require.NoError(t, err) assert.JSONEq(t, `{"blank":"","foo":"some foo"}`, string(data)) } func TestGoCloudURLSchemeAssumesRangeSupport(t *testing.T) { t.Parallel() assert.True(t, goCloudURLSchemeAssumesRangeSupport("s3")) assert.True(t, goCloudURLSchemeAssumesRangeSupport("S3")) assert.True(t, goCloudURLSchemeAssumesRangeSupport("gs")) assert.True(t, goCloudURLSchemeAssumesRangeSupport("azblob")) assert.False(t, goCloudURLSchemeAssumesRangeSupport("testblob")) assert.False(t, goCloudURLSchemeAssumesRangeSupport("file")) } // TestUseParallelCacheTransferEnv checks env parsing for the feature flag. Parallel download wiring is covered by // TestCacheExtractorPresignedParallelTransfer and TestCacheExtractorGoCloudParallelTransfer. 
func TestUseParallelCacheTransferEnv(t *testing.T) { logger := logrus.WithField("name", featureflags.UseParallelCacheTransfer) t.Run("unset", func(t *testing.T) { t.Setenv(featureflags.UseParallelCacheTransfer, "") assert.False(t, featureflags.IsOn(logger, os.Getenv(featureflags.UseParallelCacheTransfer))) }) t.Run("false", func(t *testing.T) { t.Setenv(featureflags.UseParallelCacheTransfer, "false") assert.False(t, featureflags.IsOn(logger, os.Getenv(featureflags.UseParallelCacheTransfer))) }) t.Run("true", func(t *testing.T) { t.Setenv(featureflags.UseParallelCacheTransfer, "true") assert.True(t, featureflags.IsOn(logger, os.Getenv(featureflags.UseParallelCacheTransfer))) }) } ================================================ FILE: commands/helpers/cache_init.go ================================================ package helpers import ( "os" "github.com/sirupsen/logrus" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/common" ) // CacheInitCommand will take a single directory/file path and initialize it // correctly for it to be used for cache. This command tries to support spaces // in directories name by using the flags to specify which entries you want // to initialize. 
type CacheInitCommand struct{} func NewCacheInitCommand() cli.Command { return common.NewCommand("cache-init", "changed permissions for cache paths (internal)", &CacheInitCommand{}) } func (c *CacheInitCommand) Execute(ctx *cli.Context) { if ctx.NArg() == 0 { logrus.Fatal("No arguments passed, at least 1 path is required.") } for _, path := range ctx.Args() { err := os.Chmod(path, os.ModePerm) if err != nil { logrus.WithError(err).Error("failed to chmod path") } } } ================================================ FILE: commands/helpers/cache_init_integration_test.go ================================================ //go:build integration package helpers_test import ( "os" "path/filepath" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers" testHelpers "gitlab.com/gitlab-org/gitlab-runner/helpers" ) func newCacheInitTestApp() *cli.App { cmd := &helpers.CacheInitCommand{} app := cli.NewApp() app.Name = filepath.Base(os.Args[0]) app.Commands = append(app.Commands, cli.Command{ Name: "cache-init", Action: cmd.Execute, }) return app } func TestCacheInit(t *testing.T) { dir := t.TempDir() // Make sure that the mode is not the expected 0777. err := os.Chmod(dir, 0600) require.NoError(t, err) // Start a new cli with the arguments for the command. 
args := []string{os.Args[0], "cache-init", dir} err = newCacheInitTestApp().Run(args) require.NoError(t, err) info, err := os.Stat(dir) require.NoError(t, err) assert.Equal(t, os.ModeDir+os.ModePerm, info.Mode()) } func TestCacheInit_NoArguments(t *testing.T) { removeHook := testHelpers.MakeFatalToPanic() defer removeHook() args := []string{os.Args[0], "cache-init"} assert.Panics(t, func() { _ = newCacheInitTestApp().Run(args) }) } ================================================ FILE: commands/helpers/cache_metadata.go ================================================ package helpers import ( "encoding/json" "fmt" "net/http" "net/textproto" "os" "path/filepath" "strings" ) const ( // cacheMetadataFileName is the basename of the local metadata file, to be dropped alongside the actual archive cacheMetadataFileName = "metadata.json" ) // writeCacheMetadataFile dumps a file alongside the archive, holding all metadata. Before writing, the metadata keys // are normalized with [normalizeMetadataKey]. func writeCacheMetadataFile(archiveFilePath string, metadata map[string]string) error { normalized := map[string]string{} for k, v := range metadata { if k == "" { continue } normalized[normalizeCacheMetadataKey(k)] = v } // json.Marshal won't ever fail for map[string]string data, _ := json.Marshal(normalized) file := filepath.Join(filepath.Dir(archiveFilePath), cacheMetadataFileName) if err := os.WriteFile(file, data, 0640); err != nil { return fmt.Errorf("writing metadata file: %w", err) } return nil } // normalizeCacheMetadataKey normalizes a metadata key. This is done to be consistent, regardless where the metadata // actually came from (e.g.: user defined for uploads, from http headers for downloads) or which cloud providers are at // play. func normalizeCacheMetadataKey(key string) string { return strings.ToLower(textproto.CanonicalMIMEHeaderKey(key)) } // headersToCacheMetadata pulls out metadata from well-known http response headers. 
func headersToCacheMetadata(headers http.Header) map[string]string { metadata := map[string]string{} for headerKey := range headers { metaKey, ok := extractCacheMetadataKey(headerKey) if !ok { continue } metadata[metaKey] = headers.Get(headerKey) } return metadata } // extractCacheMetadataKey checks if headerKey looks like a http response header key for metadata, ie. something like // the headers below. If so, the actual metadata key is returned. // This is best-effort, at the time we pull caches with a pre-signed URL, we don't have any other information, and we // don't have creds to actually ask the cloud provider for metadata. // // http headers for metadata look something like: // - X-Goog-Meta-something... // - X-Amz-Meta-something... func extractCacheMetadataKey(headerKey string) (string, bool) { parts := strings.Split(headerKey, "-") if len(parts) < 4 { return "", false } isMetadataHeader := (strings.EqualFold(parts[0], "x") && strings.EqualFold(parts[2], "meta")) if isMetadataHeader { return strings.Join(parts[3:], "-"), true } return "", false } ================================================ FILE: commands/helpers/cache_metadata_test.go ================================================ //go:build !integration package helpers import ( "fmt" "os" "path/filepath" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestWriteCacheMetadataFile(t *testing.T) { tests := map[string]struct { metadata map[string]string archiveFile string expectWriteError bool expectedBlob string }{ "no metadata": { archiveFile: "archive.zip", expectedBlob: "{}", }, "no archive": { expectedBlob: "{}", }, "bubbles up write errors": { archiveFile: "some/path/which/does/not/exist/archive.zip", expectWriteError: true, }, "canonicalizes metadata keys": { metadata: map[string]string{ "FoO": "some Foo", "BAR": "some Bar", "": "nope", }, expectedBlob: `{"bar":"some Bar","foo":"some Foo"}`, }, } for name, test := range tests { t.Run(name, func(t 
*testing.T) { dir := t.TempDir() archiveFile := filepath.Join(dir, test.archiveFile) expectedMetadataFile := filepath.Join(filepath.Dir(archiveFile), "metadata.json") err := writeCacheMetadataFile(archiveFile, test.metadata) if test.expectWriteError { msg := "writing metadata file: open %s:" require.ErrorContains(t, err, fmt.Sprintf(msg, expectedMetadataFile)) return } require.NoError(t, err) b, err := os.ReadFile(expectedMetadataFile) require.NoError(t, err, "reading metadata file") assert.Equal(t, test.expectedBlob, string(b), "metadata file content") }) } } ================================================ FILE: commands/helpers/file_archiver.go ================================================ package helpers import ( "bufio" "bytes" "errors" "fmt" "io" "os" "os/exec" "path/filepath" "sort" "strings" "time" "github.com/bmatcuk/doublestar/v4" "github.com/sirupsen/logrus" ) type fileArchiver struct { Paths []string `long:"path" description:"Add paths to archive"` Exclude []string `long:"exclude" description:"Exclude paths from the archive"` Untracked bool `long:"untracked" description:"Add git untracked files"` Verbose bool `long:"verbose" description:"Detailed information"` wd string files map[string]os.FileInfo excluded map[string]int64 } func (c *fileArchiver) isChanged(modTime time.Time) bool { for _, info := range c.files { if modTime.Before(info.ModTime()) { return true } } return false } func (c *fileArchiver) isFileChanged(fileName string) bool { ai, err := os.Stat(fileName) if ai != nil { if !c.isChanged(ai.ModTime()) { return false } } else if !os.IsNotExist(err) { logrus.Warningln(err) } return true } func (c *fileArchiver) sortedFiles() []string { files := make([]string, len(c.files)) i := 0 for file := range c.files { files[i] = file i++ } sort.Strings(files) return files } func (c *fileArchiver) process(match string) bool { var absolute, relative string var err error absolute, err = filepath.Abs(match) if err == nil { // Let's try to find a real 
relative path to an absolute from working directory relative, err = filepath.Rel(c.wd, absolute) } if err == nil { // Process path only if it lives in our build directory if !strings.HasPrefix(relative, ".."+string(filepath.Separator)) { excluded, rule := c.isExcluded(relative) if excluded { c.exclude(rule) return false } err = c.add(relative) } else { err = errors.New("not supported: outside build directory") } } if err == nil { return true } if os.IsNotExist(err) { // We hide the error that file doesn't exist return false } logrus.Warningf("%s: %v", match, err) return false } func (c *fileArchiver) isExcluded(path string) (bool, string) { // Both path and pattern need to be normalized with filepath.ToSlash(). // Matching will fail with Windows machines using "\\" path separators and patterns with "/" path separators path = filepath.ToSlash(path) for _, pattern := range c.Exclude { relPattern, err := c.findRelativePathInProject(pattern) if err != nil { logrus.Warningf("isExcluded: %v", err.Error()) return false, "" } relPattern = filepath.ToSlash(relPattern) excluded, err := doublestar.Match(relPattern, path) if err == nil && excluded { return true, pattern } } return false, "" } func (c *fileArchiver) exclude(rule string) { c.excluded[rule]++ } func (c *fileArchiver) add(path string) error { // Always use slashes path = filepath.ToSlash(path) // Check if file exist info, err := os.Lstat(path) if err == nil { c.files[path] = info } return err } func (c *fileArchiver) processPaths() { for _, path := range c.Paths { c.processPath(path) } } func (c *fileArchiver) processPath(path string) { if path == "" { logrus.Warningf("No matching files. Path is empty.") return } rel, err := c.findRelativePathInProject(path) if err != nil { // Do not fail job when a file is invalid or not found. 
logrus.Warningf("processPath: %v", err.Error()) return } // Use WithNoFollow option to prevent symlink cycles during the initial glob matches, err := doublestar.FilepathGlob(rel, doublestar.WithNoFollow()) if err != nil { logrus.Warningf("%s: %v", path, err) return } found := 0 for _, match := range matches { err := filepath.Walk(match, func(path string, info os.FileInfo, err error) error { if c.process(path) { found++ } return nil }) if err != nil { logrus.Warningln("Walking", match, err) } } if found == 0 { logrus.Warningf( "%s: no matching files. Ensure that the artifact path is relative to the working directory (%s)", path, c.wd, ) } else { logrus.Infof("%s: found %d matching artifact files and directories", path, found) } } func (c *fileArchiver) findRelativePathInProject(path string) (string, error) { slashPath := filepath.ToSlash(path) if filepath.Clean(slashPath) == filepath.Clean(c.wd) { return ".", nil } base, patt := slashPath, "" // check if path contains a glob pattern if strings.ContainsAny(slashPath, "*?[{") { base, patt = doublestar.SplitPattern(slashPath) } abs, err := filepath.Abs(base) if err != nil { return "", fmt.Errorf("could not resolve artifact absolute path %s: %w", path, err) } rel, err := filepath.Rel(c.wd, abs) if err != nil { return "", fmt.Errorf("could not resolve artifact relative path %s: %w", path, err) } // If fully resolved relative path begins with ".." it is not a subpath of our working directory if strings.HasPrefix(rel, ".."+string(filepath.Separator)) || rel == ".." 
{ return "", fmt.Errorf("artifact path is not a subpath of project directory: %s", path) } // Relative path is needed now that our fsys "root" is at the working directory rel = filepath.Join(rel, patt) rel = filepath.FromSlash(rel) return rel, nil } func (c *fileArchiver) processUntracked() { if !c.Untracked { return } found := 0 var output bytes.Buffer cmd := exec.Command("git", "ls-files", "-o", "-z") cmd.Env = os.Environ() cmd.Stdout = &output cmd.Stderr = os.Stderr logrus.Debugln("Executing command:", strings.Join(cmd.Args, " ")) err := cmd.Run() if err != nil { logrus.Warningf("untracked: %v", err) return } reader := bufio.NewReader(&output) for { line, err := reader.ReadString(0) if err == io.EOF { break } else if err != nil { logrus.Warningln(err) break } if c.process(line[:len(line)-1]) { found++ } } if found == 0 { logrus.Warningf("untracked: no files") } else { logrus.Infof("untracked: found %d files", found) } } func (c *fileArchiver) enumerate() error { wd, err := os.Getwd() if err != nil { return fmt.Errorf("failed to get current working directory: %w", err) } c.wd = wd c.files = make(map[string]os.FileInfo) c.excluded = make(map[string]int64) c.processPaths() c.processUntracked() for path, count := range c.excluded { logrus.Infof("%s: excluded %d files", path, count) } return nil } ================================================ FILE: commands/helpers/file_archiver_integration_test.go ================================================ //go:build integration package helpers_test import ( "fmt" "maps" "os" "path/filepath" "slices" "strings" "testing" "time" "github.com/stretchr/testify/require" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers" ) func newFileArchiveInitTestApp(file string, paths []string) (*cli.App, *helpers.CacheArchiverCommand) { cmd := helpers.NewCacheArchiverCommandForTest(file, paths) app := cli.NewApp() app.Name = filepath.Base(os.Args[0]) app.Commands = append(app.Commands, cli.Command{ Name: 
"cache-archiver", Action: cmd.Execute, }) return app, &cmd } func TestFileArchiver(t *testing.T) { var err error // Create a temporary directory to hold our project parentDir, err := os.Getwd() require.NoError(t, err, "Error retrieving working directory") dir := filepath.Join( parentDir, fmt.Sprintf("test-%s-%s", t.Name(), time.Now().Format("20060102-150405.000")), ) err = os.MkdirAll(dir, 0755) require.NoError(t, err, "Error creating directory") archive := fmt.Sprintf("%s.%s", dir, "zip") paths := []string{"**/project"} t.Cleanup(func() { t.Logf("Removing temporary directory: %s", dir) os.RemoveAll(dir) t.Logf("Removing archive: %s", archive) os.RemoveAll(archive) }) files := setupEnvironment(t, fmt.Sprintf("%s", parentDir), dir) // Start a new cli with the arguments for the command. args := []string{os.Args[0], "cache-archiver"} app, cmd := newFileArchiveInitTestApp(archive, paths) err = app.Run(args) matches := helpers.GetMatches(cmd) require.ElementsMatch(t, files, slices.Collect(maps.Keys(matches)), "Elements in archive don't match with expected") } func setupEnvironment(t *testing.T, parentDir, dir string) []string { t.Helper() t.Logf("Creating project structure in: %s", dir) // Define project root path projectRoot := filepath.Join(dir, "project") dirs := []string{ projectRoot, filepath.Join(projectRoot, "folder1"), filepath.Join(projectRoot, "folder1", "subfolder"), filepath.Join(projectRoot, "folder2"), filepath.Join(projectRoot, "folder3"), filepath.Join(projectRoot, "selfreferential"), } for _, d := range dirs { err := os.MkdirAll(d, 0755) require.NoError(t, err, "Error creating directory") } files := []string{ filepath.Join(projectRoot, "folder1", "file1.txt"), filepath.Join(projectRoot, "folder1", "subfolder", "data.csv"), filepath.Join(projectRoot, "folder2", "file2.txt"), filepath.Join(projectRoot, "folder2", "report.csv"), filepath.Join(projectRoot, "folder3", "file3.csv"), } for _, f := range files { createFile(t, f) } symlinks := []struct{ target, 
link string }{ {"../folder2", filepath.Join(projectRoot, "folder1", "loop")}, {"../folder1/subfolder", filepath.Join(projectRoot, "folder2", "subfolder")}, {"../../folder1", filepath.Join(projectRoot, "folder1", "subfolder", "back")}, {"../folder3", filepath.Join(projectRoot, "folder2", "another")}, {"../folder1", filepath.Join(projectRoot, "folder3", "link_to_folder1")}, {".", filepath.Join(projectRoot, "selfreferential", "myself")}, } for _, s := range symlinks { err := os.Symlink(s.target, s.link) require.NoError(t, err, "Error creating symlink") } var createdPaths []string allPaths := append(dirs, files...) for _, s := range symlinks { allPaths = append(allPaths, s.link) } for _, path := range allPaths { relPath := trimPrefixes(path, parentDir, "/", "\\") createdPaths = append(createdPaths, strings.ReplaceAll(relPath, "\\", "/")) } return createdPaths } func trimPrefixes(s string, prefixes ...string) string { for _, prefix := range prefixes { if strings.HasPrefix(s, prefix) { s = strings.TrimPrefix(s, prefix) } } return s } func createFile(t *testing.T, path string) { t.Helper() file, err := os.Create(path) require.NoError(t, err, "creating file %q", path) require.NoError(t, file.Close(), "closing file %q", path) } ================================================ FILE: commands/helpers/file_archiver_test.go ================================================ //go:build !integration package helpers import ( "os" "path/filepath" "strings" "testing" "time" "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) const ( fileArchiverUntrackedFile = "untracked_test_file.txt" fileArchiverArchiveZipFile = "archive.zip" fileArchiverNotExistingFile = "not_existing_file.txt" fileArchiverAbsoluteFile = "/absolute.txt" fileArchiverAbsoluteDoubleStarFile = "/**/absolute.txt" fileArchiverRelativeFile = "../../../relative.txt" ) func TestGlobbedFilePath(t *testing.T) { // Set up directories used in all test cases const ( 
fileArchiverGlobPath = "foo/bar/baz" fileArchiverGlobPath2 = "foo/bar/baz2" ) err := os.MkdirAll(fileArchiverGlobPath, 0700) require.NoError(t, err, "Creating directory path: %s", fileArchiverGlobPath) defer os.RemoveAll(strings.Split(fileArchiverGlobPath, "/")[0]) err = os.MkdirAll(fileArchiverGlobPath2, 0700) require.NoError(t, err, "Creating directory path: %s", fileArchiverGlobPath2) defer os.RemoveAll(strings.Split(fileArchiverGlobPath2, "/")[0]) // Write a dir that is outside any glob patterns const ( fileArchiverGlobNonMatchingPath = "bar/foo" ) err = os.MkdirAll(fileArchiverGlobNonMatchingPath, 0700) writeTestFile(t, "bar/foo/test.txt") require.NoError(t, err, "Creating directory path: %s", fileArchiverGlobNonMatchingPath) defer os.RemoveAll(strings.Split(fileArchiverGlobNonMatchingPath, "/")[0]) workingDirectory, err := os.Getwd() require.NoError(t, err) testCases := map[string]struct { paths []string exclude []string // files that will be created and matched by the patterns expectedMatchingFiles []string // directories that will be matched by the patterns expectedMatchingDirs []string // files that will be created but will not be matched nonMatchingFiles []string // directories that will not be matched by the patterns nonMatchingDirs []string // files that are excluded by Exclude patterns excludedFilesCount int64 warningLog string }{ "find nothing with empty path": { paths: []string{""}, nonMatchingFiles: []string{ "file.txt", "foo/file.txt", "foo/bar/file.txt", "foo/bar/baz/file.txt", "foo/bar/baz/file.extra.dots.txt", }, warningLog: "No matching files. 
Path is empty.", }, "files by extension at several depths": { paths: []string{"foo/**/*.txt"}, expectedMatchingFiles: []string{ "foo/file.txt", "foo/bar/file.txt", "foo/bar/baz/file.txt", "foo/bar/baz/file.extra.dots.txt", }, nonMatchingFiles: []string{ "foo/file.txt.md", "foo/bar/file.txt.md", "foo/bar/baz/file.txt.md", "foo/bar/baz/file.extra.dots.txt.md", }, }, "files by extension at several depths - with exclude": { paths: []string{"foo/**/*.txt"}, exclude: []string{"foo/**/xfile.txt"}, expectedMatchingFiles: []string{ "foo/file.txt", "foo/bar/file.txt", "foo/bar/baz/file.txt", }, nonMatchingFiles: []string{ "foo/xfile.txt", "foo/bar/xfile.txt", "foo/bar/baz/xfile.txt", }, excludedFilesCount: 3, }, "double slash matches a single slash": { paths: []string{"foo//*.txt"}, expectedMatchingFiles: []string{ "foo/file.txt", }, nonMatchingFiles: []string{ "foo/bar/file.txt", "foo/bar/baz/file.txt", }, }, "double slash matches a single slash - with exclude": { paths: []string{"foo//*.txt"}, exclude: []string{"foo//*2.txt"}, expectedMatchingFiles: []string{ "foo/file.txt", }, nonMatchingFiles: []string{ "foo/file2.txt", "foo/bar/file.txt", }, excludedFilesCount: 1, }, "absolute path to working directory": { paths: []string{filepath.Join(workingDirectory, "*.thing")}, expectedMatchingFiles: []string{ "file.thing", }, nonMatchingFiles: []string{ "foo/file.thing", "foo/bar/file.thing", "foo/bar/baz/file.thing", }, }, "absolute path to working directory - with exclude": { paths: []string{filepath.Join(workingDirectory, "*.thing")}, exclude: []string{filepath.Join(workingDirectory, "*2.thing")}, expectedMatchingFiles: []string{ "file.thing", }, nonMatchingFiles: []string{ "file2.thing", }, excludedFilesCount: 1, }, "absolute path to nested directory": { paths: []string{filepath.Join(workingDirectory, "foo/bar/*.bin")}, expectedMatchingFiles: []string{ "foo/bar/file.bin", }, nonMatchingFiles: []string{ "foo/bar/file.txt", "foo/bar/baz/file.bin", }, }, "absolute path to nested 
directory - with exclude": { paths: []string{filepath.Join(workingDirectory, "foo/bar/*.bin")}, exclude: []string{filepath.Join(workingDirectory, "foo/bar/*2.bin")}, expectedMatchingFiles: []string{ "foo/bar/file.bin", }, nonMatchingFiles: []string{ "foo/bar/file2.bin", "foo/bar/file.txt", "foo/bar/baz/file.bin", }, excludedFilesCount: 1, }, "double slash and multiple stars - must be at least two dirs deep": { paths: []string{"./foo/**//*/*.*"}, expectedMatchingFiles: []string{ "foo/bar/file.bin", "foo/bar/file.txt", "foo/bar/baz/file.bin", "foo/bar/baz/file.txt", "foo/bar/baz2/file.bin", "foo/bar/baz2/file.txt", }, nonMatchingFiles: []string{ "foo/file.txt", }, }, "double slash and multiple stars - must be at least two dirs deep - with exclude": { paths: []string{"./foo/**//*/*.*"}, exclude: []string{"**/*.bin"}, expectedMatchingFiles: []string{ "foo/bar/file.txt", "foo/bar/baz/file.txt", "foo/bar/baz2/file.txt", }, nonMatchingFiles: []string{ "foo/file.txt", "foo/bar/file.bin", "foo/bar/baz/file.bin", "foo/bar/baz2/file.bin", }, excludedFilesCount: 3, }, "all the files": { paths: []string{"foo/**/*.*"}, expectedMatchingFiles: []string{ "foo/file.bin", "foo/file.txt", "foo/bar/file.bin", "foo/bar/file.txt", "foo/bar/baz/file.bin", "foo/bar/baz/file.txt", "foo/bar/baz2/file.bin", "foo/bar/baz2/file.txt", }, nonMatchingFiles: []string{}, }, "all the files - with exclude": { paths: []string{"foo/**/*.*"}, exclude: []string{"**/*.bin", "**/*even-this*"}, expectedMatchingFiles: []string{ "foo/file.txt", "foo/bar/file.txt", "foo/bar/baz/file.txt", "foo/bar/baz2/file.txt", }, nonMatchingFiles: []string{ "foo/wow-even-this.go", "foo/file.bin", "foo/bar/file.bin", "foo/bar/baz/file.bin", "foo/bar/baz2/file.bin", }, excludedFilesCount: 5, }, "all the things - dirs included": { paths: []string{"foo/**"}, expectedMatchingFiles: []string{ "foo/file.bin", "foo/file.txt", "foo/bar/file.bin", "foo/bar/file.txt", "foo/bar/baz/file.bin", "foo/bar/baz/file.txt", 
"foo/bar/baz2/file.bin", "foo/bar/baz2/file.txt", }, expectedMatchingDirs: []string{ "foo", "foo/bar", "foo/bar/baz", "foo/bar/baz2", }, nonMatchingFiles: []string{ "root.txt", }, }, "relative path that leaves project and returns": { paths: []string{filepath.Join("..", filepath.Base(workingDirectory), "foo/*.txt")}, expectedMatchingFiles: []string{ "foo/file.txt", }, }, "relative path that leaves project and returns - with exclude": { paths: []string{filepath.Join("..", filepath.Base(workingDirectory), "foo/*.txt")}, exclude: []string{filepath.Join("..", filepath.Base(workingDirectory), "foo/*2.txt")}, expectedMatchingFiles: []string{ "foo/file.txt", }, nonMatchingFiles: []string{ "foo/file2.txt", }, excludedFilesCount: 1, }, "invalid path": { paths: []string{">/**"}, warningLog: "no matching files. Ensure that the artifact path is relative to the working directory", }, "cancel out everything": { paths: []string{"**"}, exclude: []string{"**"}, warningLog: "no matching files. Ensure that the artifact path is relative to the working directory", }, } for testName, tc := range testCases { t.Run(testName, func(t *testing.T) { h := newLogHook(logrus.WarnLevel) logrus.AddHook(&h) for _, f := range tc.expectedMatchingFiles { writeTestFile(t, f) } for _, f := range tc.nonMatchingFiles { writeTestFile(t, f) } f := fileArchiver{ Paths: tc.paths, Exclude: tc.exclude, } err = f.enumerate() assert.NoError(t, err) sortedFiles := f.sortedFiles() assert.Len(t, sortedFiles, len(tc.expectedMatchingFiles)+len(tc.expectedMatchingDirs)) for _, p := range tc.expectedMatchingFiles { assert.Contains(t, f.sortedFiles(), p) } for _, p := range tc.expectedMatchingDirs { assert.Contains(t, f.sortedFiles(), p) } var excludedFilesCount int64 for _, v := range f.excluded { excludedFilesCount += v } if tc.excludedFilesCount > 0 { assert.Equal(t, tc.excludedFilesCount, excludedFilesCount) } if tc.warningLog != "" { require.Len(t, h.entries, 1) assert.Contains(t, h.entries[0].Message, tc.warningLog) 
} // remove test files from this test case // deferred removal will still happen if needed in the os.RemoveAll call above for _, f := range tc.expectedMatchingFiles { removeTestFile(t, f) } for _, f := range tc.nonMatchingFiles { removeTestFile(t, f) } }) } } func TestExcludedFilePaths(t *testing.T) { const fooTestDirectory = "foo/test/bar/baz" err := os.MkdirAll(fooTestDirectory, 0700) require.NoError(t, err, "could not create test directory") defer os.RemoveAll(strings.Split(fooTestDirectory, "/")[0]) existingFiles := []string{ "foo/test/bar/baz/1.txt", "foo/test/bar/baz/1.md", "foo/test/bar/baz/2.txt", "foo/test/bar/baz/2.md", "foo/test/bar/baz/3.txt", } for _, f := range existingFiles { writeTestFile(t, f) } f := fileArchiver{ Paths: []string{"foo/test/"}, Exclude: []string{"foo/test/bar/baz/3.txt", "foo/**/*.md"}, } err = f.enumerate() includedFiles := []string{ "foo/test", "foo/test/bar", "foo/test/bar/baz", "foo/test/bar/baz/1.txt", "foo/test/bar/baz/2.txt", } assert.NoError(t, err) assert.Equal(t, includedFiles, f.sortedFiles()) assert.Equal(t, 2, len(f.excluded)) require.Contains(t, f.excluded, "foo/test/bar/baz/3.txt") assert.Equal(t, int64(1), f.excluded["foo/test/bar/baz/3.txt"]) require.Contains(t, f.excluded, "foo/**/*.md") assert.Equal(t, int64(2), f.excluded["foo/**/*.md"]) } func Test_isExcluded(t *testing.T) { testCases := map[string]struct { pattern string path string match bool log string }{ `direct match`: { pattern: "file.txt", path: "file.txt", match: true, }, `pattern matches`: { pattern: "**/*.txt", path: "foo/bar/file.txt", match: true, }, `no match - pattern not in project`: { pattern: "../*.*", path: "file.txt", match: false, log: "isExcluded: artifact path is not a subpath of project directory: ../*.*", }, `no match - absolute pattern not in project`: { pattern: "/foo/file.txt", path: "file.txt", match: false, log: "isExcluded: artifact path is not a subpath of project directory: /foo/file.txt", }, } workingDirectory, err := os.Getwd() 
require.NoError(t, err) for testName, tc := range testCases { t.Run(testName, func(t *testing.T) { f := fileArchiver{ wd: workingDirectory, Exclude: []string{tc.pattern}, } h := newLogHook(logrus.WarnLevel) logrus.AddHook(&h) isExcluded, rule := f.isExcluded(tc.path) assert.Equal(t, tc.match, isExcluded) if tc.match { assert.Equal(t, tc.pattern, rule) } else { assert.Empty(t, rule) } if tc.log != "" { require.Len(t, h.entries, 1) assert.Contains(t, h.entries[0].Message, tc.log) } }) } } func TestCacheArchiverAddingUntrackedFiles(t *testing.T) { writeTestFile(t, artifactsTestArchivedFile) defer os.Remove(artifactsTestArchivedFile) writeTestFile(t, artifactsTestArchivedFile2) defer os.Remove(artifactsTestArchivedFile2) f := fileArchiver{ Untracked: true, } err := f.enumerate() assert.NoError(t, err) assert.Len(t, f.sortedFiles(), 2) assert.Contains(t, f.sortedFiles(), artifactsTestArchivedFile) assert.Contains(t, f.sortedFiles(), artifactsTestArchivedFile2) } func TestCacheArchiverAddingUntrackedUnicodeFiles(t *testing.T) { const fileArchiverUntrackedUnicodeFile = "неотслеживаемый_тестовый_файл.txt" writeTestFile(t, fileArchiverUntrackedUnicodeFile) defer os.Remove(fileArchiverUntrackedUnicodeFile) f := fileArchiver{ Untracked: true, } err := f.enumerate() assert.NoError(t, err) assert.Len(t, f.sortedFiles(), 1) assert.Contains(t, f.sortedFiles(), fileArchiverUntrackedUnicodeFile) } func TestCacheArchiverAddingFile(t *testing.T) { writeTestFile(t, fileArchiverUntrackedFile) defer os.Remove(fileArchiverUntrackedFile) f := fileArchiver{ Paths: []string{fileArchiverUntrackedFile}, } err := f.enumerate() assert.NoError(t, err) assert.Len(t, f.sortedFiles(), 1) assert.Contains(t, f.sortedFiles(), fileArchiverUntrackedFile) } func TestFileArchiverToFailOnAbsoluteFile(t *testing.T) { f := fileArchiver{ Paths: []string{fileArchiverAbsoluteFile}, } h := newLogHook(logrus.WarnLevel) logrus.AddHook(&h) err := f.enumerate() assert.NoError(t, err) assert.Empty(t, f.sortedFiles()) 
require.Len(t, h.entries, 1) assert.Contains(t, h.entries[0].Message, "artifact path is not a subpath of project directory") } func TestFileArchiverToSucceedOnAbsoluteFileInProject(t *testing.T) { path, err := os.Getwd() require.NoError(t, err) fpath := filepath.Join(path, "file.txt") writeTestFile(t, fpath) defer os.Remove(fpath) f := fileArchiver{ Paths: []string{fpath}, } err = f.enumerate() assert.NoError(t, err) assert.Len(t, f.sortedFiles(), 1) } func TestFileArchiverToNotAddFilePathOutsideProjectDirectory(t *testing.T) { f := fileArchiver{ Paths: []string{fileArchiverAbsoluteDoubleStarFile}, } h := newLogHook(logrus.WarnLevel) logrus.AddHook(&h) err := f.enumerate() assert.NoError(t, err) assert.Empty(t, f.sortedFiles()) require.Len(t, h.entries, 1) assert.Contains(t, h.entries[0].Message, "artifact path is not a subpath of project directory") } func TestFileArchiverToFailOnRelativeFile(t *testing.T) { f := fileArchiver{ Paths: []string{fileArchiverRelativeFile}, } h := newLogHook(logrus.WarnLevel) logrus.AddHook(&h) err := f.enumerate() assert.NoError(t, err) assert.Empty(t, f.sortedFiles()) require.Len(t, h.entries, 1) assert.Contains(t, h.entries[0].Message, "artifact path is not a subpath of project directory") } func TestFileArchiver_pathIsInProject(t *testing.T) { wd, err := os.Getwd() assert.NoError(t, err) c := &fileArchiver{ wd: wd, } testCases := map[string]struct { path string inProject bool errorExpected bool }{ `empty path not in project`: { path: "", inProject: false, }, `relative path in project`: { path: "in/the/project/for/realzy", inProject: true, }, `relative path not in project`: { path: "../nope", inProject: false, errorExpected: true, }, `relative path to parent directory with pattern - not in project`: { path: "../*.*", inProject: false, errorExpected: true, }, `absolute path in project`: { path: filepath.Join(wd, "yo/i/am/in"), inProject: true, }, `absolute path not in project`: { path: "/totally/not/in/the/project", inProject: false, 
errorExpected: true, }, `absolute path to working directory in project`: { path: wd, inProject: true, }, `relative path to working directory in project`: { path: filepath.Join("..", filepath.Base(wd)), inProject: true, }, `absolute path to working directory in project with trailing slash`: { path: wd + "/", inProject: true, }, `relative path to working directory in project with trailing slash`: { path: filepath.Join("..", filepath.Base(wd)) + "/", inProject: true, }, } for n, tc := range testCases { t.Run(n, func(t *testing.T) { _, err := c.findRelativePathInProject(tc.path) if tc.errorExpected { assert.Error(t, err) return } assert.NoError(t, err) }) } } func TestFileArchiverToAddNotExistingFile(t *testing.T) { f := fileArchiver{ Paths: []string{fileArchiverNotExistingFile}, } err := f.enumerate() assert.NoError(t, err) assert.Empty(t, f.sortedFiles()) } func TestFileArchiverChanged(t *testing.T) { writeTestFile(t, fileArchiverUntrackedFile) defer os.Remove(fileArchiverUntrackedFile) now := time.Now() require.NoError(t, os.Chtimes(fileArchiverUntrackedFile, now, now.Add(-time.Second))) f := fileArchiver{ Paths: []string{fileArchiverUntrackedFile}, } err := f.enumerate() require.NoError(t, err) assert.Len(t, f.sortedFiles(), 1) assert.False(t, f.isChanged(now.Add(time.Minute))) assert.True(t, f.isChanged(now.Add(-time.Minute))) } func TestFileArchiverFileIsNotChanged(t *testing.T) { now := time.Now() writeTestFile(t, fileArchiverUntrackedFile) defer os.Remove(fileArchiverUntrackedFile) writeTestFile(t, fileArchiverArchiveZipFile) defer os.Remove(fileArchiverArchiveZipFile) f := fileArchiver{ Paths: []string{fileArchiverUntrackedFile}, } err := f.enumerate() require.NoError(t, err) require.NoError(t, os.Chtimes(fileArchiverUntrackedFile, now, now.Add(-time.Second))) assert.False( t, f.isFileChanged(fileArchiverArchiveZipFile), "should return false if file was modified before the listed file", ) } func TestFileArchiverFileIsChanged(t *testing.T) { now := time.Now() 
writeTestFile(t, fileArchiverUntrackedFile) defer os.Remove(fileArchiverUntrackedFile) writeTestFile(t, fileArchiverArchiveZipFile) defer os.Remove(fileArchiverArchiveZipFile) f := fileArchiver{ Paths: []string{fileArchiverUntrackedFile}, } err := f.enumerate() require.NoError(t, err) require.NoError(t, os.Chtimes(fileArchiverArchiveZipFile, now, now.Add(-time.Minute))) assert.True(t, f.isFileChanged(fileArchiverArchiveZipFile), "should return true if file was modified") } func TestFileArchiverFileDoesNotExist(t *testing.T) { writeTestFile(t, fileArchiverUntrackedFile) defer os.Remove(fileArchiverUntrackedFile) f := fileArchiver{ Paths: []string{fileArchiverUntrackedFile}, } err := f.enumerate() require.NoError(t, err) assert.True( t, f.isFileChanged(fileArchiverNotExistingFile), "should return true if file doesn't exist", ) } func newLogHook(levels ...logrus.Level) logHook { return logHook{levels: levels} } type logHook struct { entries []*logrus.Entry levels []logrus.Level } func (s *logHook) Levels() []logrus.Level { return s.levels } func (s *logHook) Fire(entry *logrus.Entry) error { s.entries = append(s.entries, entry) return nil } ================================================ FILE: commands/helpers/health_check.go ================================================ package helpers import ( "context" "fmt" "net" "os" "strings" "sync" "time" "github.com/sirupsen/logrus" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/common" ) type HealthCheckCommand struct { ctx context.Context Ports []string `short:"p" long:"port" description:"Service port"` } func NewHealthCheckCommand() cli.Command { return common.NewCommand("health-check", "check health for a specific address", &HealthCheckCommand{}) } func (c *HealthCheckCommand) Execute(_ *cli.Context) { var ports []string var addr string var waitAll bool if c.ctx == nil { c.ctx = context.Background() } // If command-line ports were given, use those. Otherwise search the environment. 
The command-line // 'port' flag is used by the kubernetes executor, and in kubernetes the networking environment is // shared among all containers in the pod. So we use localhost instead of another tcp address. if len(c.Ports) > 0 { addr = "localhost" // The urfave/cli package gives us an unwanted trailing entry, which apparently contains the // concatenation of all the --port arguments. Elide it. ports = c.Ports[:len(c.Ports)-1] // For kubernetes port checks, wait for all services to respond. waitAll = true } else { for _, e := range os.Environ() { parts := strings.Split(e, "=") switch { case len(parts) != 2: continue case strings.HasSuffix(parts[0], "_TCP_ADDR"): addr = parts[1] case strings.HasSuffix(parts[0], "_TCP_PORT"): ports = append(ports, parts[1]) } } } if addr == "" || len(ports) == 0 { logrus.Fatalln("No HOST or PORT found") } fmt.Printf("waiting for TCP connection to %s on %v...\n", addr, ports) wg := sync.WaitGroup{} wg.Add(len(ports)) ctx, cancel := context.WithCancel(c.ctx) defer cancel() for _, port := range ports { go checkPort(ctx, addr, port, cancel, wg.Done, waitAll) } wg.Wait() } // checkPort will attempt to Dial the specified addr:port until successful. This function is intended to be run as a // go-routine and has the following exit criteria: // 1. A call to net.Dial is successful (i.e. does not return an error). A successful dial will also result in the // the passed context being cancelled. // 2. The passed context is cancelled. func checkPort(parentCtx context.Context, addr, port string, cancel func(), done func(), waitAll bool) { defer done() // If we're not awaiting all services, arrange to cancel the parent context as soon as // a dial succeeds. 
if !waitAll { defer cancel() } for { ctx, cancel := context.WithTimeout(parentCtx, 5*time.Minute) defer cancel() fmt.Printf("dialing %s:%s...\n", addr, port) conn, err := (&net.Dialer{}).DialContext(ctx, "tcp", net.JoinHostPort(addr, port)) if err != nil { if parentCtx.Err() != nil { return } time.Sleep(time.Second) continue } _ = conn.Close() fmt.Printf("dial succeeded on %s:%s. Exiting...\n", addr, port) return } } ================================================ FILE: commands/helpers/health_check_integration_test.go ================================================ //go:build integration package helpers import ( "context" "net" "os" "strconv" "strings" "testing" "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/helpers" ) func TestServiceWaiterCommand_NoEnvironmentVariables(t *testing.T) { removeHook := helpers.MakeFatalToPanic() defer removeHook() // Make sure there are no env vars that match the pattern for _, e := range os.Environ() { if strings.Contains(e, "_TCP_") { err := os.Unsetenv(strings.Split(e, "=")[0]) require.NoError(t, err) } } cmd := HealthCheckCommand{} assert.Panics(t, func() { cmd.Execute(nil) }) } func TestHealthCheckCommand_Execute(t *testing.T) { cases := []struct { name string expectedConnect bool exposeHigher bool exposeLower bool }{ { name: "Successful connect", expectedConnect: true, exposeHigher: false, exposeLower: false, }, { name: "Unsuccessful connect because service is down", expectedConnect: false, exposeHigher: false, exposeLower: false, }, { name: "Successful connect with higher port exposed", expectedConnect: true, exposeHigher: true, exposeLower: false, }, { name: "Successful connect with lower port exposed", expectedConnect: true, exposeHigher: false, exposeLower: true, }, { name: "Successful connect with both lower and higher port exposed", expectedConnect: true, exposeHigher: true, exposeLower: true, }, } for _, c := range cases { t.Run(c.name, func(t 
*testing.T) { os.Unsetenv("SERVICE_LOWER_TCP_PORT") os.Unsetenv("SERVICE_HIGHER_TCP_PORT") // Start listening to reverse addr listener, err := net.Listen("tcp", "127.0.0.1:") require.NoError(t, err) defer listener.Close() port := listener.Addr().(*net.TCPAddr).Port err = os.Setenv("SERVICE_TCP_ADDR", "127.0.0.1") require.NoError(t, err) err = os.Setenv("SERVICE_TCP_PORT", strconv.Itoa(port)) require.NoError(t, err) if c.exposeHigher { err = os.Setenv("SERVICE_HIGHER_TCP_PORT", strconv.Itoa(port+1)) require.NoError(t, err) } if c.exposeLower { err = os.Setenv("SERVICE_LOWER_TCP_PORT", strconv.Itoa(port-1)) require.NoError(t, err) } // If we don't expect to connect we close the listener. if !c.expectedConnect { listener.Close() } ctx, cancelFn := context.WithTimeout(context.Background(), 4*time.Second) defer cancelFn() done := make(chan struct{}) go func() { cmd := HealthCheckCommand{ctx: ctx} cmd.Execute(nil) done <- struct{}{} }() select { case <-ctx.Done(): if c.expectedConnect { require.Fail(t, "Timeout waiting to start service.") } case <-done: if !c.expectedConnect { require.Fail(t, "Expected to not connect to server") } } }) } } func TestHealthCheckCommand_WaitAll(t *testing.T) { // We might simulate as many as two services. const MAX_PORTS = 2 cases := []struct { name string successCount int expectedTimeout bool }{ { name: "Two services down", successCount: 0, expectedTimeout: true, }, { name: "One up one down", successCount: 1, expectedTimeout: true, }, { name: "Two services up", successCount: 2, expectedTimeout: false, }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { ports := make([]string, 0) for i := 0; i < c.successCount; i++ { listener, err := net.Listen("tcp", "127.0.0.1:") require.NoError(t, err) defer listener.Close() port := listener.Addr().(*net.TCPAddr).Port ports = append(ports, strconv.Itoa(port)) } // To simulate services that are down, find an unused port and increment port // numbers from there. 
unusedPort := 0 if c.successCount < MAX_PORTS { listener, err := net.Listen("tcp", "127.0.0.1:") require.NoError(t, err) unusedPort = listener.Addr().(*net.TCPAddr).Port listener.Close() } for i := c.successCount; i < MAX_PORTS; i++ { ports = append(ports, strconv.Itoa(unusedPort)) unusedPort++ } // The cli package provides an extra value at end of the args array ports = append(ports, "[unused value]") ctx, cancelFn := context.WithTimeout(context.Background(), 4*time.Second) defer cancelFn() done := make(chan struct{}) go func() { cmd := HealthCheckCommand{ ctx: ctx, Ports: ports, } cmd.Execute(nil) done <- struct{}{} }() select { case <-ctx.Done(): if !c.expectedTimeout { require.Fail(t, "Unexpected timeout") } case <-done: if c.expectedTimeout { require.Fail(t, "Unexpected failure to time out") } } }) } } ================================================ FILE: commands/helpers/helpers_archiver_test.go ================================================ // Helper functions that are shared between unit tests and integration tests package helpers import ( "testing" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive/fastzip" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive/tarzstd" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive/ziplegacy" ) func OnEachArchiver(t *testing.T, f func(t *testing.T, format archive.Format)) { archivers := map[string]struct { format archive.Format archiver archive.NewArchiverFunc extractor archive.NewExtractorFunc }{ "fastzip->legacy": {archive.Zip, fastzip.NewArchiver, ziplegacy.NewExtractor}, "fastzip->fastzip": {archive.Zip, fastzip.NewArchiver, fastzip.NewExtractor}, "zstd->legacy": {archive.ZipZstd, fastzip.NewZstdArchiver, ziplegacy.NewExtractor}, "zstd->fastzip": {archive.ZipZstd, fastzip.NewZstdArchiver, fastzip.NewExtractor}, "tarzstd": {archive.TarZstd, tarzstd.NewArchiver, tarzstd.NewExtractor}, } for name, a := range archivers { 
t.Run(name, func(t *testing.T) { prevArchiver, prevExtractor := archive.Register(a.format, a.archiver, a.extractor) t.Cleanup(func() { archive.Register(a.format, prevArchiver, prevExtractor) }) f(t, a.format) }) } } func OnEachZipArchiver(t *testing.T, f func(t *testing.T), include ...string) { archivers := map[string]archive.NewArchiverFunc{ "legacy": ziplegacy.NewArchiver, "fastzip": fastzip.NewArchiver, } for name, archiver := range archivers { if !hasArchiver(name, include) { continue } t.Run(name, func(t *testing.T) { prevArchiver, prevExtractor := archive.Register(archive.Zip, archiver, ziplegacy.NewExtractor) t.Cleanup(func() { archive.Register(archive.Zip, prevArchiver, prevExtractor) }) f(t) }) } } func OnEachZipExtractor(t *testing.T, f func(t *testing.T), include ...string) { extractors := map[string]archive.NewExtractorFunc{ "legacy": ziplegacy.NewExtractor, "fastzip": fastzip.NewExtractor, } for name, extractor := range extractors { if !hasArchiver(name, include) { continue } t.Run(name, func(t *testing.T) { prevArchiver, prevExtractor := archive.Register(archive.Zip, ziplegacy.NewArchiver, extractor) t.Cleanup(func() { archive.Register(archive.Zip, prevArchiver, prevExtractor) }) f(t) }) } } func hasArchiver(name string, include []string) bool { if len(include) == 0 { return true } for _, inc := range include { if inc == name { return true } } return false } ================================================ FILE: commands/helpers/helpers_cache_archiver_test.go ================================================ // Helper functions that are shared between unit tests and integration tests package helpers import ( "os" "time" "gocloud.dev/blob" ) // NewCacheArchiverCommandForTest exposes CacheArchiverCommand with fileArchiver to integration tests func NewCacheArchiverCommandForTest(file string, fileArchiverPaths []string) CacheArchiverCommand { return CacheArchiverCommand{ File: file, fileArchiver: fileArchiver{Paths: fileArchiverPaths}, } } func 
GetMatches(cmd *CacheArchiverCommand) map[string]os.FileInfo { return cmd.files } // SetCacheArchiverCommandMux allows integration tests to set mux func SetCacheArchiverCommandMux(cmd *CacheArchiverCommand, mux *blob.URLMux) { cmd.mux = mux } // SetCacheArchiverCommandClientTimeout allows integration tests to set the client timeout func SetCacheArchiverCommandClientTimeout(cmd *CacheArchiverCommand, timeout time.Duration) { cmd.getClient().Timeout = timeout } ================================================ FILE: commands/helpers/internal/store/store.go ================================================ package store import ( "bufio" "crypto/cipher" "crypto/rand" "crypto/sha256" "encoding/base64" "encoding/hex" "errors" "fmt" "io" "math" "os" "path/filepath" "sync" "golang.org/x/crypto/chacha20poly1305" ) type Store struct { pathname string f *os.File c cipher.AEAD mu sync.Mutex closed bool } func Open(dir string) (*Store, error) { pathname := filepath.Join(dir, "masking.db") sum := sha256.Sum256([]byte(pathname)) keyPath := filepath.Join(dir, "runner"+hex.EncodeToString(sum[:])) _ = os.MkdirAll(filepath.Dir(pathname), 0o755) _, err := os.Stat(pathname) if err != nil { // store file doesn't exist, so re-generate key if err := os.WriteFile(keyPath, generateKey(), 0o644); err != nil { return nil, fmt.Errorf("writing key: %w", err) } } f, err := openFile(pathname) if err != nil { return nil, fmt.Errorf("opening store file: %w", err) } info, err := f.Stat() if err != nil { return nil, fmt.Errorf("stat store file: %w", err) } if info.Size() == 0 { if _, err := f.Write(generateKey()); err != nil { return nil, fmt.Errorf("writing store key: %w", err) } _, _ = f.Seek(0, io.SeekStart) if err := f.Sync(); err != nil { return nil, err } } key, err := deriveEncryptionKey(f, keyPath) if err != nil { return nil, fmt.Errorf("deriving key: %w", err) } c, err := chacha20poly1305.NewX(key) if err != nil { return nil, err } return &Store{ pathname: pathname, f: f, c: c, }, nil } func 
(s *Store) List() ([]string, error) { buf := bufio.NewReader(io.NewSectionReader(s.f, 32, math.MaxInt64)) var results []string for { line, err := buf.ReadString('\n') if err != nil { if errors.Is(err, io.EOF) { return results, nil } return results, err } msg, err := base64.StdEncoding.DecodeString(line) if err != nil { return results, fmt.Errorf("decoding msg: %w", err) } if len(line) < s.c.NonceSize() { return results, fmt.Errorf("encrypted message length too small") } nonce, ciphertext := msg[:s.c.NonceSize()], msg[s.c.NonceSize():] plaintext, err := s.c.Open(nil, nonce, ciphertext, nil) if err != nil { return results, fmt.Errorf("opening encrypted message: %w", err) } results = append(results, string(plaintext)) } } func (s *Store) Add(phrase string) error { s.mu.Lock() defer s.mu.Unlock() if s.closed { return os.ErrClosed } input := []byte(phrase) nonce := make([]byte, s.c.NonceSize(), s.c.NonceSize()+len(input)+s.c.Overhead()) if _, err := rand.Read(nonce); err != nil { return err } line := base64.StdEncoding.EncodeToString(s.c.Seal(nonce, nonce, input, nil)) + "\n" if _, err := s.f.Write([]byte(line)); err != nil { return err } if err := s.f.Sync(); err != nil { return err } return nil } func (s *Store) Close() { s.mu.Lock() defer s.mu.Unlock() if s.closed { return } s.closed = true s.f.Close() } func generateKey() []byte { var b [32]byte _, _ = io.ReadFull(rand.Reader, b[:]) return b[:] } func deriveEncryptionKey(f *os.File, keyPath string) ([]byte, error) { var key1 [32]byte if _, err := io.ReadFull(f, key1[:]); err != nil { return nil, err } key2, err := os.ReadFile(keyPath) if err != nil { return nil, err } if len(key2) < len(key1) { return nil, fmt.Errorf("key1 and key2 not the same size") } for i := 0; i < len(key1); i++ { key1[i] ^= key2[i] } return key1[:], nil } ================================================ FILE: commands/helpers/internal/store/store_test.go ================================================ //go:build !integration package store 
import ( "crypto/sha256" "encoding/hex" "os" "path/filepath" "testing" "github.com/stretchr/testify/require" ) func TestOpen(t *testing.T) { t.Run("create and reopen", func(t *testing.T) { dir := t.TempDir() db, err := Open(dir) require.NoError(t, err) require.NoError(t, db.Add("test-secret")) db.Close() db, err = Open(dir) require.NoError(t, err) defer db.Close() items, err := db.List() require.NoError(t, err) require.Equal(t, []string{"test-secret"}, items) }) t.Run("recreates key when db missing", func(t *testing.T) { dir := t.TempDir() db, err := Open(dir) require.NoError(t, err) require.NoError(t, db.Add("old-secret")) db.Close() require.NoError(t, os.Remove(filepath.Join(dir, "masking.db"))) db, err = Open(dir) require.NoError(t, err) defer db.Close() items, err := db.List() require.NoError(t, err) require.Empty(t, items) }) t.Run("fails with missing key file", func(t *testing.T) { dir := t.TempDir() db, err := Open(dir) require.NoError(t, err) db.Close() pathname := filepath.Join(dir, "masking.db") sum := sha256.Sum256([]byte(pathname)) keyPath := filepath.Join(dir, "runner"+hex.EncodeToString(sum[:])) require.NoError(t, os.Remove(keyPath)) _, err = Open(dir) require.Error(t, err) }) } ================================================ FILE: commands/helpers/internal/store/store_unix.go ================================================ //go:build !windows package store import "os" func openFile(pathname string) (*os.File, error) { // Check if file exists before opening _, err := os.Stat(pathname) isNewFile := os.IsNotExist(err) f, err := os.OpenFile(pathname, os.O_APPEND|os.O_RDWR|os.O_CREATE, 0666) if err != nil { return nil, err } // Only chmod if we just created the file if isNewFile { if err := os.Chmod(pathname, 0666); err != nil { f.Close() return nil, err } } return f, nil } ================================================ FILE: commands/helpers/internal/store/store_unix_test.go ================================================ //go:build !windows && 
!integration package store import ( "os" "path/filepath" "syscall" "testing" "github.com/stretchr/testify/require" ) func TestOpenFilePermissions(t *testing.T) { t.Run("new file gets 0666 regardless of umask", func(t *testing.T) { oldUmask := syscall.Umask(0077) defer syscall.Umask(oldUmask) dir := t.TempDir() db, err := Open(dir) require.NoError(t, err) defer db.Close() info, err := os.Stat(filepath.Join(dir, "masking.db")) require.NoError(t, err) require.Equal(t, os.FileMode(0666), info.Mode().Perm()) }) t.Run("existing file permissions unchanged on reopen", func(t *testing.T) { dir := t.TempDir() db, err := Open(dir) require.NoError(t, err) db.Close() dbPath := filepath.Join(dir, "masking.db") require.NoError(t, os.Chmod(dbPath, 0600)) db, err = Open(dir) require.NoError(t, err) defer db.Close() info, err := os.Stat(dbPath) require.NoError(t, err) require.Equal(t, os.FileMode(0600), info.Mode().Perm()) }) } ================================================ FILE: commands/helpers/internal/store/store_windows.go ================================================ //go:build windows package store import ( "fmt" "os" "golang.org/x/sys/windows" ) // openFile is like os.OpenFile, but adds FILE_SHARE_DELETE, allowing the file // to be deleted, even when open, on Windows. 
func openFile(pathname string) (*os.File, error) { p, err := windows.UTF16PtrFromString(pathname) if err != nil { return nil, fmt.Errorf("converting pathname to UTF16: %w", err) } h, err := windows.CreateFile( p, windows.GENERIC_READ|windows.FILE_APPEND_DATA, windows.FILE_SHARE_READ|windows.FILE_SHARE_WRITE|windows.FILE_SHARE_DELETE, nil, windows.OPEN_ALWAYS, windows.FILE_ATTRIBUTE_NORMAL, 0, ) if err != nil { return nil, fmt.Errorf("creating file share file: %w", err) } return os.NewFile(uintptr(h), pathname), nil } ================================================ FILE: commands/helpers/internal/store/store_windows_test.go ================================================ //go:build windows && !integration package store import ( "crypto/sha256" "encoding/hex" "os" "path/filepath" "testing" "github.com/stretchr/testify/require" ) func TestDeleteOpenFile(t *testing.T) { dir := t.TempDir() pathname := filepath.Join(dir, "masking.db") sum := sha256.Sum256([]byte(pathname)) keyPath := filepath.Join(dir, "runner"+hex.EncodeToString(sum[:])) require.NoError(t, os.WriteFile(keyPath, nil, 0o640)) db, err := Open(dir) defer db.Close() require.NoError(t, err) require.NoError(t, os.Remove(pathname)) } ================================================ FILE: commands/helpers/meter/formatters.go ================================================ package meter import ( "fmt" "io" "math" "time" ) func FormatByteRate(b uint64, d time.Duration) string { b = uint64(float64(b) / math.Max(time.Nanosecond.Seconds(), d.Seconds())) rate, prefix := formatBytes(b) if prefix == 0 { return fmt.Sprintf("%d B/s", int(rate)) } return fmt.Sprintf("%.1f %cB/s", rate, prefix) } func FormatBytes(b uint64) string { size, prefix := formatBytes(b) if prefix == 0 { return fmt.Sprintf("%d B", int(size)) } return fmt.Sprintf("%.2f %cB", size, prefix) } func formatBytes(b uint64) (float64, byte) { const ( unit = 1000 prefix = "KMGTPE" ) if b < unit { return float64(b), 0 } div := int64(unit) exp := 0 for n := 
b / unit; n >= unit; n /= unit { div *= unit exp++ } return float64(b) / float64(div), prefix[exp] } func LabelledRateFormat(w io.Writer, label string, totalSize int64) UpdateCallback { return func(written uint64, since time.Duration, done bool) { known := "" if totalSize > UnknownTotalSize { known = "/" + FormatBytes(uint64(totalSize)) } line := fmt.Sprintf( "\r%s %s%s (%s) ", label, FormatBytes(written), known, FormatByteRate(written, since), ) if done { _, _ = fmt.Fprintln(w, line) return } _, _ = io.WriteString(w, line) } } ================================================ FILE: commands/helpers/meter/formatters_test.go ================================================ //go:build !integration package meter import ( "bytes" "fmt" "testing" "time" "github.com/stretchr/testify/assert" ) func TestFormatByteRate(t *testing.T) { tests := map[string]struct { size uint64 d time.Duration expected string }{ "format bytes": {1, time.Second, "1 B/s"}, "format kilobytes": {1000, time.Second, "1.0 KB/s"}, "format megabytes": {1000000, time.Second, "1.0 MB/s"}, "format gigabytes": {1000000000, time.Second, "1.0 GB/s"}, "format terabytes": {1000000000000, time.Second, "1.0 TB/s"}, "format petabytes": {1000000000000000, time.Second, "1.0 PB/s"}, "format exabytes": {1000000000000000000, time.Second, "1.0 EB/s"}, "format kilobytes under": {1490, time.Second, "1.5 KB/s"}, "format megabytes under": {1490000, time.Second, "1.5 MB/s"}, "format gigabytes under": {1490000000, time.Second, "1.5 GB/s"}, "format terabytes under": {1490000000000, time.Second, "1.5 TB/s"}, "format petabytes under": {1490000000000000, time.Second, "1.5 PB/s"}, "format exabytes under": {1490000000000000000, time.Second, "1.5 EB/s"}, "format kilobytes over": {1510, time.Second, "1.5 KB/s"}, "format megabytes over": {1510000, time.Second, "1.5 MB/s"}, "format gigabytes over": {1510000000, time.Second, "1.5 GB/s"}, "format terabytes over": {1510000000000, time.Second, "1.5 TB/s"}, "format petabytes over": 
{1510000000000000, time.Second, "1.5 PB/s"}, "format exabytes over": {1510000000000000000, time.Second, "1.5 EB/s"}, "format kilobytes exact": {1300, time.Second, "1.3 KB/s"}, "format megabytes exact": {1300000, time.Second, "1.3 MB/s"}, "format gigabytes exact": {1300000000, time.Second, "1.3 GB/s"}, "format terabytes exact": {1300000000000, time.Second, "1.3 TB/s"}, "format petabytes exact": {1300000000000000, time.Second, "1.3 PB/s"}, "format exabytes exact": {1300000000000000000, time.Second, "1.3 EB/s"}, "format bytes (non-second)": {10, 2 * time.Second, "5 B/s"}, "format bytes (zero-second)": {10, 0, "10.0 GB/s"}, } for name, tc := range tests { t.Run(name, func(t *testing.T) { assert.Equal(t, tc.expected, FormatByteRate(tc.size, tc.d)) }) } } func TestFormatBytes(t *testing.T) { tests := map[string]struct { size uint64 expected string }{ "format bytes": {1, "1 B"}, "format kilobytes": {1100, "1.10 KB"}, "format megabytes": {1110000, "1.11 MB"}, "format gigabytes": {1111000000, "1.11 GB"}, "format terabytes": {1111100000000, "1.11 TB"}, "format petabytes": {1111110000000000, "1.11 PB"}, "format exabytes": {1111110000000000000, "1.11 EB"}, } for name, tc := range tests { t.Run(name, func(t *testing.T) { assert.Equal(t, tc.expected, FormatBytes(tc.size)) }) } } func TestLabelledRateFormat(t *testing.T) { commonOutput := func(t *testing.T, line string, _ int64) { assert.Contains(t, line, "\rTesting formatter 10 B") assert.Contains(t, line, "(10 B/s)") } unknownTotalSizeOutput := func(t *testing.T, line string, total int64) { assert.NotContains(t, line, fmt.Sprintf("/%s", FormatBytes(uint64(total)))) } knownTotalSizeOutput := func(t *testing.T, line string, total int64) { assert.Contains(t, line, fmt.Sprintf("/%s", FormatBytes(uint64(total)))) } undoneOutput := func(t *testing.T, line string, _ int64) { assert.NotContains(t, line, "\n") } doneOutput := func(t *testing.T, line string, _ int64) { assert.Contains(t, line, "\n") } tests := map[string]struct { total 
int64 done bool assertOutput func(t *testing.T, line string, total int64) }{ "unknown total size undone": { total: UnknownTotalSize, done: false, assertOutput: func(t *testing.T, line string, total int64) { commonOutput(t, line, total) unknownTotalSizeOutput(t, line, total) undoneOutput(t, line, total) }, }, "unknown total size done": { total: UnknownTotalSize, done: true, assertOutput: func(t *testing.T, line string, total int64) { commonOutput(t, line, total) unknownTotalSizeOutput(t, line, total) doneOutput(t, line, total) }, }, "known total size undone": { total: 10, done: false, assertOutput: func(t *testing.T, line string, total int64) { commonOutput(t, line, total) knownTotalSizeOutput(t, line, total) undoneOutput(t, line, total) }, }, "known total size done": { total: 10, done: true, assertOutput: func(t *testing.T, line string, total int64) { commonOutput(t, line, total) knownTotalSizeOutput(t, line, total) doneOutput(t, line, total) }, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { buf := new(bytes.Buffer) fn := LabelledRateFormat(buf, "Testing formatter", tt.total) fn(10, 1*time.Second, tt.done) tt.assertOutput(t, buf.String(), tt.total) }) } } ================================================ FILE: commands/helpers/meter/meter.go ================================================ package meter import ( "sync" "sync/atomic" "time" ) const UnknownTotalSize = 0 type TransferMeterCommand struct { TransferMeterFrequency time.Duration `long:"transfer-meter-frequency" env:"TRANSFER_METER_FREQUENCY" description:"If set to more than 0s it enables an interactive transfer meter"` } type UpdateCallback func(written uint64, since time.Duration, done bool) type meter struct { count uint64 done, notify chan struct{} close sync.Once } func newMeter() *meter { return &meter{ done: make(chan struct{}), notify: make(chan struct{}), } } func (m *meter) start(frequency time.Duration, fn UpdateCallback) { if frequency < time.Second { frequency = time.Second } 
started := time.Now() go func() { defer close(m.done) ticker := time.NewTicker(frequency) defer ticker.Stop() for { fn(atomic.LoadUint64(&m.count), time.Since(started), false) select { case <-ticker.C: case <-m.notify: fn(atomic.LoadUint64(&m.count), time.Since(started), true) return } } }() } func (m *meter) doClose() { m.close.Do(func() { // notify we're done close(m.notify) // wait for close <-m.done }) } ================================================ FILE: commands/helpers/meter/reader.go ================================================ package meter import ( "io" "sync/atomic" "time" ) type reader struct { *meter r io.ReadCloser } func NewReader(r io.ReadCloser, frequency time.Duration, fn UpdateCallback) io.ReadCloser { if frequency == 0 { return r } m := &reader{ r: r, meter: newMeter(), } m.start(frequency, fn) return m } func (m *reader) Read(p []byte) (int, error) { n, err := m.r.Read(p) atomic.AddUint64(&m.count, uint64(n)) return n, err } func (m *reader) Close() error { m.doClose() return m.r.Close() } ================================================ FILE: commands/helpers/meter/reader_test.go ================================================ //go:build !integration package meter import ( "io" "strings" "sync" "testing" "time" "github.com/stretchr/testify/assert" ) func TestReader_New_NoUpdateFrequency(t *testing.T) { // the original io.ReadCloser is returned if the meter update frequency // is zero. 
reader := io.NopCloser(nil) m := NewReader(reader, 0, func(uint64, time.Duration, bool) {}) assert.Equal(t, reader, m) } func TestReader_New(t *testing.T) { complete := new(sync.WaitGroup) complete.Add(1) m := NewReader( io.NopCloser(strings.NewReader("foobar")), 50*time.Millisecond, func(written uint64, since time.Duration, done bool) { if done { assert.Equal(t, uint64(6), written) complete.Done() } }, ) _, err := io.Copy(io.Discard, m) assert.NoError(t, err) assert.NoError(t, m.Close()) complete.Wait() // another close shouldn't be a problem assert.NoError(t, m.Close()) } ================================================ FILE: commands/helpers/meter/writer.go ================================================ package meter import ( "errors" "io" "sync/atomic" "time" ) type writer struct { *meter w io.WriteCloser at io.WriterAt // optional: set when w also implements io.WriterAt (e.g. *os.File) } func NewWriter(w io.WriteCloser, frequency time.Duration, fn UpdateCallback) io.WriteCloser { if frequency == 0 { return w } mw := &writer{ w: w, meter: newMeter(), } if a, ok := w.(io.WriterAt); ok { mw.at = a } mw.start(frequency, fn) return mw } func (m *writer) Write(p []byte) (int, error) { n, err := m.w.Write(p) atomic.AddUint64(&m.count, uint64(n)) return n, err } func (m *writer) WriteAt(p []byte, off int64) (int, error) { if m.at == nil { return 0, errors.New("meter: underlying writer does not implement io.WriterAt") } n, err := m.at.WriteAt(p, off) atomic.AddUint64(&m.count, uint64(n)) return n, err } func (m *writer) Close() error { m.doClose() return m.w.Close() } ================================================ FILE: commands/helpers/meter/writer_test.go ================================================ //go:build !integration package meter import ( "bytes" "io" "os" "strings" "sync" "testing" "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) type nopWriteCloser struct { w io.Writer } func (wc *nopWriteCloser) Write(p []byte) 
(int, error) { return wc.w.Write(p) } func (wc *nopWriteCloser) Close() error { return nil } func TestWriter_New_NoUpdateFrequency(t *testing.T) { // the original io.ReadCloser is returned if the meter update frequency // is zero. writer := &nopWriteCloser{w: nil} m := NewWriter(writer, 0, func(uint64, time.Duration, bool) {}) assert.Equal(t, writer, m) } func TestWriter_New(t *testing.T) { complete := new(sync.WaitGroup) complete.Add(1) buf := new(bytes.Buffer) m := NewWriter( &nopWriteCloser{w: buf}, 50*time.Millisecond, func(written uint64, since time.Duration, done bool) { if done { assert.Equal(t, uint64(6), written) complete.Done() } }, ) _, err := io.Copy(m, strings.NewReader("foobar")) assert.NoError(t, err) assert.NoError(t, m.Close()) complete.Wait() // another close shouldn't be a problem assert.NoError(t, m.Close()) } func TestWriter_WriteAt_underlyingFile(t *testing.T) { f, err := os.CreateTemp(t.TempDir(), "meter-writeat") require.NoError(t, err) t.Cleanup(func() { _ = f.Close() }) complete := new(sync.WaitGroup) complete.Add(1) m := NewWriter(f, 50*time.Millisecond, func(written uint64, since time.Duration, done bool) { if done { assert.Equal(t, uint64(5), written) complete.Done() } }) wa, ok := m.(io.WriterAt) require.True(t, ok) n, err := wa.WriteAt([]byte("hello"), 0) require.NoError(t, err) assert.Equal(t, 5, n) require.NoError(t, m.Close()) complete.Wait() got, err := os.ReadFile(f.Name()) require.NoError(t, err) assert.Equal(t, "hello", string(got)) } ================================================ FILE: commands/helpers/mocks.go ================================================ // Code generated by mockery; DO NOT EDIT. // github.com/vektra/mockery // template: testify package helpers import ( mock "github.com/stretchr/testify/mock" ) // newMockLogStreamProvider creates a new instance of mockLogStreamProvider. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. 
// The first argument is typically a *testing.T value. func newMockLogStreamProvider(t interface { mock.TestingT Cleanup(func()) }) *mockLogStreamProvider { mock := &mockLogStreamProvider{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // mockLogStreamProvider is an autogenerated mock type for the logStreamProvider type type mockLogStreamProvider struct { mock.Mock } type mockLogStreamProvider_Expecter struct { mock *mock.Mock } func (_m *mockLogStreamProvider) EXPECT() *mockLogStreamProvider_Expecter { return &mockLogStreamProvider_Expecter{mock: &_m.Mock} } // Open provides a mock function for the type mockLogStreamProvider func (_mock *mockLogStreamProvider) Open() (readSeekCloser, error) { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for Open") } var r0 readSeekCloser var r1 error if returnFunc, ok := ret.Get(0).(func() (readSeekCloser, error)); ok { return returnFunc() } if returnFunc, ok := ret.Get(0).(func() readSeekCloser); ok { r0 = returnFunc() } else { if ret.Get(0) != nil { r0 = ret.Get(0).(readSeekCloser) } } if returnFunc, ok := ret.Get(1).(func() error); ok { r1 = returnFunc() } else { r1 = ret.Error(1) } return r0, r1 } // mockLogStreamProvider_Open_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Open' type mockLogStreamProvider_Open_Call struct { *mock.Call } // Open is a helper method to define mock.On call func (_e *mockLogStreamProvider_Expecter) Open() *mockLogStreamProvider_Open_Call { return &mockLogStreamProvider_Open_Call{Call: _e.mock.On("Open")} } func (_c *mockLogStreamProvider_Open_Call) Run(run func()) *mockLogStreamProvider_Open_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *mockLogStreamProvider_Open_Call) Return(readSeekCloserMoqParam readSeekCloser, err error) *mockLogStreamProvider_Open_Call { _c.Call.Return(readSeekCloserMoqParam, err) return _c } func (_c *mockLogStreamProvider_Open_Call) 
RunAndReturn(run func() (readSeekCloser, error)) *mockLogStreamProvider_Open_Call { _c.Call.Return(run) return _c } // newMockReadSeekCloser creates a new instance of mockReadSeekCloser. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func newMockReadSeekCloser(t interface { mock.TestingT Cleanup(func()) }) *mockReadSeekCloser { mock := &mockReadSeekCloser{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // mockReadSeekCloser is an autogenerated mock type for the readSeekCloser type type mockReadSeekCloser struct { mock.Mock } type mockReadSeekCloser_Expecter struct { mock *mock.Mock } func (_m *mockReadSeekCloser) EXPECT() *mockReadSeekCloser_Expecter { return &mockReadSeekCloser_Expecter{mock: &_m.Mock} } // Close provides a mock function for the type mockReadSeekCloser func (_mock *mockReadSeekCloser) Close() error { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for Close") } var r0 error if returnFunc, ok := ret.Get(0).(func() error); ok { r0 = returnFunc() } else { r0 = ret.Error(0) } return r0 } // mockReadSeekCloser_Close_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Close' type mockReadSeekCloser_Close_Call struct { *mock.Call } // Close is a helper method to define mock.On call func (_e *mockReadSeekCloser_Expecter) Close() *mockReadSeekCloser_Close_Call { return &mockReadSeekCloser_Close_Call{Call: _e.mock.On("Close")} } func (_c *mockReadSeekCloser_Close_Call) Run(run func()) *mockReadSeekCloser_Close_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *mockReadSeekCloser_Close_Call) Return(err error) *mockReadSeekCloser_Close_Call { _c.Call.Return(err) return _c } func (_c *mockReadSeekCloser_Close_Call) RunAndReturn(run func() error) *mockReadSeekCloser_Close_Call { _c.Call.Return(run) return _c } // Read 
provides a mock function for the type mockReadSeekCloser func (_mock *mockReadSeekCloser) Read(p []byte) (int, error) { ret := _mock.Called(p) if len(ret) == 0 { panic("no return value specified for Read") } var r0 int var r1 error if returnFunc, ok := ret.Get(0).(func([]byte) (int, error)); ok { return returnFunc(p) } if returnFunc, ok := ret.Get(0).(func([]byte) int); ok { r0 = returnFunc(p) } else { r0 = ret.Get(0).(int) } if returnFunc, ok := ret.Get(1).(func([]byte) error); ok { r1 = returnFunc(p) } else { r1 = ret.Error(1) } return r0, r1 } // mockReadSeekCloser_Read_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Read' type mockReadSeekCloser_Read_Call struct { *mock.Call } // Read is a helper method to define mock.On call // - p []byte func (_e *mockReadSeekCloser_Expecter) Read(p interface{}) *mockReadSeekCloser_Read_Call { return &mockReadSeekCloser_Read_Call{Call: _e.mock.On("Read", p)} } func (_c *mockReadSeekCloser_Read_Call) Run(run func(p []byte)) *mockReadSeekCloser_Read_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 []byte if args[0] != nil { arg0 = args[0].([]byte) } run( arg0, ) }) return _c } func (_c *mockReadSeekCloser_Read_Call) Return(n int, err error) *mockReadSeekCloser_Read_Call { _c.Call.Return(n, err) return _c } func (_c *mockReadSeekCloser_Read_Call) RunAndReturn(run func(p []byte) (int, error)) *mockReadSeekCloser_Read_Call { _c.Call.Return(run) return _c } // Seek provides a mock function for the type mockReadSeekCloser func (_mock *mockReadSeekCloser) Seek(offset int64, whence int) (int64, error) { ret := _mock.Called(offset, whence) if len(ret) == 0 { panic("no return value specified for Seek") } var r0 int64 var r1 error if returnFunc, ok := ret.Get(0).(func(int64, int) (int64, error)); ok { return returnFunc(offset, whence) } if returnFunc, ok := ret.Get(0).(func(int64, int) int64); ok { r0 = returnFunc(offset, whence) } else { r0 = ret.Get(0).(int64) } if returnFunc, ok := 
ret.Get(1).(func(int64, int) error); ok { r1 = returnFunc(offset, whence) } else { r1 = ret.Error(1) } return r0, r1 } // mockReadSeekCloser_Seek_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Seek' type mockReadSeekCloser_Seek_Call struct { *mock.Call } // Seek is a helper method to define mock.On call // - offset int64 // - whence int func (_e *mockReadSeekCloser_Expecter) Seek(offset interface{}, whence interface{}) *mockReadSeekCloser_Seek_Call { return &mockReadSeekCloser_Seek_Call{Call: _e.mock.On("Seek", offset, whence)} } func (_c *mockReadSeekCloser_Seek_Call) Run(run func(offset int64, whence int)) *mockReadSeekCloser_Seek_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 int64 if args[0] != nil { arg0 = args[0].(int64) } var arg1 int if args[1] != nil { arg1 = args[1].(int) } run( arg0, arg1, ) }) return _c } func (_c *mockReadSeekCloser_Seek_Call) Return(n int64, err error) *mockReadSeekCloser_Seek_Call { _c.Call.Return(n, err) return _c } func (_c *mockReadSeekCloser_Seek_Call) RunAndReturn(run func(offset int64, whence int) (int64, error)) *mockReadSeekCloser_Seek_Call { _c.Call.Return(run) return _c } // newMockLogOutputWriter creates a new instance of mockLogOutputWriter. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func newMockLogOutputWriter(t interface { mock.TestingT Cleanup(func()) }) *mockLogOutputWriter { mock := &mockLogOutputWriter{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // mockLogOutputWriter is an autogenerated mock type for the logOutputWriter type type mockLogOutputWriter struct { mock.Mock } type mockLogOutputWriter_Expecter struct { mock *mock.Mock } func (_m *mockLogOutputWriter) EXPECT() *mockLogOutputWriter_Expecter { return &mockLogOutputWriter_Expecter{mock: &_m.Mock} } // Write provides a mock function for the type mockLogOutputWriter func (_mock *mockLogOutputWriter) Write(s string) { _mock.Called(s) return } // mockLogOutputWriter_Write_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Write' type mockLogOutputWriter_Write_Call struct { *mock.Call } // Write is a helper method to define mock.On call // - s string func (_e *mockLogOutputWriter_Expecter) Write(s interface{}) *mockLogOutputWriter_Write_Call { return &mockLogOutputWriter_Write_Call{Call: _e.mock.On("Write", s)} } func (_c *mockLogOutputWriter_Write_Call) Run(run func(s string)) *mockLogOutputWriter_Write_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 string if args[0] != nil { arg0 = args[0].(string) } run( arg0, ) }) return _c } func (_c *mockLogOutputWriter_Write_Call) Return() *mockLogOutputWriter_Write_Call { _c.Call.Return() return _c } func (_c *mockLogOutputWriter_Write_Call) RunAndReturn(run func(s string)) *mockLogOutputWriter_Write_Call { _c.Run(run) return _c } ================================================ FILE: commands/helpers/proxy_exec.go ================================================ package helpers import ( "debug/buildinfo" "errors" "fmt" "io" "os" "os/exec" "path/filepath" "runtime/debug" "github.com/sirupsen/logrus" "github.com/urfave/cli" "gitlab.com/ajwalker/phrasestream/addmask" "gitlab.com/gitlab-org/gitlab-runner/commands/helpers/internal/store" 
"gitlab.com/gitlab-org/gitlab-runner/common" ) var ( stdout = io.Writer(os.Stdout) stderr = io.Writer(os.Stderr) ) type ProxyExecCommand struct { Bootstrap bool `long:"bootstrap" description:"bootstrap helper binary"` TempDir string `long:"temp-dir" description:"temporary directory"` } func NewProxyExecCommand() cli.Command { return common.NewCommand( "proxy-exec", "execute internal commands (internal)", &ProxyExecCommand{}, ) } type Proxy struct { store *store.Store addmask *addmask.AddMask } func NewProxy(dir string, stdout, stderr io.Writer) (*Proxy, error) { db, err := store.Open(dir) if err != nil { return nil, err } pe := &Proxy{store: db} pe.addmask, err = addmask.New(db, stdout, stderr) if err != nil { return nil, err } return pe, nil } func (p *Proxy) Stdout() io.Writer { return p.addmask.Get(0) } func (p *Proxy) Stderr() io.Writer { return p.addmask.Get(1) } func (p *Proxy) Close() error { p.store.Close() return p.addmask.Close() } func (c *ProxyExecCommand) Execute(cliContext *cli.Context) { args := cliContext.Args() if len(args) == 0 { logrus.Fatal("gitlab-runner-helper exec expected args") } dst := os.Getenv("RUNNER_TEMP_PROJECT_DIR") if dst == "" { dst = c.TempDir } if c.Bootstrap { if err := bootstrap(dst); err != nil { logrus.Fatalln("bootstrapping", err) } } proxy, err := NewProxy(dst, stdout, stderr) if err != nil { logrus.Fatalln("creating exec proxy", err) } cmd := exec.Command(args[0], args[1:]...) 
cmd.Stdin = os.Stdin cmd.Stdout = proxy.Stdout() cmd.Stderr = proxy.Stderr() err = errors.Join( cmd.Run(), proxy.Close(), ) if err != nil { logrus.Error(err) var exitError *exec.ExitError if errors.As(err, &exitError) { os.Exit(exitError.ExitCode()) } } } func bootstrap(dst string) error { src, _ := os.Executable() _ = os.MkdirAll(dst, 0o777) pathname := filepath.Join(dst, "gitlab-runner-helper") _, err := os.Stat(pathname) if err == nil { // if the path exists, check to see if it's identical by comparing build info buildInfoDst, err := buildinfo.ReadFile(pathname) if err != nil { return fmt.Errorf("reading build info of existing binary: %w", err) } buildInfoSrc, ok := debug.ReadBuildInfo() if ok && buildInfoDst.String() == buildInfoSrc.String() { return nil } } if err != nil && !errors.Is(err, os.ErrNotExist) { return fmt.Errorf("checking helper install: %w", err) } fsrc, err := os.Open(src) if err != nil { return fmt.Errorf("opening helper: %w", err) } defer fsrc.Close() fdst, err := os.CreateTemp(dst, "") if err != nil { return fmt.Errorf("creating temp file: %w", err) } defer os.RemoveAll(fdst.Name()) defer fdst.Close() if _, err := io.Copy(fdst, fsrc); err != nil { return fmt.Errorf("copying helper: %w", err) } if err := fdst.Close(); err != nil { return fmt.Errorf("closing helper: %w", err) } if err := os.Rename(fdst.Name(), pathname); err != nil { return fmt.Errorf("renaming helper: %w", err) } if err := os.Chmod(pathname, 0o777); err != nil { return fmt.Errorf("changing helper permissions: %w", err) } return nil } ================================================ FILE: commands/helpers/proxy_exec_test.go ================================================ //go:build !integration package helpers import ( "bytes" "io" "os" "path/filepath" "runtime" "testing" "github.com/stretchr/testify/require" "github.com/urfave/cli" clihelpers "gitlab.com/gitlab-org/golang-cli-helpers" ) func newProxyExecTestApp() *cli.App { cmd := &ProxyExecCommand{} app := cli.NewApp() 
app.Name = filepath.Base(os.Args[0])
	app.Commands = append(app.Commands, cli.Command{
		Name:   "proxy-exec",
		Action: cmd.Execute,
		Flags:  clihelpers.GetFlagsFromStruct(cmd),
	})

	return app
}

// TestProxyExec runs a simple command through proxy-exec and asserts its
// output is forwarded; without --bootstrap no helper binary is installed.
func TestProxyExec(t *testing.T) {
	dir := t.TempDir()

	cmd := []string{"echo", "foobar"}
	if runtime.GOOS == "windows" {
		cmd = []string{"cmd", "/C", "echo", "foobar"}
	}

	args := append([]string{os.Args[0], "proxy-exec", "--temp-dir", dir}, cmd...)

	app := newProxyExecTestApp()

	buf := new(bytes.Buffer)
	defer captureOutput(buf)()

	require.NoError(t, app.Run(args))
	require.Contains(t, buf.String(), "foobar")
	require.NoFileExists(t, filepath.Join(dir, "gitlab-runner-helper"))
}

// TestProxyExecBootstrap additionally passes --bootstrap and asserts the
// helper binary is copied into the temp dir.
func TestProxyExecBootstrap(t *testing.T) {
	dir := t.TempDir()

	cmd := []string{"echo", "bootstrapped"}
	if runtime.GOOS == "windows" {
		cmd = []string{"cmd", "/C", "echo", "bootstrapped"}
	}

	args := append([]string{os.Args[0], "proxy-exec", "--temp-dir", dir, "--bootstrap"}, cmd...)

	app := newProxyExecTestApp()

	buf := new(bytes.Buffer)
	defer captureOutput(buf)()

	require.NoError(t, app.Run(args))
	require.Contains(t, buf.String(), "bootstrapped")
	require.FileExists(t, filepath.Join(dir, "gitlab-runner-helper"))
}

// captureOutput redirects the package-level stdout/stderr writers to w and
// returns a function that restores them.
func captureOutput(w io.Writer) func() {
	stdout = w
	stderr = w

	return func() {
		stdout = os.Stdout
		stderr = os.Stderr
	}
}

================================================
FILE: commands/helpers/read_logs.go
================================================
package helpers

import (
	"bufio"
	"errors"
	"fmt"
	"io"
	"os"
	"time"

	"github.com/urfave/cli"

	"gitlab.com/gitlab-org/gitlab-runner/common"
)

const (
	defaultCheckFileExistsInterval = time.Second
	pollFileContentsTimeout        = 500 * time.Millisecond
	outputLogFileNotExistsExitCode = 100
)

var (
	errWaitingFileTimeout   = errors.New("timeout waiting for file to be created")
	errNoAttemptsToOpenFile = errors.New("no attempts to open log file configured")
)

// logStreamProvider opens the stream that job logs are read from.
type logStreamProvider interface {
	Open() (readSeekCloser, error)
}

// readSeekCloser combines io.ReadSeeker and io.Closer.
type readSeekCloser interface {
	io.ReadSeeker
	io.Closer
}

//
checkedFile checks whether a file exists when the underlying
// File's Read method returns io.EOF. If a file is deleted from
// the outside the Go file descriptor isn't invalidated and we
// keep getting io.EOF oblivious to the fact that the file
// no longer exists
type checkedFile struct {
	*os.File
}

// Read reads from the embedded file; on io.EOF it additionally stats the
// file by name so that deletion of the file surfaces as an os.ErrNotExist
// error instead of a never-ending stream of EOFs.
func (c *checkedFile) Read(p []byte) (int, error) {
	n, err := c.File.Read(p)
	if errors.Is(err, io.EOF) {
		_, statErr := os.Stat(c.File.Name())
		if os.IsNotExist(statErr) {
			err = statErr
		}
	}

	return n, err
}

// fileLogStreamProvider polls for the log file at path to appear, giving up
// after waitFileTimeout.
type fileLogStreamProvider struct {
	waitFileTimeout time.Duration
	path            string
}

// Open tries to open the file once per defaultCheckFileExistsInterval until
// waitFileTimeout is exhausted, returning the file wrapped in checkedFile.
func (p *fileLogStreamProvider) Open() (readSeekCloser, error) {
	attempts := int(p.waitFileTimeout / defaultCheckFileExistsInterval)
	if attempts < 1 {
		return nil, errNoAttemptsToOpenFile
	}

	for i := 0; i < attempts; i++ {
		f, err := os.Open(p.path)
		if os.IsNotExist(err) {
			time.Sleep(defaultCheckFileExistsInterval)
			continue
		}

		return &checkedFile{File: f}, err
	}

	return nil, errWaitingFileTimeout
}

// logOutputWriter receives formatted log lines.
type logOutputWriter interface {
	Write(string)
}

// streamLogOutputWriter writes log lines to an io.Writer (e.g. os.Stdout),
// ignoring write errors.
type streamLogOutputWriter struct {
	stream io.Writer
}

func (s *streamLogOutputWriter) Write(data string) {
	_, _ = io.WriteString(s.stream, data)
}

// ReadLogsCommand tails a job log file, emitting each line prefixed with the
// byte offset after that line. Used by the kubernetes executor.
type ReadLogsCommand struct {
	Path            string        `long:"path"`
	Offset          int64         `long:"offset"`
	WaitFileTimeout time.Duration `long:"wait-file-timeout"`

	logStreamProvider logStreamProvider
	logOutputWriter   logOutputWriter
	readerBufferSize  int
}

func NewReadLogsCommand() cli.Command {
	return common.NewCommand(
		"read-logs",
		"reads job logs from a file, used by kubernetes executor (internal)",
		newReadLogsCommand(),
	)
}

func newReadLogsCommand() *ReadLogsCommand {
	return &ReadLogsCommand{
		logOutputWriter:  &streamLogOutputWriter{stream: os.Stdout},
		readerBufferSize: common.DefaultReaderBufferSize,
		// by default check if the file exists at least once
		WaitFileTimeout: defaultCheckFileExistsInterval,
	}
}

// Execute runs the command. It exits with a dedicated exit code when the log
// file does not exist so callers can distinguish that case from other errors.
func (c *ReadLogsCommand) Execute(*cli.Context) {
	err := c.execute()
	switch {
	case os.IsNotExist(err):
		os.Exit(outputLogFileNotExistsExitCode)
	case err != nil:
		c.logOutputWriter.Write(fmt.Sprintf("error reading logs from %s: %v\n", c.Path, err))
		os.Exit(1)
	}
}

func (c *ReadLogsCommand) execute() error {
	c.logStreamProvider = &fileLogStreamProvider{
		waitFileTimeout: c.WaitFileTimeout,
		path:            c.Path,
	}

	return c.readLogs()
}

// readLogs tails the log stream indefinitely, returning only on a read error
// (for example, the file being deleted from under us).
func (c *ReadLogsCommand) readLogs() error {
	s, r, err := c.openFileReader()
	if err != nil {
		return err
	}
	defer s.Close()

	offset := c.Offset
	for {
		buf, err := r.ReadSlice('\n')
		if len(buf) > 0 {
			offset += int64(len(buf))

			// if the buffer was filled by a message larger than the
			// buffer size we must make sure that it ends with a new line
			// so it gets properly handled by the executor which splits by new lines
			if buf[len(buf)-1] != '\n' {
				buf = append(buf, '\n')
			}

			c.logOutputWriter.Write(fmt.Sprintf("%d %s", offset, buf))
		}

		// io.EOF means that we reached the end of the file
		// we try reading from it again to see if there are new contents
		// bufio.ErrBufferFull means that the message was larger than the buffer
		// we print the message so far along with a new line character
		// and continue reading the rest of it from the stream
		if errors.Is(err, io.EOF) {
			time.Sleep(pollFileContentsTimeout)
		} else if err != nil && !errors.Is(err, bufio.ErrBufferFull) {
			return err
		}
	}
}

// openFileReader opens the log stream, seeks to the configured offset and
// wraps the stream in a bufio.Reader of the configured size.
func (c *ReadLogsCommand) openFileReader() (readSeekCloser, *bufio.Reader, error) {
	s, err := c.logStreamProvider.Open()
	if err != nil {
		return nil, nil, err
	}

	_, err = s.Seek(c.Offset, io.SeekStart)
	if err != nil {
		_ = s.Close()
		return nil, nil, err
	}

	return s, bufio.NewReaderSize(s, c.readerBufferSize), nil
}

================================================
FILE: commands/helpers/read_logs_test.go
================================================
//go:build !integration

package helpers

import (
	"fmt"
	"os"
	"strings"
	"sync"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
"gitlab.com/gitlab-org/gitlab-runner/helpers/test"
)

func TestNewReadLogsCommandFileNotExist(t *testing.T) {
	cmd := newReadLogsCommand()
	cmd.logStreamProvider = &fileLogStreamProvider{
		waitFileTimeout: 2 * time.Second,
		path:            "not_exists",
	}

	err := cmd.readLogs()
	assert.ErrorIs(t, err, errWaitingFileTimeout)
}

func TestNewReadLogsCommandNoAttempts(t *testing.T) {
	cmd := newReadLogsCommand()
	cmd.WaitFileTimeout = 0

	err := cmd.execute()
	assert.ErrorIs(t, err, errNoAttemptsToOpenFile)
}

func TestNewReadLogsCommandFileSeekToInvalidLocation(t *testing.T) {
	testFile, cleanup := setupTestFile(t)
	defer cleanup()

	cmd := newReadLogsCommand()
	cmd.Path = testFile.Name()
	cmd.WaitFileTimeout = time.Minute
	cmd.Offset = -1 // a negative offset makes Seek fail with *os.PathError

	err := cmd.execute()
	var expectedErr *os.PathError
	assert.ErrorAs(t, err, &expectedErr)
}

// setupTestFile creates an empty temp file and returns it together with a
// cleanup function closing and removing it.
func setupTestFile(t *testing.T) (*os.File, func()) {
	f, err := os.CreateTemp("", "")
	require.NoError(t, err)

	cleanup := func() {
		_ = f.Close()
		_ = os.Remove(f.Name())
	}

	return f, cleanup
}

func TestNewReadLogsCommandFileLogStreamProviderCorrect(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)

	cmd := newReadLogsCommand()
	cmd.WaitFileTimeout = 10 * time.Second

	f, cleanup := setupTestFile(t)
	// delete the file while readLogs tails it; checkedFile then surfaces
	// os.ErrNotExist and the command returns
	time.AfterFunc(time.Second, cleanup)
	cmd.Path = f.Name()

	err := cmd.execute()
	assert.True(t, os.IsNotExist(err), "expected err %T, but got %T", os.ErrNotExist, err)
	assert.Equal(t, &fileLogStreamProvider{
		waitFileTimeout: cmd.WaitFileTimeout,
		path:            cmd.Path,
	}, cmd.logStreamProvider)
}

func TestNewReadLogsCommandLines(t *testing.T) {
	lines := []string{"1", "2", "3"}
	f, cleanup := setupTestFile(t)
	defer cleanup()
	appendToFile(t, f, lines)

	cmd := newReadLogsCommand()

	mockLogOutputWriter := newMockLogOutputWriter(t)
	_, wg := setupMockLogOutputWriterFromLines(mockLogOutputWriter, lines, 0)
	cmd.logOutputWriter = mockLogOutputWriter

	mockLogStreamProvider := newMockLogStreamProvider(t)
	mockLogStreamProvider.On("Open").Return(f, nil)
	cmd.logStreamProvider = mockLogStreamProvider

	// closing the file after all expected writes makes readLogs return
	go func() {
		wg.Wait()
		_ = f.Close()
	}()

	err := cmd.readLogs()
	var expectedErr *os.PathError
	assert.ErrorAs(t, err, &expectedErr)
}

// appendToFile appends lines (joined by \n, with no trailing newline) to f,
// opening it by name in append mode.
func appendToFile(t *testing.T, f *os.File, lines []string) {
	fw, err := os.OpenFile(f.Name(), os.O_WRONLY|os.O_APPEND, 0600)
	require.NoError(t, err)
	_, err = fw.Write([]byte(strings.Join(lines, "\n")))
	require.NoError(t, err)
	err = fw.Close()
	require.NoError(t, err)
}

// setupMockLogOutputWriterFromLines registers one Write expectation per line
// (formatted "<offset> <line>\n") and returns the final offset along with a
// WaitGroup that completes once every expected write happened.
func setupMockLogOutputWriterFromLines(lw *mockLogOutputWriter, lines []string, offset int) (int, *sync.WaitGroup) {
	var wg sync.WaitGroup
	wg.Add(len(lines))
	for i, l := range lines {
		offset += len(l)
		if i < len(lines)-1 {
			offset++ // account for the len of the newline character \n
		}
		lw.On("Write", fmt.Sprintf("%d %s\n", offset, l)).Run(func(mock.Arguments) {
			wg.Done()
		})
	}

	return offset, &wg
}

func TestNewReadLogsCommandWriteLinesWithDelay(t *testing.T) {
	lines1 := []string{"1", "2", "3"}
	lines2 := []string{"4", "5", "6"}

	f, cleanup := setupTestFile(t)
	defer cleanup()
	appendToFile(t, f, lines1)

	cmd := newReadLogsCommand()

	mockLogOutputWriter := newMockLogOutputWriter(t)
	offset, wg := setupMockLogOutputWriterFromLines(mockLogOutputWriter, lines1, 0)
	cmd.logOutputWriter = mockLogOutputWriter

	mockLogStreamProvider := newMockLogStreamProvider(t)
	mockLogStreamProvider.On("Open").Return(f, nil)
	cmd.logStreamProvider = mockLogStreamProvider

	go func() {
		wg.Wait()

		// append a second batch after a delay to exercise the EOF-poll loop
		time.Sleep(5 * time.Second)
		_, wg = setupMockLogOutputWriterFromLines(mockLogOutputWriter, lines2, offset)
		appendToFile(t, f, lines2)

		wg.Wait()
		_ = f.Close()
	}()

	err := cmd.readLogs()
	var expectedErr *os.PathError
	assert.ErrorAs(t, err, &expectedErr)
}

func TestSplitLinesAccordingToBufferSize(t *testing.T) {
	lines := []string{strings.Repeat("1", 32), strings.Repeat("2", 32)}
	f, cleanup := setupTestFile(t)
	defer cleanup()
	appendToFile(t, f, lines)

	cmd := newReadLogsCommand()
	cmd.readerBufferSize = 16 // this is the minimum allowed buffer size by bufio.NewReader

	mockLogOutputWriter := newMockLogOutputWriter(t)
	var wg sync.WaitGroup
	wg.Add(5)
	var wgDone = func(mock.Arguments) {
		wg.Done()
	}
	mockLogOutputWriter.On("Write", fmt.Sprintf("16 %s\n", strings.Repeat("1", 16))).Run(wgDone)
	mockLogOutputWriter.On("Write", fmt.Sprintf("32 %s\n", strings.Repeat("1", 16))).Run(wgDone)
	mockLogOutputWriter.On("Write", "33 \n").Run(wgDone)
	mockLogOutputWriter.On("Write", fmt.Sprintf("49 %s\n", strings.Repeat("2", 16))).Run(wgDone)
	mockLogOutputWriter.On("Write", fmt.Sprintf("65 %s\n", strings.Repeat("2", 16))).Run(wgDone)
	cmd.logOutputWriter = mockLogOutputWriter

	mockLogStreamProvider := newMockLogStreamProvider(t)
	mockLogStreamProvider.On("Open").Return(f, nil)
	cmd.logStreamProvider = mockLogStreamProvider

	go func() {
		wg.Wait()
		_ = f.Close()
	}()

	err := cmd.readLogs()
	var expectedErr *os.PathError
	assert.ErrorAs(t, err, &expectedErr)
}

func TestSeek(t *testing.T) {
	lines := []string{strings.Repeat("1", 32)}
	f, cleanup := setupTestFile(t)
	defer cleanup()
	appendToFile(t, f, lines)

	cmd := newReadLogsCommand()
	cmd.Offset = 16
	cmd.readerBufferSize = 16 // this is the minimum allowed buffer size by bufio.NewReader

	mockLogOutputWriter := newMockLogOutputWriter(t)
	var wg sync.WaitGroup
	wg.Add(1)
	var wgDone = func(mock.Arguments) {
		wg.Done()
	}
	mockLogOutputWriter.On("Write", fmt.Sprintf("32 %s\n", strings.Repeat("1", 16))).Run(wgDone)
	cmd.logOutputWriter = mockLogOutputWriter

	mockLogStreamProvider := newMockLogStreamProvider(t)
	mockLogStreamProvider.On("Open").Return(f, nil)
	cmd.logStreamProvider = mockLogStreamProvider

	go func() {
		wg.Wait()
		_ = f.Close()
	}()

	err := cmd.readLogs()
	var expectedErr *os.PathError
	assert.ErrorAs(t, err, &expectedErr)
}

================================================
FILE: commands/helpers/retry_helper.go
================================================
package helpers

import (
	"encoding/xml"
	"errors"
	"fmt"
	"io"
	"net/http"
	"time"

	"github.com/sirupsen/logrus"
)

// Cloud Providers supported currently send error in case of HTTP API request failure in XML Format
// The Format spec is the same
for: // GCS: https://cloud.google.com/storage/docs/xml-api/reference-status // AWS S3: https://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html#RESTErrorResponses // and Azure Blob Storage: https://learn.microsoft.com/en-us/rest/api/storageservices/status-and-error-codes2 // storageErrorResponse is used to deserialize such error responses and provide better error failures message in the log. type storageErrorResponse struct { XMLName xml.Name `xml:"Error"` Code string `xml:"Code"` Message string `xml:"Message"` } func (ser *storageErrorResponse) isValid() bool { return ser.Code != "" || ser.Message != "" } func (ser *storageErrorResponse) String() string { if !ser.isValid() { return "" } msg := "" if ser.Code != "" { msg = "code: " + ser.Code } if ser.Message != "" { msg += ", message: " + ser.Message } return msg } type retryHelper struct { Retry int `long:"retry" description:"How many times to retry upload"` RetryTime time.Duration `long:"retry-time" description:"How long to wait between retries"` } // retryableErr indicates that an error can be retried. To specify that an error // can be retried simply wrap the original error. For example: // // retryableErr{err: errors.New("some error")} type retryableErr struct { err error } func (e retryableErr) Unwrap() error { return e.err } func (e retryableErr) Error() string { return e.err.Error() } func (r *retryHelper) doRetry(handler func(int) error) error { err := handler(0) for retry := 1; retry <= r.Retry; retry++ { if _, ok := err.(retryableErr); !ok { return err } time.Sleep(r.RetryTime) logrus.WithError(err).Warningln("Retrying...") err = handler(retry) } return err } // retryOnServerError will take the response and check if the error should // be of type retryableErr or not. When the status code is of 5xx it will be a // retryableErr. 
func retryOnServerError(resp *http.Response) error { if resp.StatusCode/100 == 2 { return nil } errResp := &storageErrorResponse{} bodyBytes, _ := io.ReadAll(resp.Body) _ = resp.Body.Close() errMsg := fmt.Sprintf("received: %s", resp.Status) if err := xml.Unmarshal(bodyBytes, errResp); err == nil && errResp.isValid() { errMsg = fmt.Sprintf("%s. Request failed with %s", errMsg, errResp.String()) } err := errors.New(errMsg) if resp.StatusCode/100 == 5 { err = retryableErr{err: err} } return err } ================================================ FILE: commands/helpers/retry_helper_test.go ================================================ //go:build !integration package helpers import ( "errors" "fmt" "io" "net/http" "strings" "testing" "github.com/stretchr/testify/assert" ) func TestDoRetry(t *testing.T) { cases := []struct { name string err error expectedCount int }{ { name: "Error is of type retryableErr", err: retryableErr{err: errors.New("error")}, expectedCount: 4, }, { name: "Error is not type of retryableErr", err: errors.New("error"), expectedCount: 1, }, { name: "Error is nil", err: nil, expectedCount: 1, }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { r := retryHelper{ Retry: 3, } retryCount := 0 err := r.doRetry(func(_ int) error { retryCount++ return c.err }) assert.Equal(t, c.err, err) assert.Equal(t, c.expectedCount, retryCount) }) } } func TestRetryOnServerError(t *testing.T) { cases := map[string]struct { resp func() *http.Response err error }{ "successful request": { resp: func() *http.Response { return &http.Response{ Status: fmt.Sprintf("%d %s", http.StatusOK, http.StatusText(http.StatusOK)), StatusCode: http.StatusOK, } }, }, "failed request without xml format": { resp: func() *http.Response { return &http.Response{ Status: fmt.Sprintf("%d %s", http.StatusForbidden, http.StatusText(http.StatusForbidden)), StatusCode: http.StatusForbidden, Body: io.NopCloser(strings.NewReader("Forbidden")), } }, err: errors.New("received: 403 
Forbidden"), }, "failed request with xml format": { resp: func() *http.Response { return &http.Response{ Status: fmt.Sprintf("%d %s", http.StatusForbidden, http.StatusText(http.StatusForbidden)), StatusCode: http.StatusForbidden, Body: io.NopCloser(strings.NewReader(` UploadFailure Upload failure message `)), } }, err: errors.New("received: 403 Forbidden. Request failed with code: UploadFailure, message: Upload failure message"), }, } for tn, tc := range cases { t.Run(tn, func(t *testing.T) { err := retryOnServerError(tc.resp()) assert.Equal(t, tc.err, err) }) } } ================================================ FILE: commands/helpers/testdata/test-artifacts/file-0 ================================================ file-0 ================================================ FILE: commands/helpers/testdata/test-artifacts/file-1 ================================================ file-1 ================================================ FILE: commands/helpers/testdata/test-artifacts/file-2 ================================================ file-2 ================================================ FILE: commands/helpers/testdata/test-artifacts/file-3 ================================================ file-3 ================================================ FILE: commands/helpers/testdata/test-artifacts/file-4 ================================================ file-4 ================================================ FILE: commands/helpers_register_test.go ================================================ // Helper functions that are shared between unit tests and integration tests package commands import ( "bufio" "bytes" "os" "testing" "time" "github.com/sirupsen/logrus/hooks/test" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/executors" ) var RegisterTimeNowDate = time.Date(2020, 01, 01, 10, 10, 10, 0, time.UTC) // NewRegisterCommandForTest exposes RegisterCommand to integration tests func 
NewRegisterCommandForTest(reader *bufio.Reader, network common.Network, executorProviders executors.Providers) *RegisterCommand {
	cmd := newRegisterCommand(network, executorProviders)
	cmd.reader = reader
	// Pin "now" so registration timestamps are deterministic in assertions.
	cmd.timeNowFn = func() time.Time {
		return RegisterTimeNowDate
	}
	return cmd
}

// GetLogrusOutput concatenates every logrus entry captured by the test hook
// into a single string for substring assertions.
func GetLogrusOutput(t *testing.T, hook *test.Hook) string {
	buf := &bytes.Buffer{}
	for _, entry := range hook.AllEntries() {
		message, err := entry.String()
		require.NoError(t, err)
		buf.WriteString(message)
	}
	return buf.String()
}

// PrepareConfigurationTemplateFile writes content to a temporary template file
// and returns its path plus a cleanup function removing it.
func PrepareConfigurationTemplateFile(t *testing.T, content string) (string, func()) {
	file, err := os.CreateTemp("", "config.template.toml")
	require.NoError(t, err)
	defer func() {
		err = file.Close()
		require.NoError(t, err)
	}()

	_, err = file.WriteString(content)
	require.NoError(t, err)

	cleanup := func() {
		_ = os.Remove(file.Name())
	}

	return file.Name(), cleanup
}

================================================
FILE: commands/internal/configfile/configfile.go
================================================
package configfile

import (
	"fmt"
	"path/filepath"
	"sync"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/sirupsen/logrus"

	"gitlab.com/gitlab-org/gitlab-runner/common"
)

// ConfigFile owns a runner configuration loaded from pathname.
// mu guards cfg; systemID is resolved lazily on first Load when not seeded.
type ConfigFile struct {
	mu              sync.Mutex
	cfg             *common.Config
	systemID        string
	pathname        string
	accessCollector *configAccessCollector
}

// New builds a ConfigFile for pathname, applying functional options
// (pre-seeded config, fixed system ID, optional access metrics collector).
func New(pathname string, opts ...Option) *ConfigFile {
	var options options
	for _, opt := range opts {
		opt(&options)
	}

	cfg := &ConfigFile{pathname: pathname}
	if options.AccessCollector {
		cfg.accessCollector = newConfigAccessCollector()
	}
	cfg.cfg = options.Config
	cfg.systemID = options.SystemID

	return cfg
}

// Load re-reads the configuration from disk, validates it (best-effort only),
// stamps each runner with system ID / load time / config dir, and finally runs
// any WithMutateOnLoad hooks against the fresh config.
func (cf *ConfigFile) Load(opts ...LoadOption) error {
	var options loadOptions
	for _, opt := range opts {
		opt(&options)
	}

	cf.mu.Lock()
	defer cf.mu.Unlock()

	config := common.NewConfig()
	err := config.LoadConfig(cf.pathname)
	if err != nil {
		if cf.accessCollector != nil {
			cf.accessCollector.loadingError.Inc()
		}
		return err
	}

	// restore config saver
	if cf.cfg != nil {
		config.ConfigSaver = cf.cfg.ConfigSaver
	}

	// config validation is best-effort
	if err := validate(config); err != nil {
		logrus.Infof(
			"There might be a problem with your config based on "+
				"jsonschema annotations in common/config.go "+
				"(experimental feature):\n%v\n", err,
		)
	}

	if cf.accessCollector != nil {
		cf.accessCollector.loaded.Inc()
	}

	// Resolve the system ID from .runner_system_id next to the config file
	// only when it was not seeded via WithSystemID (or a previous Load).
	if cf.systemID == "" {
		systemIDState, err := newSystemIDState(filepath.Join(filepath.Dir(cf.pathname), ".runner_system_id"))
		if err != nil {
			return fmt.Errorf("loading system ID file: %w", err)
		}
		cf.systemID = systemIDState.GetSystemID()
	}

	cf.cfg = config

	for _, runnerCfg := range cf.cfg.Runners {
		runnerCfg.SystemID = cf.systemID
		runnerCfg.ConfigLoadedAt = time.Now()
		runnerCfg.ConfigDir = filepath.Dir(cf.pathname)
	}

	for _, mutate := range options.Mutate {
		if err := mutate(cf.cfg); err != nil {
			return fmt.Errorf("mutate config: %w", err)
		}
	}

	return nil
}

// SystemID returns the system ID resolved during Load (or seeded via option).
func (cf *ConfigFile) SystemID() string {
	return cf.systemID
}

// Save persists the in-memory configuration back to disk, updating the
// access-collector counters when enabled.
func (cf *ConfigFile) Save() error {
	err := cf.cfg.SaveConfig(cf.pathname)
	if err != nil {
		if cf.accessCollector != nil {
			cf.accessCollector.savingError.Inc()
		}
		return err
	}

	if cf.accessCollector != nil {
		cf.accessCollector.saved.Inc()
	}

	return nil
}

// Config returns the currently loaded configuration (mutex-guarded read).
func (cf *ConfigFile) Config() *common.Config {
	cf.mu.Lock()
	defer cf.mu.Unlock()

	return cf.cfg
}

// AccessCollector exposes the Prometheus collector for config file accesses.
// NOTE(review): when the collector is disabled this returns a typed-nil
// *configAccessCollector inside a non-nil interface — callers should not rely
// on a `!= nil` check to detect the disabled case. Verify callers.
func (cf *ConfigFile) AccessCollector() prometheus.Collector {
	return cf.accessCollector
}

================================================
FILE: commands/internal/configfile/configfile_test.go
================================================
//go:build !integration

package configfile

import (
	"os"
	"path/filepath"
	"testing"

	"github.com/sirupsen/logrus/hooks/test"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/common"
)

func Test_loadConfig(t *testing.T) {
	const expectedSystemIDRegexPattern = "^[sr]_[0-9a-zA-Z]{12}$"

	testCases := map[string]struct {
		runnerSystemID string
		prepareFn      func(t
*testing.T, systemIDFile string)
		assertFn       func(t *testing.T, err error, config *common.Config, systemIDFile string)
	}{
		// Missing ID file contents → Load must mint a new ID and persist it.
		"generates and saves missing system IDs": {
			runnerSystemID: "",
			assertFn: func(t *testing.T, err error, config *common.Config, systemIDFile string) {
				assert.NoError(t, err)
				require.Equal(t, 1, len(config.Runners))
				assert.NotEmpty(t, config.Runners[0].SystemID)
				content, err := os.ReadFile(systemIDFile)
				require.NoError(t, err)
				assert.Contains(t, string(content), config.Runners[0].SystemID)
			},
		},
		// A valid persisted ID must survive Load unchanged.
		"preserves existing unique system IDs": {
			runnerSystemID: "s_c2d22f638c25",
			assertFn: func(t *testing.T, err error, config *common.Config, _ string) {
				assert.NoError(t, err)
				require.Equal(t, 1, len(config.Runners))
				assert.Equal(t, "s_c2d22f638c25", config.Runners[0].SystemID)
			},
		},
		// Malformed contents are discarded and a fresh s_/r_ ID generated.
		"regenerates system ID if file is invalid": {
			runnerSystemID: "0123456789",
			assertFn: func(t *testing.T, err error, config *common.Config, _ string) {
				assert.NoError(t, err)
				require.Equal(t, 1, len(config.Runners))
				assert.Regexp(t, expectedSystemIDRegexPattern, config.Runners[0].SystemID)
			},
		},
		// Saving the new ID fails (read-only dir) — Load still succeeds.
		"succeeds if file cannot be created": {
			runnerSystemID: "",
			prepareFn: func(t *testing.T, systemIDFile string) {
				require.NoError(t, os.Remove(systemIDFile))
				require.NoError(t, os.Chmod(filepath.Dir(systemIDFile), os.ModeDir|0500))
			},
			assertFn: func(t *testing.T, err error, config *common.Config, _ string) {
				require.NoError(t, err)
				require.Equal(t, 1, len(config.Runners))
				assert.Regexp(t, expectedSystemIDRegexPattern, config.Runners[0].SystemID)
			},
		},
	}

	const config = `
[[runners]]
  name = "runner"
  token = "glrt-some-random-token"
  url = "https://some.gitlab.instance.tld/"
`

	for tn, tc := range testCases {
		t.Run(tn, func(t *testing.T) {
			dir := t.TempDir()
			cfgName := filepath.Join(dir, "config.toml")
			systemIDFile := filepath.Join(dir, ".runner_system_id")
			require.NoError(t, os.Chmod(dir, 0777))
			require.NoError(t, os.WriteFile(cfgName, []byte(config), 0777))
			require.NoError(t, os.WriteFile(systemIDFile, []byte(tc.runnerSystemID), 0777))

			if tc.prepareFn != nil {
				tc.prepareFn(t, systemIDFile)
			}

			logGlobal := test.NewGlobal()

			cfg := New(cfgName)
			err := cfg.Load()

			// The fixture config is schema-valid, so no validation warning
			// may be logged.
			for _, entry := range logGlobal.AllEntries() {
				assert.NotContains(t, entry.Message, "problem with your config based on jsonschema annotations")
			}

			tc.assertFn(t, err, cfg.Config(), systemIDFile)

			// Cleanup
			require.NoError(t, os.Chmod(dir, 0777))
		})
	}
}

================================================
FILE: commands/internal/configfile/metrics.go
================================================
package configfile

import "github.com/prometheus/client_golang/prometheus"

var (
	// compile-time check that the collector satisfies prometheus.Collector
	_ prometheus.Collector = &configAccessCollector{}
)

// configAccessCollector counts config file load/save attempts and failures.
type configAccessCollector struct {
	loadingError prometheus.Counter
	loaded       prometheus.Counter
	savingError  prometheus.Counter
	saved        prometheus.Counter
}

func newConfigAccessCollector() *configAccessCollector {
	return &configAccessCollector{
		loadingError: prometheus.NewCounter(prometheus.CounterOpts{
			Name: "gitlab_runner_configuration_loading_error_total",
			Help: "Total number of times the configuration file was not loaded by Runner process due to errors",
		}),
		loaded: prometheus.NewCounter(prometheus.CounterOpts{
			Name: "gitlab_runner_configuration_loaded_total",
			Help: "Total number of times the configuration file was loaded by Runner process",
		}),
		savingError: prometheus.NewCounter(prometheus.CounterOpts{
			Name: "gitlab_runner_configuration_saving_error_total",
			Help: "Total number of times the configuration file was not saved by Runner process due to errors",
		}),
		saved: prometheus.NewCounter(prometheus.CounterOpts{
			Name: "gitlab_runner_configuration_saved_total",
			Help: "Total number of times the configuration file was saved by Runner process",
		}),
	}
}

// Describe forwards the descriptors of all four counters.
func (c *configAccessCollector) Describe(descs chan<- *prometheus.Desc) {
	c.loadingError.Describe(descs)
	c.loaded.Describe(descs)
	c.savingError.Describe(descs)
	c.saved.Describe(descs)
}

func (c *configAccessCollector)
Collect(metrics chan<- prometheus.Metric) { c.loadingError.Collect(metrics) c.loaded.Collect(metrics) c.savingError.Collect(metrics) c.saved.Collect(metrics) } ================================================ FILE: commands/internal/configfile/options.go ================================================ package configfile import "gitlab.com/gitlab-org/gitlab-runner/common" type options struct { AccessCollector bool Config *common.Config SystemID string } type Option func(*options) func WithAccessCollector() Option { return func(o *options) { o.AccessCollector = true } } func WithExistingConfig(config *common.Config) Option { return func(o *options) { o.Config = config } } func WithSystemID(systemID string) Option { return func(o *options) { o.SystemID = systemID } } type loadOptions struct { Mutate []func(cfg *common.Config) error } type LoadOption func(*loadOptions) func WithMutateOnLoad(fn func(cfg *common.Config) error) LoadOption { return func(o *loadOptions) { o.Mutate = append(o.Mutate, fn) } } ================================================ FILE: commands/internal/configfile/system_id_state.go ================================================ package configfile import ( "crypto/hmac" "crypto/rand" "crypto/sha256" "encoding/hex" "fmt" "math/big" "os" "path/filepath" "regexp" "strings" "github.com/denisbrodbeck/machineid" "github.com/sirupsen/logrus" ) type systemIDState struct { systemID string } func newSystemIDState(filePath string) (*systemIDState, error) { state := &systemIDState{} err := state.loadFromFile(filePath) if err != nil { return nil, err } // ensure we have a system ID if state.GetSystemID() == "" { err = state.ensureSystemID() if err != nil { return nil, err } err = state.saveConfig(filePath) if err != nil { logrus. WithFields(logrus.Fields{ "state_file": filePath, "system_id": state.GetSystemID(), }). Warningf("Couldn't save new system ID on state file. 
"+
					"In order to reliably identify this runner in jobs with a known identifier,\n"+
					"please ensure there is a text file at the location specified in `state_file` "+
					"with the contents of `system_id`. Example: echo %q > %q\n",
					state.GetSystemID(), filePath)
		}
	}

	return state, nil
}

// GetSystemID returns the currently held system ID ("" when not yet resolved).
func (s *systemIDState) GetSystemID() string {
	return s.systemID
}

// loadFromFile reads and validates the persisted system ID. A missing file is
// not an error; an unparsable value simply leaves systemID empty so a new one
// is generated by the caller.
func (s *systemIDState) loadFromFile(filePath string) error {
	_, err := os.Stat(filePath)

	// permission denied is soft error
	if os.IsNotExist(err) {
		return nil
	} else if err != nil {
		return fmt.Errorf("opening runner system ID file: %w", err)
	}

	var contents []byte
	if contents, err = os.ReadFile(filePath); err != nil {
		return fmt.Errorf("reading from runner system ID file: %w", err)
	}

	// Return a system ID only if a properly formatted value is found
	systemID := strings.TrimSpace(string(contents))
	if ok, err := regexp.MatchString("^[sr]_[0-9a-zA-Z]{12}$", systemID); err == nil && ok {
		s.systemID = systemID
	} else if err != nil {
		return fmt.Errorf("checking runner system ID: %w", err)
	}

	return nil
}

// saveConfig persists the system ID to filePath, creating the directory first.
func (s *systemIDState) saveConfig(filePath string) error {
	// create directory to store configuration
	err := os.MkdirAll(filepath.Dir(filePath), 0700)
	if err != nil {
		return fmt.Errorf("creating directory: %w", err)
	}

	// write config file
	err = os.WriteFile(filePath, []byte(s.systemID), 0o600)
	if err != nil {
		return fmt.Errorf("writing the runner system ID: %w", err)
	}

	return nil
}

// ensureSystemID generates a system ID when none was loaded from disk.
func (s *systemIDState) ensureSystemID() error {
	if s.systemID != "" {
		return nil
	}

	if systemID, err := GenerateUniqueSystemID(); err == nil {
		logrus.WithField("system_id", systemID).Info("Created missing unique system ID")
		s.systemID = systemID
	} else {
		return fmt.Errorf("generating unique system ID: %w", err)
	}

	return nil
}

// GenerateUniqueSystemID derives a stable "s_"-prefixed ID from the machine ID
// (HMAC-SHA256 keyed by it, over "gitlab-runner"); when no machine ID is
// available it falls back to a random "r_"-prefixed ID.
func GenerateUniqueSystemID() (string, error) {
	const idLength = 12

	systemID, err := machineid.ID()
	if err == nil && systemID != "" {
		mac := hmac.New(sha256.New, []byte(systemID))
		mac.Write([]byte("gitlab-runner"))
		systemID = hex.EncodeToString(mac.Sum(nil))
		return "s_" + systemID[0:idLength], nil
	}

	// fallback to a random ID
	return generateRandomSystemID(idLength)
}

// generateRandomSystemID builds an "r_"-prefixed ID of idLength alphanumeric
// characters using crypto/rand (uniform via rand.Int, no modulo bias).
func generateRandomSystemID(idLength int) (string, error) {
	const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"

	b := make([]byte, idLength)
	max := big.NewInt(int64(len(charset)))
	for i := range b {
		r, err := rand.Int(rand.Reader, max)
		if err != nil {
			return "", err
		}
		b[i] = charset[r.Int64()]
	}

	return "r_" + string(b), nil
}

================================================
FILE: commands/internal/configfile/system_id_state_test.go
================================================
//go:build !integration

package configfile

import (
	"os"
	"regexp"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestSystemIDStateLoadFromFile(t *testing.T) {
	tests := map[string]struct {
		contents      string
		validateState func(t *testing.T, s *systemIDState)
	}{
		"parse system_id": {
			contents: `
s_c2d22f638c25
`,
			validateState: func(t *testing.T, s *systemIDState) {
				assert.Equal(t, "s_c2d22f638c25", s.GetSystemID())
			},
		},
		"parse empty system_id generates new": {
			contents: "",
			validateState: func(t *testing.T, s *systemIDState) {
				assert.Regexp(t, regexp.MustCompile("[rs]_[0-9a-zA-Z]{12}"), s.GetSystemID())
			},
		},
		"parse invalid system_id generates new": {
			contents: "foooooooor_000000000000barrrrr",
			validateState: func(t *testing.T, s *systemIDState) {
				assert.Regexp(t, regexp.MustCompile("[rs]_[0-9a-zA-Z]{12}"), s.GetSystemID())
			},
		},
		"parse valid system_id with garbage in the file header generates new": {
			contents: `
garbage
r_c2d22f638c25`,
			validateState: func(t *testing.T, s *systemIDState) {
				assert.Regexp(t, regexp.MustCompile("[rs]_[0-9a-zA-Z]{12}"), s.GetSystemID())
			},
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			stateFile, err := os.CreateTemp("", ".runner_system_id")
			require.NoError(t, err)
			_, err = stateFile.WriteString(tt.contents)
			require.NoError(t, err)
			_ = stateFile.Close()
			defer func() { _ = os.Remove(stateFile.Name()) }()

			state, err := newSystemIDState(stateFile.Name())
			assert.NoError(t, err)

			if tt.validateState != nil {
				tt.validateState(t, state)
			}
		})
	}
}

func TestSystemIDStateLoadFromMissingFile(t *testing.T) {
	stateFile, err := os.CreateTemp("", ".runner_system_id")
	require.NoError(t, err)
	stateFileName := stateFile.Name()
	_ = os.Remove(stateFileName)

	state, err := newSystemIDState(stateFileName)
	assert.NoError(t, err)
	assert.Regexp(t, regexp.MustCompile("[rs]_[0-9a-zA-Z]{12}"), state.GetSystemID())
}

func TestSaveSystemIDState(t *testing.T) {
	stateFile, err := os.CreateTemp("", ".runner_system_id")
	require.NoError(t, err)
	stateFileName := stateFile.Name()
	_ = stateFile.Close()
	defer func() { _ = os.Remove(stateFileName) }()

	state, err := newSystemIDState(stateFile.Name())
	assert.NoError(t, err)
	buf, err := os.ReadFile(stateFileName)
	require.NoError(t, err)
	assert.Equal(t, state.GetSystemID(), string(buf))
}

func TestSaveSystemIDStateToNonFile(t *testing.T) {
	// A path that cannot be written as a regular file must surface an error.
	stateFileName := os.TempDir() + "/."
	_, err := newSystemIDState(stateFileName)
	assert.Error(t, err)
}

================================================
FILE: commands/internal/configfile/validation.go
================================================
package configfile

import (
	"bytes"
	"encoding/json"

	jsonschema_generator "github.com/invopop/jsonschema"
	jsonschema_validator "github.com/santhosh-tekuri/jsonschema/v6"
	"github.com/sirupsen/logrus"

	"gitlab.com/gitlab-org/gitlab-runner/common"
)

var configSchema *jsonschema_validator.Schema

// init compiles a JSON schema from the jsonschema struct tags on
// common.Config. Any failure is swallowed (validation is best-effort),
// leaving configSchema nil; validate's recover then handles the nil deref.
func init() {
	defer func() {
		if r := recover(); r != nil {
			// Config validation is best-effort
			logrus.Warningf("Something went wrong creating config schema: %v", r)
		}
	}()

	r := &jsonschema_generator.Reflector{
		RequiredFromJSONSchemaTags: true,
		DoNotReference:             true,
	}

	schema, err := json.Marshal(r.Reflect(&common.Config{}))
	if err != nil {
		panic(err)
	}

	doc, err := jsonschema_validator.UnmarshalJSON(bytes.NewReader(schema))
	if err != nil {
		panic(err)
	}

	c := jsonschema_validator.NewCompiler()
	if err := c.AddResource("config_schema.json", doc); err != nil {
		panic(err)
	}

	configSchema = c.MustCompile("config_schema.json")
}

// validate checks config against the generated schema. Panics are recovered
// (returning nil), keeping validation strictly best-effort.
func validate(config *common.Config) error {
	defer func() {
		if r := recover(); r != nil {
			// Config validation is best-effort
			logrus.Warningf("Something went wrong validating config: %v", r)
		}
	}()

	// Validation must be done on generic types so we re-unmarshal the config into a JSON value
	configString, err := json.Marshal(config)
	if err != nil {
		panic(err)
	}

	jsonValue, err := jsonschema_validator.UnmarshalJSON(bytes.NewReader(configString))
	if err != nil {
		panic(err)
	}

	return configSchema.Validate(jsonValue)
}

================================================
FILE: commands/list.go
================================================
package commands

import (
	"github.com/sirupsen/logrus"
	"github.com/urfave/cli"

	"gitlab.com/gitlab-org/gitlab-runner/commands/internal/configfile"
	"gitlab.com/gitlab-org/gitlab-runner/common"
)

// ListCommand implements the "list" CLI command.
type ListCommand struct {
	ConfigFile
string `short:"c" long:"config" env:"CONFIG_FILE" description:"Config file"` } func NewListCommand() cli.Command { return common.NewCommand("list", "List all configured runners", &ListCommand{}) } func (c *ListCommand) Execute(context *cli.Context) { cfg := configfile.New(c.ConfigFile) err := cfg.Load() if err != nil { logrus.Warningln(err) return } logrus.WithFields(logrus.Fields{ "ConfigFile": c.ConfigFile, }).Println("Listing configured runners") for _, runner := range cfg.Config().Runners { logrus.WithFields(logrus.Fields{ "Executor": runner.RunnerSettings.Executor, "Token": runner.RunnerCredentials.Token, "URL": runner.RunnerCredentials.URL, }).Println(runner.Name) } } ================================================ FILE: commands/mocks.go ================================================ // Code generated by mockery; DO NOT EDIT. // github.com/vektra/mockery // template: testify package commands import ( mock "github.com/stretchr/testify/mock" ) // newMockRunAtTask creates a new instance of mockRunAtTask. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func newMockRunAtTask(t interface {
	mock.TestingT
	Cleanup(func())
}) *mockRunAtTask {
	mock := &mockRunAtTask{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

// mockRunAtTask is an autogenerated mock type for the runAtTask type
type mockRunAtTask struct {
	mock.Mock
}

// mockRunAtTask_Expecter provides the typed expectation API for mockRunAtTask.
type mockRunAtTask_Expecter struct {
	mock *mock.Mock
}

func (_m *mockRunAtTask) EXPECT() *mockRunAtTask_Expecter {
	return &mockRunAtTask_Expecter{mock: &_m.Mock}
}

// cancel provides a mock function for the type mockRunAtTask
func (_mock *mockRunAtTask) cancel() {
	_mock.Called()
	return
}

// mockRunAtTask_cancel_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'cancel'
type mockRunAtTask_cancel_Call struct {
	*mock.Call
}

// cancel is a helper method to define mock.On call
func (_e *mockRunAtTask_Expecter) cancel() *mockRunAtTask_cancel_Call {
	return &mockRunAtTask_cancel_Call{Call: _e.mock.On("cancel")}
}

func (_c *mockRunAtTask_cancel_Call) Run(run func()) *mockRunAtTask_cancel_Call {
	_c.Call.Run(func(args mock.Arguments) {
		run()
	})
	return _c
}

func (_c *mockRunAtTask_cancel_Call) Return() *mockRunAtTask_cancel_Call {
	_c.Call.Return()
	return _c
}

func (_c *mockRunAtTask_cancel_Call) RunAndReturn(run func()) *mockRunAtTask_cancel_Call {
	_c.Run(run)
	return _c
}

================================================
FILE: commands/multi.go
================================================
package commands

import (
	"context"
	"errors"
	"fmt"
	"net"
	"net/http"
	"net/http/pprof"
	"os"
	"os/signal"
	"path/filepath"
	"runtime"
	"strings"
	"sync"
	"syscall"
	"time"

	"github.com/kardianos/service"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/collectors"
	"github.com/prometheus/client_golang/prometheus/promhttp"
	"github.com/sirupsen/logrus"
	"github.com/urfave/cli"

	"gitlab.com/gitlab-org/gitlab-runner/cache"
	"gitlab.com/gitlab-org/gitlab-runner/commands/internal/configfile"
	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/gitlab-runner/executors"
	"gitlab.com/gitlab-org/gitlab-runner/helpers"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/certificate"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/process"
	prometheus_helper "gitlab.com/gitlab-org/gitlab-runner/helpers/prometheus"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/sentry"
	service_helpers "gitlab.com/gitlab-org/gitlab-runner/helpers/service"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/usage_log"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/usage_log/logrotate"
	"gitlab.com/gitlab-org/gitlab-runner/log"
	"gitlab.com/gitlab-org/gitlab-runner/network"
	"gitlab.com/gitlab-org/gitlab-runner/session"
)

// Label values for the worker-slot operations metric.
const (
	workerSlotOperationStarted = "started"
	workerSlotOperationStopped = "stopped"
)

// Label values for the worker processing-failure metric.
const (
	workerProcessingFailureOther          = "other"
	workerProcessingFailureNoFreeExecutor = "no_free_executor"
	workerProcessingFailureJobFailure     = "job_failure"
)

var (
	concurrentDesc = prometheus.NewDesc(
		"gitlab_runner_concurrent",
		"The current value of concurrent setting",
		nil,
		nil,
	)

	// NOTE(review): help text appears copy-pasted from concurrentDesc — for a
	// metric named gitlab_runner_limit one would expect "limit setting";
	// confirm before changing, as help strings may be relied upon downstream.
	limitDesc = prometheus.NewDesc(
		"gitlab_runner_limit",
		"The current value of concurrent setting",
		[]string{"runner", "runner_name", "system_id"},
		nil,
	)
)

// runAtTask is a cancelable deferred task handle.
type runAtTask interface {
	cancel()
}

// runAtTimerTask adapts *time.Timer to runAtTask.
type runAtTimerTask struct {
	timer *time.Timer
}

func (t *runAtTimerTask) cancel() {
	t.timer.Stop()
}

// runAt schedules f to run at time t and returns a handle to cancel it.
func runAt(t time.Time, f func()) runAtTask {
	timer := time.AfterFunc(time.Until(t), f)

	task := runAtTimerTask{
		timer: timer,
	}

	return &task
}

// RunCommand implements the long-running "run" multi-runner service command.
type RunCommand struct {
	network           common.Network
	executorProviders executors.Providers
	healthHelper      healthHelper
	buildsHelper      buildsHelper

	configfile *configfile.ConfigFile

	ListenAddress    string `long:"listen-address" env:"LISTEN_ADDRESS" description:"Metrics / pprof server listening address"`
	ConfigFile       string `short:"c" long:"config" env:"CONFIG_FILE" description:"Config file"`
	ServiceName      string `short:"n" long:"service" description:"Use different names for different services"`
	WorkingDirectory string `short:"d" long:"working-directory" description:"Specify custom working directory"`
	User             string `short:"u" long:"user" description:"Use specific user to execute shell scripts"`
	Syslog           bool   `long:"syslog" description:"Log to system service logger" env:"LOG_SYSLOG"`

	// sentry.LogHook is a struct, so accesses are not atomic. Use the sentryLogHookMutex to ensure
	// mutual exclusion.
	sentryLogHookMutex sync.Mutex
	sentryLogHook      sentry.LogHook

	networkMutex sync.Mutex

	prometheusLogHook prometheus_helper.LogHook

	failuresCollector      *prometheus_helper.FailuresCollector
	apiRequestsCollector   prometheus.Collector
	inputsMetricsCollector *spec.JobInputsMetricsCollector

	sessionServer *session.Server

	usageLogger *usage_log.Storage

	// abortBuilds is used to abort running builds
	abortBuilds chan os.Signal

	// runInterruptSignal is used to abort current operation (scaling workers, waiting for config)
	runInterruptSignal chan os.Signal

	// reloadSignal is used to trigger forceful config reload
	reloadSignal chan os.Signal

	// stopSignals is to catch a signals notified to process: SIGTERM, SIGQUIT, Interrupt, Kill
	stopSignals chan os.Signal

	// stopSignal is used to preserve the signal that was used to stop the
	// process In case this is SIGQUIT it makes to finish all builds and session
	// server.
	stopSignal os.Signal

	// configReloaded is used to notify that the config has been reloaded
	configReloaded chan int

	// runFinished is used to notify that run() did finish
	runFinished chan bool

	currentWorkers int

	reloadConfigInterval time.Duration

	// runAt is injectable for tests (see mockRunAtTask / runAtTask).
	runAt func(time.Time, func()) runAtTask

	runnerWorkerSlots             prometheus.Gauge
	runnerWorkersFeeds            *prometheus.CounterVec
	runnerWorkersFeedFailures     *prometheus.CounterVec
	runnerWorkerSlotOperations    *prometheus.CounterVec
	runnerWorkerProcessingFailure *prometheus.CounterVec
}

// NewRunCommand assembles the "run" command with its collaborators and
// default metric hooks/collectors.
func NewRunCommand(n common.Network, apiRequestsCollector prometheus.Collector, executorProviders executors.Providers) cli.Command {
	cmd := &RunCommand{
		ServiceName:            defaultServiceName,
		network:                n,
		executorProviders:      executorProviders,
		apiRequestsCollector:   apiRequestsCollector,
		inputsMetricsCollector: spec.NewJobInputsMetricsCollector(),
		prometheusLogHook:      prometheus_helper.NewLogHook(),
		failuresCollector:      prometheus_helper.NewFailuresCollector(),
		healthHelper:           newHealthHelper(),
		buildsHelper:           newBuildsHelper(),
		runAt:                  runAt,
		reloadConfigInterval:   common.ReloadConfigInterval,
	}

	return common.NewCommand("run", "run multi runner service", cmd)
}

// log returns an entry annotated with current/maximum build counts.
func (mr *RunCommand) log() *logrus.Entry {
	config := mr.configfile.Config()

	concurrent := 0
	if config != nil {
		concurrent = config.Concurrent
	}

	return logrus.WithFields(logrus.Fields{
		"builds":     mr.buildsHelper.buildsCount(),
		"max_builds": concurrent,
	})
}

// Start is the method implementing `github.com/kardianos/service`.`Interface`
// interface. It's responsible for a non-blocking initialization of the process. When it exits,
// the main control flow is passed to runWait() configured as service's RunWait method. Take a look
// into Execute() for details.
func (mr *RunCommand) Start(_ service.Service) error { mr.abortBuilds = make(chan os.Signal) mr.runInterruptSignal = make(chan os.Signal, 1) mr.reloadSignal = make(chan os.Signal, 1) mr.configReloaded = make(chan int, 1) mr.runFinished = make(chan bool, 1) mr.stopSignals = make(chan os.Signal) mr.log().Info("Starting multi-runner from ", mr.ConfigFile, "...") mr.setupInternalMetrics() userModeWarning(false) if mr.WorkingDirectory != "" { err := os.Chdir(mr.WorkingDirectory) if err != nil { return err } } err := mr.reloadConfig() if err != nil { return err } config := mr.configfile.Config() for _, runner := range config.Runners { mr.runnerWorkersFeeds.WithLabelValues(runner.ShortDescription(), runner.Name, runner.GetSystemID()).Add(0) mr.runnerWorkersFeedFailures. WithLabelValues(runner.ShortDescription(), runner.Name, runner.GetSystemID()). Add(0) mr.runnerWorkerProcessingFailure. WithLabelValues( workerProcessingFailureOther, runner.ShortDescription(), runner.Name, runner.GetSystemID(), ). Add(0) mr.runnerWorkerProcessingFailure. WithLabelValues( workerProcessingFailureNoFreeExecutor, runner.ShortDescription(), runner.Name, runner.GetSystemID(), ). Add(0) mr.runnerWorkerProcessingFailure. WithLabelValues( workerProcessingFailureJobFailure, runner.ShortDescription(), runner.Name, runner.GetSystemID(), ). Add(0) } mr.runnerWorkerSlots.Set(0) mr.runnerWorkerSlotOperations.WithLabelValues(workerSlotOperationStarted).Add(0) mr.runnerWorkerSlotOperations.WithLabelValues(workerSlotOperationStopped).Add(0) // Start should not block. Do the actual work async. 
go mr.run() return nil } func (mr *RunCommand) setupInternalMetrics() { mr.runnerWorkersFeeds = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "gitlab_runner_worker_feeds_total", Help: "Total number of times that runner worker is fed to the main loop", }, []string{"runner", "runner_name", "system_id"}, ) mr.runnerWorkersFeedFailures = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "gitlab_runner_worker_feed_failures_total", Help: "Total number of times that runner worker feeding have failed", }, []string{"runner", "runner_name", "system_id"}, ) mr.runnerWorkerSlots = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "gitlab_runner_worker_slots_number", Help: "Current number of runner worker slots", }) mr.runnerWorkerSlotOperations = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "gitlab_runner_worker_slot_operations_total", Help: "Total number of runner workers slot operations (starting and stopping slots)", }, []string{"operation"}, ) mr.runnerWorkerProcessingFailure = prometheus.NewCounterVec( prometheus.CounterOpts{ Name: "gitlab_runner_worker_processing_failures_total", Help: "Total number of failures when processing runner worker", }, []string{"failure_type", "runner", "runner_name", "system_id"}, ) } func nextRunnerToReset(config *common.Config) (*common.RunnerConfig, time.Time) { var runnerToReset *common.RunnerConfig var runnerResetTime time.Time for _, runner := range config.Runners { if runner.TokenExpiresAt.IsZero() { continue } expirationInterval := runner.TokenExpiresAt.Sub(runner.TokenObtainedAt) resetTime := runner.TokenObtainedAt.Add( time.Duration(common.TokenResetIntervalFactor * float64(expirationInterval.Nanoseconds())), ) if runnerToReset == nil || resetTime.Before(runnerResetTime) { runnerToReset = runner runnerResetTime = resetTime } } return runnerToReset, runnerResetTime } func (mr *RunCommand) resetRunnerTokens() { for mr.resetOneRunnerToken() { // Handling runner authentication token resetting - one by one - until 
mr.runFinished // reports that mr.run() have been finished } } //nolint:gocognit func (mr *RunCommand) resetOneRunnerToken() bool { var task runAtTask runnerResetCh := make(chan *common.RunnerConfig) config := mr.configfile.Config() runnerToReset, runnerResetTime := nextRunnerToReset(config) if runnerToReset != nil { task = mr.runAt(runnerResetTime, func() { runnerResetCh <- runnerToReset }) } select { case runner := <-runnerResetCh: // When the FF is enabled, the token is not reset, however, a message is logged to warn the user // that his token is about to expire if runner.IsFeatureFlagOn(featureflags.DisableAutomaticTokenRotation) { mr.log().Warningln(fmt.Printf( "Automatic token rotation is disabled for runner: %s-%s. Your token is about to expire", runner.ShortDescription(), runner.GetSystemID(), )) return false } var updated bool if err := mr.configfile.Load(configfile.WithMutateOnLoad(func(cfg *common.Config) error { runnerCfg, err := cfg.RunnerByToken(runner.Token) if err != nil { return fmt.Errorf("resetting token for runner: %w", err) } updated = common.ResetToken(mr.network, runnerCfg, runnerCfg.GetSystemID(), "") return nil })); err != nil { mr.log().WithError(err).Errorln("Failed to load config (token reset)") } if updated { if err := mr.configfile.Save(); err != nil { mr.log().WithError(err).Errorln("Failed to save config") } } case <-mr.runFinished: if task != nil { task.cancel() } return false case <-mr.configReloaded: if task != nil { task.cancel() } } return true } func (mr *RunCommand) reloadConfig() error { if err := mr.configfile.Load(configfile.WithMutateOnLoad(func(cfg *common.Config) error { cfg.User = mr.User return nil })); err != nil { return err } // Set log level if err := mr.updateLoggingConfiguration(); err != nil { return err } mr.reloadUsageLogger() config := mr.configfile.Config() mr.healthHelper.healthy = nil mr.log().Println("Configuration loaded") // Warn about legacy /ci URL suffix in runner configurations for _, runner := 
range config.Runners {
		runner.WarnOnLegacyCIURL()
	}

	mr.checkConfigConcurrency(config)

	// Dump the (masked) effective configuration at debug level.
	if c, err := config.Masked(); err == nil {
		mr.log().Debugln(helpers.ToYAML(c))
	}

	// initialize sentry
	slh := sentry.LogHook{}
	if config.SentryDSN != nil {
		var err error
		slh, err = sentry.NewLogHook(*config.SentryDSN)
		if err != nil {
			mr.log().WithError(err).Errorln("Sentry failure")
		}
	}
	mr.sentryLogHookMutex.Lock()
	mr.sentryLogHook = slh
	mr.sentryLogHookMutex.Unlock()

	if config.ConnectionMaxAge != nil && mr.network != nil {
		mr.networkMutex.Lock()
		mr.network.SetConnectionMaxAge(*config.ConnectionMaxAge)
		mr.networkMutex.Unlock()
	}

	// Notify listeners (e.g. resetRunnerTokens) that a new config is active;
	// the channel is buffered so this does not block when nobody listens.
	mr.configReloaded <- 1

	return nil
}

// updateLoggingConfiguration applies the configured log level and format,
// unless either was already forced via CLI flags, and reloads the logging
// configuration when anything changed.
func (mr *RunCommand) updateLoggingConfiguration() error {
	reloadNeeded := false
	config := mr.configfile.Config()

	level := "info"
	if config.LogLevel != nil {
		level = *config.LogLevel
	}

	if !log.Configuration().IsLevelSetWithCli() {
		err := log.Configuration().SetLevel(level)
		if err != nil {
			return err
		}

		reloadNeeded = true
	}

	format := log.FormatRunner
	if config.LogFormat != nil {
		format = *config.LogFormat
	}

	if !log.Configuration().IsFormatSetWithCli() {
		err := log.Configuration().SetFormat(format)
		if err != nil {
			return err
		}

		reloadNeeded = true
	}

	if reloadNeeded {
		log.Configuration().ReloadConfiguration()
	}

	return nil
}

// reloadUsageLogger closes any existing usage-log storage and re-creates it
// from the experimental usage logger configuration (or disables it).
func (mr *RunCommand) reloadUsageLogger() {
	if mr.usageLogger != nil {
		mr.log().Debug("Closing existing usage logger storage")

		err := mr.usageLogger.Close()
		if err != nil {
			mr.log().WithError(err).Error("Failed to close existing usage logger storage")
		}
	}

	config := mr.configfile.Config()
	if config.Experimental == nil || !config.Experimental.UsageLogger.Enabled {
		mr.usageLogger = nil
		mr.log().Info("Usage logger disabled")

		return
	}

	ulConfig := config.Experimental.UsageLogger

	// Default log directory: "usage-log" next to the configuration file.
	logDir := ulConfig.LogDir
	if logDir == "" {
		logDir = filepath.Join(filepath.Dir(mr.ConfigFile), "usage-log")
	}

	options := []logrotate.Option{
		logrotate.WithLogDirectory(logDir),
	}
	storageOptions := []usage_log.Option{
		usage_log.WithLabels(ulConfig.Labels),
	}

	logFields := logrus.Fields{
		"log_dir": logDir,
	}

	if ulConfig.MaxBackupFiles != nil && *ulConfig.MaxBackupFiles > 0 {
		options = append(options, logrotate.WithMaxBackupFiles(*ulConfig.MaxBackupFiles))
		logFields["max_backup_files"] = *ulConfig.MaxBackupFiles
	}

	if ulConfig.MaxRotationAge != nil && ulConfig.MaxRotationAge.Nanoseconds() > 0 {
		options = append(options, logrotate.WithMaxRotationAge(*ulConfig.MaxRotationAge))
		logFields["max_rotation_age"] = *ulConfig.MaxRotationAge
	}

	mr.log().WithFields(logFields).Info("Usage logger enabled")

	mr.usageLogger = usage_log.NewStorage(logrotate.New(options...), storageOptions...)
}

// run is the main method of RunCommand. It's started asynchronously by services support
// through `Start` method and is responsible for initializing all goroutines handling
// concurrent, multi-runner execution of jobs.
// When mr.stopSignal is broadcasted (after `Stop` is called by services support)
// this method waits for all workers to be terminated and closes the mr.runFinished
// channel, which is the signal that the command was properly terminated (this is the only
// valid, properly terminated exit flow for `gitlab-runner run`).
func (mr *RunCommand) run() {
	mr.setupMetricsAndDebugServer()
	mr.setupSessionServer()

	go mr.resetRunnerTokens()

	runners := make(chan *common.RunnerConfig)
	go mr.feedRunners(runners)

	mr.initUsedExecutorProviders()

	signal.Notify(mr.stopSignals, syscall.SIGQUIT, syscall.SIGTERM, os.Interrupt)
	signal.Notify(mr.reloadSignal, syscall.SIGHUP)

	startWorker := make(chan int)
	stopWorker := make(chan bool)
	go mr.startWorkers(startWorker, stopWorker, runners)

	workerIndex := 0

	// Update number of workers and reload configuration.
	// Exits when mr.runInterruptSignal receives a signal.
	// Main loop: keep the worker pool sized and the configuration fresh until
	// an interrupt signal arrives.
	for mr.stopSignal == nil {
		signaled := mr.updateWorkers(&workerIndex, startWorker, stopWorker)
		if signaled != nil {
			break
		}

		signaled = mr.updateConfig()
		if signaled != nil {
			break
		}
	}

	// Wait for workers to shut down
	mr.stopWorkers(stopWorker)
	mr.log().Info("All workers stopped.")

	mr.shutdownUsedExecutorProviders()
	mr.log().Info("All executor providers shut down.")

	// Closing runFinished signals all waiters (Stop, resetRunnerTokens,
	// doJobRequest) that the command terminated properly.
	close(mr.runFinished)
	mr.log().Info("Can exit now!")
}

// initUsedExecutorProviders initializes every executor provider that
// implements common.ManagedExecutorProvider.
func (mr *RunCommand) initUsedExecutorProviders() {
	mr.log().Info("Initializing executor providers")

	for _, provider := range mr.executorProviders.All() {
		managedProvider, ok := provider.(common.ManagedExecutorProvider)
		if ok {
			managedProvider.Init()
		}
	}
}

// shutdownUsedExecutorProviders shuts down all managed executor providers
// concurrently, bounded by the configured shutdown timeout.
func (mr *RunCommand) shutdownUsedExecutorProviders() {
	config := mr.configfile.Config()

	shutdownTimeout := config.GetShutdownTimeout()
	logger := mr.log().WithField("shutdown-timeout", shutdownTimeout)

	logger.Info("Shutting down executor providers")

	ctx, cancelFn := context.WithTimeout(context.Background(), shutdownTimeout)
	defer cancelFn()

	wg := new(sync.WaitGroup)
	for _, provider := range mr.executorProviders.All() {
		managedProvider, ok := provider.(common.ManagedExecutorProvider)
		if ok {
			wg.Add(1)
			go func(p common.ManagedExecutorProvider) {
				defer wg.Done()
				p.Shutdown(ctx, config)
			}(managedProvider)
		}
	}
	wg.Wait()

	// A non-nil context error here means the timeout elapsed before all
	// providers finished shutting down.
	if ctx.Err() != nil {
		logger.Warn("Executor providers shutdown timeout exceeded")
	}
}

// listenAddress resolves the metrics/debug listen address: the explicit
// argument wins over the config value; a missing port falls back to the
// default metrics server port. Empty result means "disabled".
func listenAddress(cfg *common.Config, address string) (string, error) {
	if address == "" {
		address = cfg.ListenAddress
	}

	if address == "" {
		return "", nil
	}

	_, port, err := net.SplitHostPort(address)
	if err != nil && !strings.Contains(err.Error(), "missing port in address") {
		return "", err
	}

	if port == "" {
		return fmt.Sprintf("%s:%d", address, common.DefaultMetricsServerPort), nil
	}
	return address, nil
}

// setupMetricsAndDebugServer starts the HTTP server exposing Prometheus
// metrics, the job-list debug endpoint and pprof, when a listen address is
// configured.
func (mr *RunCommand) setupMetricsAndDebugServer() {
	listenAddress, err := listenAddress(mr.configfile.Config(), mr.ListenAddress)

	if err != nil {
		mr.log().Errorf("invalid listen address: %s", err.Error())
		return
	}

	if listenAddress == "" {
		mr.log().Info("listen_address not defined, metrics & debug endpoints disabled")
		return
	}

	// We separate out the listener creation here so that we can return an error if
	// the provided address is invalid or there is some other listener error.
	listener, err := net.Listen("tcp", listenAddress)
	if err != nil {
		mr.log().WithError(err).Fatal("Failed to create listener for metrics server")
	}

	mux := http.NewServeMux()

	go func() {
		err := http.Serve(listener, mux)
		if err != nil {
			mr.log().WithError(err).Fatal("Metrics server terminated")
		}
	}()

	mr.serveMetrics(mux)
	mr.serveDebugData(mux)
	mr.servePprof(mux)

	mr.log().
		WithField("address", listenAddress).
		Info("Metrics server listening")
}

// serveMetrics registers all Prometheus collectors into a fresh registry and
// mounts its handler on /metrics (GET/HEAD only).
func (mr *RunCommand) serveMetrics(mux *http.ServeMux) {
	registry := prometheus.NewRegistry()
	// Metrics about the runner's business logic.
	registry.MustRegister(&mr.buildsHelper)
	// Metrics about runner workers health
	registry.MustRegister(&mr.healthHelper)
	// Metrics about configuration file accessing
	registry.MustRegister(mr.configfile.AccessCollector())
	registry.MustRegister(mr)
	// Metrics about job inputs interpolation
	registry.MustRegister(mr.inputsMetricsCollector)
	// Metrics about API connections
	registry.MustRegister(mr.apiRequestsCollector)
	// Metrics about jobs failures
	registry.MustRegister(mr.failuresCollector)
	// Metrics about caught errors
	registry.MustRegister(&mr.prometheusLogHook)
	// Metrics about the program's build version.
	registry.MustRegister(common.AppVersion.NewMetricsCollector())
	// Go-specific metrics about the process (GC stats, goroutines, etc.).
	registry.MustRegister(collectors.NewGoCollector())
	// Go-unrelated process metrics (memory usage, file descriptors, etc.).
	registry.MustRegister(collectors.NewProcessCollector(collectors.ProcessCollectorOpts{}))

	// Register all executor provider collectors
	for _, provider := range mr.executorProviders.All() {
		if collector, ok := provider.(prometheus.Collector); ok && collector != nil {
			registry.MustRegister(collector)
		}
	}

	// Register all cache adapter collectors
	for _, collector := range cache.Collectors() {
		registry.MustRegister(collector)
	}

	// restrictHTTPMethods should be used on all promhttp handlers
	// In this specific instance, the handler is uninstrumented, so isn't as
	// important. But in the future, if any other promhttp handlers are added
	// they too should be wrapped and restricted.
	// https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27194
	mux.Handle(
		"/metrics",
		restrictHTTPMethods(
			promhttp.HandlerFor(registry, promhttp.HandlerOpts{}),
			http.MethodGet, http.MethodHead,
		),
	)
}

// serveDebugData exposes the list of currently running jobs.
func (mr *RunCommand) serveDebugData(mux *http.ServeMux) {
	mux.HandleFunc("/debug/jobs/list", mr.buildsHelper.ListJobsHandler)
}

// servePprof exposes the standard net/http/pprof profiling endpoints.
func (mr *RunCommand) servePprof(mux *http.ServeMux) {
	mux.HandleFunc("/debug/pprof/", pprof.Index)
	mux.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline)
	mux.HandleFunc("/debug/pprof/profile", pprof.Profile)
	mux.HandleFunc("/debug/pprof/symbol", pprof.Symbol)
	mux.HandleFunc("/debug/pprof/trace", pprof.Trace)
}

// restrictHTTPMethods wraps a http.Handler and returns a http.Handler that
// restricts methods only to those provided.
func restrictHTTPMethods(handler http.Handler, methods ...string) http.Handler { supported := map[string]struct{}{} for _, method := range methods { supported[method] = struct{}{} } return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if _, ok := supported[r.Method]; !ok { http.Error(w, http.StatusText(http.StatusMethodNotAllowed), http.StatusMethodNotAllowed) return } handler.ServeHTTP(w, r) }) } func (mr *RunCommand) setupSessionServer() { config := mr.configfile.Config() if config.SessionServer.ListenAddress == "" { mr.log().Info("[session_server].listen_address not defined, session endpoints disabled") return } // Create a wrapper function that handles the error from findSessionByURL findSessionWrapper := func(url string) *session.Session { sess, err := mr.buildsHelper.findSessionByURL(url) if err != nil { mr.log().WithError(err).WithField("url", url).Warn("Failed to find session by URL") return nil } return sess } var err error mr.sessionServer, err = session.NewServer( session.ServerConfig{ AdvertiseAddress: config.SessionServer.AdvertiseAddress, ListenAddress: config.SessionServer.ListenAddress, ShutdownTimeout: config.GetShutdownTimeout(), }, mr.log(), certificate.X509Generator{}, findSessionWrapper, ) if err != nil { mr.log().WithError(err).Fatal("Failed to create session server") } go func() { err := mr.sessionServer.Start() if err != nil { mr.log().WithError(err).Fatal("Session server terminated") } }() mr.log(). WithField("address", config.SessionServer.ListenAddress). Info("Session server listening") } // feedRunners works until a stopSignal was saved. // It is responsible for feeding the runners (workers) to channel, which // asynchronously ends with job requests being made and jobs being executed // by concurrent workers. // This is also the place where check interval is calculated and // applied. 
// feedRunners loops until a stop signal is recorded, spreading the check
// interval evenly across all configured runners and feeding each one to the
// runners channel in turn.
func (mr *RunCommand) feedRunners(runners chan *common.RunnerConfig) {
	for mr.stopSignal == nil {
		mr.log().Debugln("Feeding runners to channel")

		config := mr.configfile.Config()

		// If no runners wait full interval to test again
		if len(config.Runners) == 0 {
			time.Sleep(config.GetCheckInterval())
			continue
		}

		interval := config.GetCheckInterval() / time.Duration(len(config.Runners))

		// Feed runner with waiting exact amount of time
		for _, runner := range config.Runners {
			mr.feedRunner(runner, runners)
			time.Sleep(interval)
		}
	}

	mr.log().
		WithField("StopSignal", mr.stopSignal).
		Debug("Stopping feeding runners to channel")
}

// feedRunner sends a single runner to the runners channel, skipping (and
// counting) unhealthy runners. The send blocks until a worker picks it up.
func (mr *RunCommand) feedRunner(runner *common.RunnerConfig, runners chan *common.RunnerConfig) {
	if !mr.healthHelper.isHealthy(runner) {
		mr.runnerWorkersFeedFailures.WithLabelValues(runner.ShortDescription(), runner.Name, runner.GetSystemID()).Inc()
		return
	}

	mr.runnerWorkersFeeds.WithLabelValues(runner.ShortDescription(), runner.Name, runner.GetSystemID()).Inc()

	mr.log().WithField("runner", runner.ShortDescription()).Debugln("Feeding runner to channel")
	runners <- runner
}

// startWorkers is responsible for starting the workers (up to the number
// defined by `concurrent`) and assigning a runner processing method to them.
func (mr *RunCommand) startWorkers(startWorker chan int, stopWorker chan bool, runners chan *common.RunnerConfig) {
	for mr.stopSignal == nil {
		id := <-startWorker
		go mr.processRunners(id, stopWorker, runners)
	}
}

// processRunners is responsible for processing a Runner on a worker (when received
// a runner information sent to the channel by feedRunners) and for terminating the worker
// (when received an information on stopWorker chan - provided by updateWorkers)
func (mr *RunCommand) processRunners(id int, stopWorker chan bool, runners chan *common.RunnerConfig) {
	mr.log().
		WithField("worker", id).
		Debugln("Starting worker")

	mr.runnerWorkerSlotOperations.WithLabelValues(workerSlotOperationStarted).Inc()

	for mr.stopSignal == nil {
		select {
		case runner := <-runners:
			err := mr.processRunner(id, runner, runners)
			if err != nil {
				logger := mr.log().
					WithFields(logrus.Fields{
						"runner":      runner.ShortDescription(),
						"runner_name": runner.Name,
						"executor":    runner.Executor,
					}).WithError(err)

				// Job failures and "no free executor" are expected conditions,
				// so they are logged at debug level; anything else is a warning.
				l, failureType := loggerAndFailureTypeFromError(logger, err)
				l("Failed to process runner")
				mr.runnerWorkerProcessingFailure.
					WithLabelValues(failureType, runner.ShortDescription(), runner.Name, runner.GetSystemID()).
					Inc()
			}

		case <-stopWorker:
			mr.log().
				WithField("worker", id).
				Debugln("Stopping worker")
			mr.runnerWorkerSlotOperations.WithLabelValues(workerSlotOperationStopped).Inc()
			return
		}
	}
	// Stop signal was recorded: wait for the stop token so the slot count
	// stays consistent with updateWorkers/stopWorkers bookkeeping.
	<-stopWorker
}

// loggerAndFailureTypeFromError maps a processing error to the log function
// that should report it and the failure_type metric label to count it under.
func loggerAndFailureTypeFromError(logger logrus.FieldLogger, err error) (func(args ...interface{}), string) {
	var NoFreeExecutorError *common.NoFreeExecutorError
	if errors.As(err, &NoFreeExecutorError) {
		return logger.Debug, workerProcessingFailureNoFreeExecutor
	}

	var BuildError *common.BuildError
	if errors.As(err, &BuildError) {
		return logger.Debug, workerProcessingFailureJobFailure
	}

	return logger.Warn, workerProcessingFailureOther
}

// processRunner is responsible for handling one job on a specified runner.
// First it acquires the Build to check if `limit` was met. If it's still in the capacity
// it creates the debug session (for debug terminal), triggers a job request to configured
// GitLab instance and finally creates and finishes the job.
// To speed-up jobs handling before starting the job this method "requeues" the runner to another
// worker (by feeding the channel normally handled by feedRunners).
// processRunner handles a single job request/execution cycle on one runner.
func (mr *RunCommand) processRunner(id int, runner *common.RunnerConfig, runners chan *common.RunnerConfig) error {
	runnerFields := logrus.Fields{
		"runner":      runner.ShortDescription(),
		"runner_name": runner.Name,
	}
	mr.log().WithFields(runnerFields).Debugln("Processing runner")

	provider := mr.executorProviders.GetByName(runner.Executor)
	if provider == nil {
		mr.log().
			WithFields(runnerFields).
			Errorf("Executor %q is not known; marking Runner as unhealthy", runner.Executor)
		mr.healthHelper.markHealth(runner, false)
		return nil
	}

	mr.log().WithField("runner", runner.ShortDescription()).Debug("Acquiring job slot")
	if !mr.buildsHelper.acquireBuild(runner) {
		logrus.WithFields(runnerFields).WithField("worker", id).Debug("Failed to request job, runner limit met")
		return nil
	}
	defer mr.buildsHelper.releaseBuild(runner)

	// Acquire request for job
	// We must ensure that this is released after the job request, or earlier if there's an
	// error before the job request is made.
	mr.log().WithFields(runnerFields).Debug("Acquiring request slot")
	if !mr.buildsHelper.acquireRequest(runner) {
		mr.log().WithFields(runnerFields).
			Debugln("Failed to request job: 'request_concurrency' already reached, see https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-runners-section")
		return nil
	}

	mr.log().WithFields(runnerFields).Debug("Acquiring executor from provider")
	executorData, err := provider.Acquire(runner)
	if err != nil {
		// Release job request
		mr.buildsHelper.releaseRequest(runner, false)
		return fmt.Errorf("failed to update executor: %w", err)
	}
	defer provider.Release(runner, executorData)

	return mr.processBuildOnRunner(runner, runners, provider, executorData)
}

// processBuildOnRunner requests a job for the runner, creates the build
// (optionally with a debug session), requeues the runner for other workers,
// and finally runs the build.
func (mr *RunCommand) processBuildOnRunner(
	runner *common.RunnerConfig,
	runners chan *common.RunnerConfig,
	provider common.ExecutorProvider,
	executorData common.ExecutorData,
) error {
	buildSession, sessionInfo, err := mr.createSession(provider)
	if err != nil {
		// Release job request
		mr.buildsHelper.releaseRequest(runner, false)
		return err
	}

	// Receive a new build
	trace, jobData, err := mr.requestJob(runner, sessionInfo)

	// Release job request
	mr.buildsHelper.releaseRequest(runner, jobData != nil)
	if err != nil || jobData == nil {
		return err
	}

	// Deferred so the trace is finalized (Success/Fail) with whatever err
	// holds when this function returns.
	defer func() { mr.traceOutcome(trace, err) }()

	// Create a new build
	build, err := common.NewBuild(*jobData, runner, mr.abortBuilds, executorData, provider)
	if err != nil {
		return err
	}
	build.Session = buildSession
	build.ArtifactUploader = mr.network.UploadRawArtifacts

	trace.SetDebugModeEnabled(build.IsDebugModeEnabled())

	tracingFeature := jobData.Features.Tracing

	tr, stop := tracer(mr.log(), tracingFeature)
	defer func() {
		stopErr := stop()
		if stopErr != nil {
			mr.log().WithError(stopErr).Warn("Error stopping trace provider")
		}
	}()

	ctx := tracerContext(context.Background(), mr.log(), tracingFeature)
	ctx, span := tr.Start(ctx, spanNameJobExecution)
	defer span.End()
	defer func() { span.SetAttributes(spanAttrJobStatus.String(build.CurrentState().String())) }()
	setJobSpanAttributes(span, build, runner)
	_ = ctx // we'll need it later

	// Add build to list of builds to assign numbers
	mr.buildsHelper.addBuild(build)

	fields := logrus.Fields{
		"runner":                runner.ShortDescription(),
		"runner_name":           runner.Name,
		"job":                   build.ID,
		"pipeline_id":           build.JobInfo.PipelineID,
		"project":               build.JobInfo.ProjectID,
		"project_full_path":     build.JobInfo.ProjectFullPath,
		"namespace_id":          build.JobInfo.NamespaceID,
		"root_namespace_id":     build.JobInfo.RootNamespaceID,
		"organization_id":       build.JobInfo.OrganizationID,
		"gitlab_user_id":        build.JobInfo.UserID,
		"repo_url":              build.RepoCleanURL(),
		"time_in_queue_seconds": build.JobInfo.TimeInQueueSeconds,
		"queue_size":            build.JobInfo.QueueSize,
		"queue_depth":           build.JobInfo.QueueDepth,
	}
	if build.JobInfo.ScopedUserID != nil {
		fields["gitlab_scoped_user_id"] = *build.JobInfo.ScopedUserID
	}
	mr.log().WithFields(fields).Infoln("Added job to processing list")

	defer func() {
		if mr.buildsHelper.removeBuild(build) {
			mr.log().WithFields(fields).Infoln("Removed job from processing list")
			mr.usageLoggerStore(common.UsageLogRecordFrom(runner, build))
		}
	}()

	if !runner.GetStrictCheckInterval() {
		// Process the same runner by different worker again
		// to speed up taking the builds
		mr.requeueRunner(runner, runners)
	}

	// Process a build
	return build.Run(mr.configfile.Config(), trace)
}

// traceOutcome finalizes the job trace: on error it writes the error to the
// trace and fails it with a runner system failure; otherwise it marks success.
func (mr *RunCommand) traceOutcome(trace common.JobTrace, err error) {
	if err != nil {
		fmt.Fprintln(trace, err.Error())
		logTerminationError(
			mr.log(),
			"Fail",
			trace.Fail(err, common.JobFailureData{Reason: common.RunnerSystemFailure}),
		)

		return
	}

	logTerminationError(mr.log(), "Success", trace.Success())
}

// logTerminationError reports a failure returned by a trace termination call
// (Success/Fail), identified by name.
func logTerminationError(logger logrus.FieldLogger, name string, err error) {
	if err != nil {
		logger.WithError(err).Errorf("Job trace termination %q failed", name)
	}
}

// usageLoggerStore persists a usage-log record when the usage logger is enabled.
func (mr *RunCommand) usageLoggerStore(record usage_log.Record) {
	if mr.usageLogger == nil {
		return
	}

	l := mr.log().WithField("job_url", record.Job.URL)

	l.Info("Storing usage log information")
	err := mr.usageLogger.Store(record)
	if err != nil {
		l.WithError(err).Error("Failed to store usage log information")
	}
}

// createSession checks if debug server is supported by configured executor and if the
// debug server was configured. If both requirements are met, then it creates a debug session
// that will be assigned to newly created job.
func (mr *RunCommand) createSession(provider common.ExecutorProvider) (*session.Session, *common.SessionInfo, error) {
	var features common.FeaturesInfo

	if err := provider.GetFeatures(&features); err != nil {
		return nil, nil, err
	}

	if mr.sessionServer == nil || !features.Session {
		return nil, nil, nil
	}

	sess, err := session.NewSession(mr.log())
	if err != nil {
		return nil, nil, err
	}

	sessionInfo := &common.SessionInfo{
		URL:           mr.sessionServer.AdvertiseAddress + sess.Endpoint,
		Certificate:   string(mr.sessionServer.CertificatePublicKey),
		Authorization: sess.Token,
	}

	return sess, sessionInfo, err
}

// requestJob will check if the runner can send another concurrent request to
// GitLab, if not the return value is nil.
func (mr *RunCommand) requestJob(
	runner *common.RunnerConfig,
	sessionInfo *common.SessionInfo,
) (common.JobTrace, *spec.Job, error) {
	jobData, healthy := mr.doJobRequest(context.Background(), runner, sessionInfo)
	mr.healthHelper.markHealth(runner, healthy)

	if jobData == nil {
		return nil, nil, nil
	}

	// Inject metrics collector into JobInputs
	jobData.Inputs.SetMetricsCollector(mr.inputsMetricsCollector)

	// Make sure to always close output
	jobCredentials := &common.JobCredentials{
		ID:    jobData.ID,
		Token: jobData.Token,
	}

	trace, err := mr.network.ProcessJob(*runner, jobCredentials)
	if err != nil {
		jobInfo := common.UpdateJobInfo{
			ID:            jobCredentials.ID,
			State:         common.Failed,
			FailureReason: common.RunnerSystemFailure,
		}

		// send failure once
		mr.network.UpdateJob(*runner, jobCredentials, jobInfo)
		return nil, nil, err
	}

	// Unsupported job options or invalid steps requests fail the job before
	// it is ever started.
	if err := errors.Join(jobData.UnsupportedOptions(), jobData.ValidateStepsJobRequest(mr.executorSupportsNativeSteps(runner))); err != nil {
		_, _ = trace.Write([]byte(err.Error() + "\n"))
		err = trace.Fail(err,
common.JobFailureData{ Reason: common.RunnerSystemFailure, ExitCode: common.ExitCodeUnsupportedOptions, }) logTerminationError(mr.log(), "Fail", err) return nil, nil, err } trace.SetFailuresCollector(mr.failuresCollector) updateResult := mr.network.UpdateJob(*runner, jobCredentials, common.UpdateJobInfo{ ID: jobCredentials.ID, State: common.Running, }) if updateResult.State == common.UpdateAbort || updateResult.CancelRequested { trace.Finish() return nil, nil, nil } return trace, jobData, nil } func (mr *RunCommand) executorSupportsNativeSteps(runnerConfig *common.RunnerConfig) bool { netCli, ok := mr.network.(*network.GitLabClient) return ok && netCli.ExecutorSupportsNativeSteps(*runnerConfig) } // doJobRequest will execute the request for a new job, respecting an interruption // caused by interrupt signals or process execution finalization func (mr *RunCommand) doJobRequest( ctx context.Context, runner *common.RunnerConfig, sessionInfo *common.SessionInfo, ) (*spec.Job, bool) { // Terminate opened requests to GitLab when interrupt signal // is broadcast. ctx, cancelFn := context.WithCancel(ctx) defer cancelFn() go func() { select { case <-mr.runInterruptSignal: cancelFn() case <-mr.runFinished: cancelFn() case <-ctx.Done(): } }() return mr.network.RequestJob(ctx, *runner, sessionInfo) } // requeueRunner feeds the runners channel in a non-blocking way. This replicates the // behavior of feedRunners and speeds-up jobs handling. But if the channel is full, the // method just exits without blocking. func (mr *RunCommand) requeueRunner(runner *common.RunnerConfig, runners chan *common.RunnerConfig) { runnerLog := mr.log().WithField("runner", runner.ShortDescription()).WithField("runner_name", runner.Name) select { case runners <- runner: runnerLog.Debugln("Requeued the runner") default: runnerLog.Debugln("Failed to requeue the runner") } } // updateWorkers, called periodically from run() is responsible for scaling the pool // of workers. 
By worker we don't understand a `[[runners]]` entry, but a "slot" that will // use one of the runners to request and handle a job. // The size of the workers pool is controlled by `concurrent` setting. This method is responsible // for the fact that `concurrent` defines the upper number of jobs that can be concurrently handled // by GitLab Runner process. func (mr *RunCommand) updateWorkers(workerIndex *int, startWorker chan int, stopWorker chan bool) os.Signal { config := mr.configfile.Config() concurrentLimit := config.Concurrent if concurrentLimit < 1 { mr.log().Fatalln(fmt.Printf( "Current configuration 'concurrent = %d' means that no jobs will be processed, see https://docs.gitlab.com/runner/configuration/advanced-configuration.html#the-global-section", concurrentLimit, )) } for mr.currentWorkers > concurrentLimit { // Too many workers. Trigger stop on one of them // or exit if termination signal was broadcasted. select { case stopWorker <- true: case signaled := <-mr.runInterruptSignal: return signaled } mr.currentWorkers-- mr.runnerWorkerSlots.Set(float64(mr.currentWorkers)) } for mr.currentWorkers < concurrentLimit { // Too few workers. Trigger a creation of a new one // or exit if termination signal was broadcasted. 
select { case startWorker <- *workerIndex: case signaled := <-mr.runInterruptSignal: return signaled } mr.currentWorkers++ mr.runnerWorkerSlots.Set(float64(mr.currentWorkers)) *workerIndex++ } return nil } func (mr *RunCommand) stopWorkers(stopWorker chan bool) { for mr.currentWorkers > 0 { stopWorker <- true mr.currentWorkers-- mr.runnerWorkerSlots.Set(float64(mr.currentWorkers)) } } func (mr *RunCommand) updateConfig() os.Signal { select { case <-time.After(mr.reloadConfigInterval): err := mr.checkConfig() if err != nil { mr.log().Errorln("Failed to load config", err) } case <-mr.reloadSignal: err := mr.reloadConfig() if err != nil { mr.log().Errorln("Failed to load config", err) } case signaled := <-mr.runInterruptSignal: return signaled } return nil } func (mr *RunCommand) checkConfig() (err error) { info, err := os.Stat(mr.ConfigFile) if err != nil { return err } config := mr.configfile.Config() if !config.ModTime.Before(info.ModTime()) { return nil } err = mr.reloadConfig() if err != nil { mr.log().Errorln("Failed to load config", err) // don't reload the same file config.ModTime = info.ModTime() return } return nil } // Stop is the method implementing `github.com/kardianos/service`.`Interface` // interface. It's responsible for triggering the process stop. // First it starts a goroutine that starts broadcasting the interrupt signal (used to stop // workers scaling goroutine). // Next it triggers graceful shutdown, which will be handled only if a proper signal is used. // At the end it triggers the forceful shutdown, which handles the forceful the process termination. func (mr *RunCommand) Stop(_ service.Service) error { if mr.stopSignal == nil { mr.stopSignal = os.Interrupt } go mr.interruptRun() defer func() { if mr.sessionServer != nil { mr.sessionServer.Close() } }() // On Windows, we convert SIGTERM and SIGINT signals into a SIGQUIT. // // This enforces *graceful* termination on the first signal received, and a forceful shutdown // on the second. 
	//
	// This slightly differs from other operating systems. On other systems, receiving a SIGQUIT
	// works the same way (gracefully) but receiving a SIGTERM and SIGQUIT always results
	// in an immediate forceful shutdown.
	//
	// This handling has to be different as SIGQUIT is not a signal the os/signal package translates
	// any Windows control concepts to.
	if runtime.GOOS == "windows" {
		mr.stopSignal = syscall.SIGQUIT
	}

	// Try the graceful path first; a nil error means all builds finished.
	err := mr.handleGracefulShutdown()
	if err == nil {
		return nil
	}

	mr.log().
		WithError(err).
		Warning(`Graceful shutdown not finished properly. To gracefully clean up running plugins please use SIGQUIT (ctrl-\) instead of SIGINT (ctrl-c)`)

	err = mr.handleForcefulShutdown()
	if err == nil {
		return nil
	}

	mr.log().
		WithError(err).
		Warning("Forceful shutdown not finished properly")

	mr.usageLoggerClose()

	return err
}

// interruptRun broadcasts interrupt signal, which exits the workers
// scaling goroutine.
func (mr *RunCommand) interruptRun() {
	mr.log().Debug("Broadcasting interrupt signal")

	// Pump interrupt signal indefinitely; receivers drain it until the
	// process terminates.
	for {
		mr.runInterruptSignal <- mr.stopSignal
	}
}

// handleGracefulShutdown is responsible for handling the "graceful" strategy of exiting.
// It's executed only when specific signal is used to terminate the process.
// At this moment feedRunners() should exit and workers scaling is being terminated.
// This means that new jobs will be not requested. handleGracefulShutdown() will ensure that
// the process will not exit until `mr.runFinished` is closed, so all jobs were finished and
// all workers terminated. It may however exit if another signal - other than the gracefulShutdown
// signal - is received.
func (mr *RunCommand) handleGracefulShutdown() error {
	// We wait till we have a SIGQUIT
	for mr.stopSignal == syscall.SIGQUIT {
		mr.log().
			WithField("StopSignal", mr.stopSignal).
			Warning("Starting graceful shutdown, waiting for builds to finish")

		// Wait for other signals to finish builds
		select {
		case mr.stopSignal = <-mr.stopSignals:
			// We received a new signal
			mr.log().WithField("stop-signal", mr.stopSignal).Warning("[handleGracefulShutdown] received stop signal")

		case <-mr.runFinished:
			// Everything finished we can exit now
			return nil
		}
	}

	// Reached when the (latest) stop signal is not SIGQUIT; the caller falls
	// back to the forceful shutdown path.
	return fmt.Errorf("received stop signal: %v", mr.stopSignal)
}

// handleForcefulShutdown is executed if handleGracefulShutdown exited with an error
// (which means that a signal forcing shutdown was used instead of the signal
// specific for graceful shutdown).
// It calls mr.abortAllBuilds which will broadcast abort signal which finally
// ends with jobs termination.
// Next it waits for one of the following events:
//  1. Another signal was sent to process, which is handled as force exit and
//     triggers exit of the method and finally process termination without
//     waiting for anything else.
//  2. ShutdownTimeout is exceeded. If waiting for shutdown will take more than
//     defined time, the process will be forceful terminated just like in the
//     case when second signal is sent.
//  3. mr.runFinished was closed, which means that all termination was done
//     properly.
//
// After this method exits, Stop returns it error and finally the
// `github.com/kardianos/service` service mechanism will finish
// process execution.
func (mr *RunCommand) handleForcefulShutdown() error {
	mr.log().
		WithField("shutdown-timeout", mr.configfile.Config().GetShutdownTimeout()).
		WithField("StopSignal", mr.stopSignal).
		Warning("Starting forceful shutdown")

	go mr.abortAllBuilds()

	// Wait for graceful shutdown or abort after timeout
	for {
		select {
		case mr.stopSignal = <-mr.stopSignals:
			mr.log().WithField("stop-signal", mr.stopSignal).Warning("[handleForcefulShutdown] received stop signal")
			return fmt.Errorf("forced exit with stop signal: %v", mr.stopSignal)

		case <-time.After(mr.configfile.Config().GetShutdownTimeout()):
			return errors.New("shutdown timed out")

		case <-mr.runFinished:
			// Everything finished we can exit now
			return nil
		}
	}
}

// abortAllBuilds broadcasts abort signal, which ends with all currently executed
// jobs being interrupted and terminated.
func (mr *RunCommand) abortAllBuilds() {
	mr.log().Debug("Broadcasting job abort signal")

	// Pump signal to abort all current builds
	for {
		mr.abortBuilds <- mr.stopSignal
	}
}

// usageLoggerClose closes the usage logger (if configured) exactly once,
// logging any close error; the field is nilled to prevent double close.
func (mr *RunCommand) usageLoggerClose() {
	if mr.usageLogger != nil {
		err := mr.usageLogger.Close()
		mr.usageLogger = nil
		mr.log().WithError(err).Error("Closing usage logger")
	}
}

// Execute is the cli entry point of the `run` command: it wires up the
// configuration file, the system service wrapper and the logging hooks, then
// blocks in svc.Run() until the service terminates.
func (mr *RunCommand) Execute(_ *cli.Context) {
	err := process.EnsureSubprocessTerminationOnExit()
	if err != nil {
		logrus.WithError(err).Warn("Failed to wrap process in job object")
	}

	mr.configfile = configfile.New(mr.ConfigFile, configfile.WithAccessCollector())

	svcConfig := &service.Config{
		Name:        mr.ServiceName,
		DisplayName: mr.ServiceName,
		Description: defaultDescription,
		Arguments:   []string{"run"},
		Option: service.KeyValue{
			"RunWait": mr.runWait,
		},
	}

	svc, err := service_helpers.New(mr, svcConfig)
	if err != nil {
		logrus.WithError(err).
			Fatalln("Service creation failed")
	}

	if mr.Syslog {
		log.SetSystemLogger(logrus.StandardLogger(), svc)
	}

	mr.sentryLogHookMutex.Lock()
	logrus.AddHook(&mr.sentryLogHook)
	mr.sentryLogHookMutex.Unlock()
	logrus.AddHook(&mr.prometheusLogHook)

	err = svc.Run()
	if err != nil {
		logrus.WithError(err).
			Fatal("Service run failed")
	}
}

// runWait is the blocking mechanism for `github.com/kardianos/service`
// service. It's started after Start exited and should block the control flow. When it exits,
// then the Stop is executed and service shutdown should be handled.
// For Runner it waits for the stopSignal to be received by the process. When it will happen,
// it's saved in mr.stopSignal and runWait() exits, triggering the shutdown handling.
func (mr *RunCommand) runWait() {
	mr.log().Debugln("Waiting for stop signal")

	// Save the stop signal and exit to execute Stop()
	stopSignal := <-mr.stopSignals
	mr.stopSignal = stopSignal
	mr.log().WithField("stop-signal", stopSignal).Warning("[runWait] received stop signal")
}

// Describe implements prometheus.Collector.
func (mr *RunCommand) Describe(ch chan<- *prometheus.Desc) {
	ch <- concurrentDesc
	ch <- limitDesc

	mr.runnerWorkersFeeds.Describe(ch)
	mr.runnerWorkersFeedFailures.Describe(ch)
	mr.runnerWorkerSlots.Describe(ch)
	mr.runnerWorkerSlotOperations.Describe(ch)
	mr.runnerWorkerProcessingFailure.Describe(ch)
}

// Collect implements prometheus.Collector.
func (mr *RunCommand) Collect(ch chan<- prometheus.Metric) {
	config := mr.configfile.Config()

	ch <- prometheus.MustNewConstMetric(
		concurrentDesc,
		prometheus.GaugeValue,
		float64(config.Concurrent),
	)

	for _, runner := range config.Runners {
		ch <- prometheus.MustNewConstMetric(
			limitDesc,
			prometheus.GaugeValue,
			float64(runner.Limit),
			runner.ShortDescription(),
			runner.Name,
			runner.SystemID,
		)
	}

	mr.runnerWorkersFeeds.Collect(ch)
	mr.runnerWorkersFeedFailures.Collect(ch)
	mr.runnerWorkerSlots.Collect(ch)
	mr.runnerWorkerSlotOperations.Collect(ch)
	mr.runnerWorkerProcessingFailure.Collect(ch)
}

// checkConfigConcurrency inspects the loaded configuration for settings known
// to cause long-polling job delays (low `concurrent`, request_concurrency=1,
// restrictive per-runner limits) and logs a single aggregated warning with
// suggested remediations.
func (mr *RunCommand) checkConfigConcurrency(config *common.Config) {
	var warnings []string
	var solutions []string

	if config.Concurrent < len(config.Runners) {
		warnings = append(warnings, fmt.Sprintf(
			"Worker starvation bottleneck: 'concurrent' setting (%d) is less than number of runners (%d)",
			config.Concurrent, len(config.Runners)))
		solutions = append(solutions, fmt.Sprintf(
			"Increase 'concurrent' to at least %d (current: %d)", len(config.Runners)+1, config.Concurrent))
	}

	var lowRequestConcurrencyRunners int
	var restrictiveRunners int
	for _, runner := range config.Runners {
		if runner.GetRequestConcurrency() == 1 {
			lowRequestConcurrencyRunners++
		}
		if runner.Limit > 0 && runner.Limit <= 2 && runner.GetRequestConcurrency() == 1 {
			restrictiveRunners++
		}
	}

	if lowRequestConcurrencyRunners > 0 {
		warnings = append(warnings, fmt.Sprintf(
			"Request bottleneck: %d runners have request_concurrency=1, causing job delays during long polling",
			lowRequestConcurrencyRunners))
		solutions = append(solutions, fmt.Sprintf(
			"Increase 'request_concurrency' to 2-4 for %d runners currently using request_concurrency=1",
			lowRequestConcurrencyRunners))
	}

	if restrictiveRunners > 0 {
		warnings = append(warnings, fmt.Sprintf(
			"Build limit bottleneck: %d runners have low 'limit' settings (≤2) with request_concurrency=1",
			restrictiveRunners))
		solutions = append(solutions, fmt.Sprintf(
			"For %d runners with low limits: either increase 'limit' to 5+ or increase 'request_concurrency' to 2+",
			restrictiveRunners))
	}

	if len(warnings) > 0 {
		warningMsg := "CONFIGURATION: Long polling issues detected.\n"
		warningMsg += "Issues found:\n"
		for _, warning := range warnings {
			warningMsg += " - " + warning + "\n"
		}
		warningMsg += "This can cause job delays matching your GitLab instance's long polling timeout.\n"
		warningMsg += "Recommended solutions:\n"
		for i, solution := range solutions {
			warningMsg += fmt.Sprintf(" %d. %s\n", i+1, solution)
		}
		warningMsg += "Note: The 'FF_USE_ADAPTIVE_REQUEST_CONCURRENCY' feature flag can help automatically adjust request_concurrency based on workload.\n"
		warningMsg += "This message will be printed each time the configuration is reloaded if the issues persist.\n"
		warningMsg += "See documentation: https://docs.gitlab.com/runner/configuration/advanced-configuration.html#long-polling-issues"

		mr.log().Warning(warningMsg)
	}
}



================================================
FILE: commands/multi_test.go
================================================
//go:build !integration

package commands

import (
	"context"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"strings"
	"sync"
	"sync/atomic"
	"testing"
	"time"

	"github.com/sirupsen/logrus"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/commands/internal/configfile"
	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/gitlab-runner/executors"
	helper_test "gitlab.com/gitlab-org/gitlab-runner/helpers/test"
	"gitlab.com/gitlab-org/gitlab-runner/log/test"
)

// TestProcessRunner_BuildLimit runs three concurrent processRunner calls
// against a runner with Limit=2 and asserts that exactly one "runner limit
// met" debug entry is logged.
func TestProcessRunner_BuildLimit(t *testing.T) {
	hook, cleanup := test.NewHook()
	defer cleanup()

	logrus.SetLevel(logrus.DebugLevel)
	logrus.SetOutput(io.Discard)

	cfg := common.RunnerConfig{
		Limit:              2,
		RequestConcurrency: 10,
		RunnerSettings: common.RunnerSettings{
			Executor: "multi-runner-build-limit",
		},
	}

	mJobTrace := common.NewMockLightJobTrace(t)
	mJobTrace.On("SetFailuresCollector", mock.Anything)
	mJobTrace.On("IsStdout").Return(false)
	mJobTrace.On("SetCancelFunc", mock.Anything)
	mJobTrace.On("SetAbortFunc", mock.Anything)
	mJobTrace.On("SetDebugModeEnabled", mock.Anything)
	mJobTrace.On("Success").Return(nil)

	mNetwork := common.NewMockNetwork(t)
	mNetwork.On("RequestJob", mock.Anything, mock.Anything, mock.Anything).Return(func(ctx context.Context, config common.RunnerConfig, sessionInfo *common.SessionInfo) (*spec.Job, bool) {
		return &spec.Job{
			ID: 1,
			Steps: []spec.Step{
				{
					Name:         "sleep",
					Script:       spec.StepScript{"sleep 10"},
					Timeout:      15,
					When:         "",
					AllowFailure: false,
				},
			},
		}, true
	})
	mNetwork.On("UpdateJob", mock.Anything, mock.Anything, mock.Anything).Return(common.UpdateJobResult{State: common.UpdateSucceeded})
	mNetwork.On("ProcessJob", mock.Anything, mock.Anything).Return(mJobTrace, nil)

	var runningBuilds uint32
	e := common.NewMockExecutor(t)
	e.On("Prepare", mock.Anything, mock.Anything, mock.Anything).Return(nil)
	e.On("Cleanup").Maybe()
	e.On("Shell").Return(&common.ShellScriptInfo{Shell: "script-shell"})
	e.On("Finish", mock.Anything).Maybe()
	e.On("Run", mock.Anything).Run(func(args mock.Arguments) {
		atomic.AddUint32(&runningBuilds, 1)

		// Simulate work to fill up build queue.
		time.Sleep(100 * time.Millisecond)
	}).Return(nil)

	p := common.NewMockExecutorProvider(t)
	p.On("Acquire", mock.Anything).Return(nil, nil)
	p.On("Release", mock.Anything, mock.Anything).Return(nil).Maybe()
	p.On("GetFeatures", mock.Anything).Return(nil)
	p.On("Create").Return(e)

	cmd := RunCommand{
		network:           mNetwork,
		executorProviders: executors.NewProviderRegistry(map[string]common.ExecutorProvider{"multi-runner-build-limit": p}),
		buildsHelper:      newBuildsHelper(),
		configfile: configfile.New("", configfile.WithExistingConfig(
			&common.Config{User: "git"},
		), configfile.WithSystemID(common.UnknownSystemID)),
	}

	runners := make(chan *common.RunnerConfig)
	cmd.buildsHelper.getRunnerCounter(&cfg).adaptiveConcurrencyLimit = 100

	// Start concurrent jobs
	wg := sync.WaitGroup{}
	wg.Add(3)

	for i := 0; i < 3; i++ {
		go func(i int) {
			defer wg.Done()

			err := cmd.processRunner(i, &cfg, runners)
			assert.NoError(t, err)
		}(i)
	}

	// Wait until at least two builds have started.
	for atomic.LoadUint32(&runningBuilds) < 2 {
		time.Sleep(10 * time.Millisecond)
	}

	// Wait for all builds to finish.
	wg.Wait()

	limitMetCount := 0
	for _, entry := range hook.AllEntries() {
		if strings.Contains(entry.Message, "runner limit met") {
			limitMetCount++
		}
	}

	assert.Equal(t, 1, limitMetCount)
}

// TestRunCommand_doJobRequest verifies that doJobRequest honours context
// timeout, the run-interrupt signal and the runFinished channel while a
// RequestJob call is in flight.
func TestRunCommand_doJobRequest(t *testing.T) {
	returnedJob := new(spec.Job)

	waitForContext := func(ctx context.Context) {
		<-ctx.Done()
	}

	tests := map[string]struct {
		requestJob             func(ctx context.Context)
		passSignal             func(c *RunCommand)
		expectedContextTimeout bool
	}{
		"requestJob returns immediately": {
			requestJob:             func(_ context.Context) {},
			passSignal:             func(_ *RunCommand) {},
			expectedContextTimeout: false,
		},
		"requestJob hangs indefinitely": {
			requestJob:             waitForContext,
			passSignal:             func(_ *RunCommand) {},
			expectedContextTimeout: true,
		},
		"requestJob interrupted by interrupt signal": {
			requestJob: waitForContext,
			passSignal: func(c *RunCommand) {
				c.runInterruptSignal <- os.Interrupt
			},
			expectedContextTimeout: false,
		},
		"runFinished signal is passed": {
			requestJob: waitForContext,
			passSignal: func(c *RunCommand) {
				close(c.runFinished)
			},
			expectedContextTimeout: false,
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			runner := new(common.RunnerConfig)

			network := common.NewMockNetwork(t)
			network.On("RequestJob", mock.Anything, *runner, mock.Anything).
				Run(func(args mock.Arguments) {
					ctx, ok := args.Get(0).(context.Context)
					require.True(t, ok)

					tt.requestJob(ctx)
				}).
				Return(returnedJob, true).
				Once()

			c := &RunCommand{
				network:            network,
				runInterruptSignal: make(chan os.Signal),
				runFinished:        make(chan bool),
			}

			ctx, cancelFn := context.WithTimeout(t.Context(), 1*time.Second)
			defer cancelFn()

			go tt.passSignal(c)

			job, _ := c.doJobRequest(ctx, runner, nil)

			assert.Equal(t, returnedJob, job)

			if tt.expectedContextTimeout {
				assert.ErrorIs(t, ctx.Err(), context.DeadlineExceeded)
				return
			}
			assert.NoError(t, ctx.Err())
		})
	}
}

// TestRunCommand_nextRunnerToReset checks which runner credential is selected
// for the next token reset and at what time, for various obtained/expiry
// combinations.
func TestRunCommand_nextRunnerToReset(t *testing.T) {
	testCases := map[string]struct {
		runners           []common.RunnerCredentials
		expectedIndex     int
		expectedResetTime time.Time
	}{
		"no runners": {
			runners:           []common.RunnerCredentials{},
			expectedIndex:     -1,
			expectedResetTime: time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC),
		},
		"no expiration time": {
			runners: []common.RunnerCredentials{
				{
					ID:             1,
					TokenExpiresAt: time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC),
				},
			},
			expectedIndex:     -1,
			expectedResetTime: time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC),
		},
		"same expiration time": {
			runners: []common.RunnerCredentials{
				{
					ID:              1,
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 5, 0, 0, 0, 0, time.UTC),
				},
				{
					ID:              2,
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 5, 0, 0, 0, 0, time.UTC),
				},
			},
			expectedIndex:     0,
			expectedResetTime: time.Date(2022, 1, 4, 0, 0, 0, 0, time.UTC),
		},
		"different expiration time": {
			runners: []common.RunnerCredentials{
				{
					ID:              1,
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 9, 0, 0, 0, 0, time.UTC),
				},
				{
					ID:              2,
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 5, 0, 0, 0, 0, time.UTC),
				},
			},
			expectedIndex:     1,
			expectedResetTime: time.Date(2022, 1, 4, 0, 0, 0, 0, time.UTC),
		},
		"different obtained time": {
			runners: []common.RunnerCredentials{
				{
					ID:              1,
					TokenObtainedAt: time.Date(2022, 1, 5, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 9, 0, 0, 0, 0, time.UTC),
				},
				{
					ID:              2,
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 9, 0, 0, 0, 0, time.UTC),
				},
			},
			expectedIndex:     1,
			expectedResetTime: time.Date(2022, 1, 7, 0, 0, 0, 0, time.UTC),
		},
		"old configuration": {
			runners: []common.RunnerCredentials{
				{
					URL: "https://gitlab1.example.com/",
					// No ID nor time values - replicates entry from before the change was added
				},
				{
					URL: "https://gitlab2.example.com/",
					// No ID nor time values - replicates entry from before the change was added
				},
			},
			expectedIndex:     -1,
			expectedResetTime: time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC),
		},
	}

	for tn, tc := range testCases {
		t.Run(tn, func(t *testing.T) {
			config := common.NewConfig()
			for _, r := range tc.runners {
				config.Runners = append(config.Runners, &common.RunnerConfig{
					RunnerCredentials: r,
				})
			}

			runnerToReset, resetTime := nextRunnerToReset(config)
			if tc.expectedIndex < 0 {
				assert.Nil(t, runnerToReset)
				assert.True(t, resetTime.IsZero())
				return
			}

			assert.Equal(t, tc.runners[tc.expectedIndex], runnerToReset.RunnerCredentials)
			assert.Equal(t, tc.expectedResetTime, resetTime)
		})
	}
}

// runAtCall records a single RunCommand.runAt invocation observed by the test
// controller.
type runAtCall struct {
	time     time.Time
	callback func()
	task     *runAtTaskMock
}

// resetTokenRequest records a single ResetToken API request observed by the
// test controller.
type resetTokenRequest struct {
	runner   common.RunnerConfig
	systemID string
}

// resetRunnerTokenTestController bundles the RunCommand under test together
// with its mocks and an event channel used to sequence the test flow.
type resetRunnerTokenTestController struct {
	runCommand      RunCommand
	eventChan       chan interface{}
	waitGroup       sync.WaitGroup

	networkMock     *common.MockNetwork
	configSaverMock *common.MockConfigSaver
}

// runAtTaskMock is a runAtTask test double tracking finish/cancel state.
type runAtTaskMock struct {
	finished  bool
	cancelled bool
}

func (t *runAtTaskMock) cancel() {
	t.cancelled = true
}

func newResetRunnerTokenTestController(t *testing.T) *resetRunnerTokenTestController {
	networkMock := common.NewMockNetwork(t)
	configSaverMock := common.NewMockConfigSaver(t)

	configPath := filepath.Join(t.TempDir(), "config.toml")

	data := &resetRunnerTokenTestController{
		runCommand: RunCommand{
			configfile: configfile.New(configPath, configfile.WithExistingConfig(
				common.NewConfigWithSaver(configSaverMock),
			),
				configfile.WithSystemID(common.UnknownSystemID)),
			runAt:          runAt,
			runFinished:    make(chan bool),
			configReloaded: make(chan int),
			network:        networkMock,
		},
		eventChan:       make(chan interface{}),
		networkMock:     networkMock,
		configSaverMock: configSaverMock,
	}
	data.runCommand.runAt = data.runAt

	return data
}

// runAt implements the RunCommand.runAt interface and allows to integrate the call
// done in context of token resetting with the test implementation
func (c *resetRunnerTokenTestController) runAt(time time.Time, callback func()) runAtTask {
	task := runAtTaskMock{
		finished: false,
	}
	c.eventChan <- runAtCall{
		time:     time,
		callback: callback,
		task:     &task,
	}

	return &task
}

// mockResetToken should be run before the tested method call to ensure
// that API call is properly mocked, required and feeds data needed for
// further assertions
//
// Use only when this API call is expected. Otherwise - check assertResetTokenNotCalled
func (c *resetRunnerTokenTestController) mockResetToken(runnerID int64, response *common.ResetTokenResponse) {
	c.networkMock.
		On(
			"ResetToken",
			mock.MatchedBy(func(runner common.RunnerConfig) bool {
				return runnerID == runner.ID
			}),
			common.UnknownSystemID,
		).
		Return(func(runner common.RunnerConfig, systemID string) *common.ResetTokenResponse {
			// Sending is a blocking operation, so this blocks until the other thread receives it.
			c.eventChan <- resetTokenRequest{
				runner:   runner,
				systemID: systemID,
			}

			return response
		}).
		Once()
}

// mockConfigSave should be run before the tested method call to ensure
// that configuration file save call is required
//
// Use only when save is expected. Otherwise - check assertConfigSaveNotCalled
func (c *resetRunnerTokenTestController) mockConfigSave() {
	c.configSaverMock.On("Save", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
		_ = os.WriteFile(args.Get(0).(string), args.Get(1).([]byte), 0o600)
	}).Return(nil).Once()
}

// awaitRunAtCall blocks on waiting for the RunCommand.runAt call (in context of token
// resetting) to happen
//
// Returns details about the call for further assertions
func (c *resetRunnerTokenTestController) awaitRunAtCall(t *testing.T) runAtCall {
	event := <-c.eventChan
	e := event.(runAtCall)
	require.NotNil(t, e)

	return e
}

// awaitResetTokenRequest blocks on waiting for the mocked API call for the token reset
// to happen
//
// Returns reset token request details for further assertions
func (c *resetRunnerTokenTestController) awaitResetTokenRequest(t *testing.T) resetTokenRequest {
	event := <-c.eventChan
	e := event.(resetTokenRequest)
	require.NotNil(t, e)

	return e
}

// handleRunAtCall asserts whether the call is the expected one and if yes - executed
// the callback registered for it (so in this case - the call that schedules another
// request for the token reset API)
func (c *resetRunnerTokenTestController) handleRunAtCall(t *testing.T, time time.Time) {
	event := c.awaitRunAtCall(t)
	assert.Equal(t, time, event.time)

	event.callback()
	event.task.finished = true
}

// handleResetTokenRequest asserts whether the request to the API is the one expected
// (basing on the ID and systemID of the Runner)
//
//nolint:unparam
func (c *resetRunnerTokenTestController) handleResetTokenRequest(t *testing.T, runnerID int64, systemID string) {
	event := c.awaitResetTokenRequest(t)
	assert.Equal(t, runnerID, event.runner.ID)
	assert.Equal(t, systemID, event.systemID)
}

// pushToWaitGroup ensures that the callback function is executed in context
// of a WaitGroup. This allows use to organise the test case flow to be executed
// in the expected order
func (c *resetRunnerTokenTestController) pushToWaitGroup(callback func()) {
	c.waitGroup.Add(1)
	go func() {
		callback()
		c.waitGroup.Done()
	}()
}

// stop simulates RunCommand interruption - the moment when run() is finished
func (c *resetRunnerTokenTestController) stop() {
	c.runCommand.stopSignal = os.Interrupt
	close(c.runCommand.runFinished)
}

// reloadConfig simulates that configuration file update was discovered and that
// it was reloaded (which normally is done by RunCommand in background)
func (c *resetRunnerTokenTestController) reloadConfig() {
	c.runCommand.configReloaded <- 1
}

// setRunners updates the test configuration with given runner credentials.
//
// It should be used as the test case initialisation and may be used to simulate
// config change after reloading
func (c *resetRunnerTokenTestController) setRunners(runners []common.RunnerCredentials) {
	_ = c.runCommand.configfile.Load(configfile.WithMutateOnLoad(func(cfg *common.Config) error {
		var set []*common.RunnerConfig
		for _, runner := range runners {
			set = append(set, &common.RunnerConfig{
				RunnerCredentials: runner,
			})
		}
		cfg.Runners = set

		return nil
	}))

	// silently save changes to disk without going via mock
	saver := c.runCommand.configfile.Config().ConfigSaver
	c.runCommand.configfile.Config().ConfigSaver = nil
	defer func() {
		c.runCommand.configfile.Config().ConfigSaver = saver
	}()
	_ = c.runCommand.configfile.Save()
}

// wait stops execution until callbacks added currently to the WaitGroup
// are done
func (c *resetRunnerTokenTestController) wait() {
	c.waitGroup.Wait()
}

// finish ensures that channels used by the controller are closed
func (c *resetRunnerTokenTestController) finish() {
	close(c.eventChan)
}

// assertConfigSaveNotCalled should be run after the tested method call to ensure
// that configuration saving event was not executed
//
// Use only when configuration save is not expected. Otherwise - check mockConfigSave
func (c *resetRunnerTokenTestController) assertConfigSaveNotCalled(t *testing.T) {
	c.configSaverMock.AssertNotCalled(t, "Save", mock.Anything, mock.Anything)
}

// assertResetTokenNotCalled should be run after the tested method call to ensure
// that the network call to token reset API was not executed
//
// Use only when API call for token reset is not expected. Otherwise - check mockResetToken
func (c *resetRunnerTokenTestController) assertResetTokenNotCalled(t *testing.T) {
	c.networkMock.AssertNotCalled(t, "ResetToken", mock.Anything, mock.Anything)
}

// resetRunnerTokenTestCase pairs an initial runner credential set with the
// scripted flow that exercises the token-reset machinery.
type resetRunnerTokenTestCase struct {
	runners       []common.RunnerCredentials
	testProcedure func(t *testing.T, d *resetRunnerTokenTestController)
}

func TestRunCommand_resetOneRunnerToken(t *testing.T) {
	testCases := map[string]resetRunnerTokenTestCase{
		"no runners stop": {
			runners: []common.RunnerCredentials{},
			testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) {
				d.pushToWaitGroup(func() {
					assert.False(t, d.runCommand.resetOneRunnerToken())
					d.assertResetTokenNotCalled(t)
					d.assertConfigSaveNotCalled(t)
				})
				d.stop()
				d.wait()
			},
		},
		"no runners reload config": {
			runners: []common.RunnerCredentials{},
			testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) {
				d.pushToWaitGroup(func() {
					assert.True(t, d.runCommand.resetOneRunnerToken())
					d.assertResetTokenNotCalled(t)
					d.assertConfigSaveNotCalled(t)
				})
				d.reloadConfig()
				d.wait()
			},
		},
		"one expiring runner": {
			runners: []common.RunnerCredentials{
				{
					ID:              1,
					Token:           "token1",
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 9, 0, 0, 0, 0, time.UTC),
				},
			},
			testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) {
				d.pushToWaitGroup(func() {
					d.mockResetToken(1, &common.ResetTokenResponse{
						Token:           "token2",
						TokenObtainedAt: time.Date(2022, 1, 7, 0, 0, 0, 0, time.UTC),
						TokenExpiresAt:  time.Date(2022, 1, 11, 0, 0, 0, 0, time.UTC),
					})
					d.mockConfigSave()
					assert.True(t, d.runCommand.resetOneRunnerToken())
				})
				d.handleRunAtCall(t, time.Date(2022, 1, 7, 0, 0, 0, 0, time.UTC))
				d.handleResetTokenRequest(t, 1, common.UnknownSystemID)
				d.wait()

				runner := d.runCommand.configfile.Config().Runners[0]
				assert.Equal(t, "token2", runner.Token)
				assert.Equal(t, time.Date(2022, 1, 7, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt)
				assert.Equal(t, time.Date(2022, 1, 11, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt)
			},
		},
		"one non-expiring runner": {
			runners: []common.RunnerCredentials{
				{
					ID:              1,
					Token:           "token1",
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					// 0001-01-01T00:00:00.0 is the "zero" value of time.Time and is used
					// by resetting mechanism to recognize runners that don't have expiration time assigned
					TokenExpiresAt: time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC),
				},
			},
			testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) {
				d.pushToWaitGroup(func() {
					assert.False(t, d.runCommand.resetOneRunnerToken())
					d.assertResetTokenNotCalled(t)
					d.assertConfigSaveNotCalled(t)
				})
				d.stop()
				d.wait()
			},
		},
		"two expiring runners": {
			runners: []common.RunnerCredentials{
				{
					ID:              1,
					Token:           "token1_1",
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 9, 0, 0, 0, 0, time.UTC),
				},
				{
					ID:              2,
					Token:           "token2_1",
					TokenObtainedAt: time.Date(2022, 1, 2, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 10, 0, 0, 0, 0, time.UTC),
				},
			},
			testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) {
				d.pushToWaitGroup(func() {
					d.mockResetToken(1, &common.ResetTokenResponse{
						Token:           "token1_2",
						TokenObtainedAt: time.Date(2022, 1, 7, 0, 0, 0, 0, time.UTC),
						TokenExpiresAt:  time.Date(2022, 1, 11, 0, 0, 0, 0, time.UTC),
					})
					d.mockConfigSave()
					assert.True(t, d.runCommand.resetOneRunnerToken())
				})
				d.handleRunAtCall(t, time.Date(2022, 1, 7, 0, 0, 0, 0, time.UTC))
				d.handleResetTokenRequest(t, 1, common.UnknownSystemID)
				d.wait()

				d.pushToWaitGroup(func() {
					d.mockResetToken(2, &common.ResetTokenResponse{
						Token:           "token2_2",
						TokenObtainedAt: time.Date(2022, 1, 8, 0, 0, 0, 0, time.UTC),
						TokenExpiresAt:  time.Date(2022, 1, 12, 0, 0, 0, 0, time.UTC),
					})
					d.mockConfigSave()
					assert.True(t, d.runCommand.resetOneRunnerToken())
				})
				d.handleRunAtCall(t, time.Date(2022, 1, 8, 0, 0, 0, 0, time.UTC))
				d.handleResetTokenRequest(t, 2, common.UnknownSystemID)
				d.wait()

				runner := d.runCommand.configfile.Config().Runners[0]
				assert.Equal(t, "token1_2", runner.Token)
				assert.Equal(t, time.Date(2022, 1, 7, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt)
				assert.Equal(t, time.Date(2022, 1, 11, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt)

				runner = d.runCommand.configfile.Config().Runners[1]
				assert.Equal(t, "token2_2", runner.Token)
				assert.Equal(t, time.Date(2022, 1, 8, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt)
				assert.Equal(t, time.Date(2022, 1, 12, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt)
			},
		},
		"one expiring, one non-expiring runner": {
			runners: []common.RunnerCredentials{
				{
					ID:              1,
					Token:           "token1_1",
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					// 0001-01-01T00:00:00.0 is the "zero" value of time.Time and is used
					// by resetting mechanism to recognize runners that don't have expiration time assigned
					TokenExpiresAt: time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC),
				},
				{
					ID:              2,
					Token:           "token2_1",
					TokenObtainedAt: time.Date(2022, 1, 2, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 10, 0, 0, 0, 0, time.UTC),
				},
			},
			testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) {
				d.pushToWaitGroup(func() {
					d.mockResetToken(2, &common.ResetTokenResponse{
						Token:           "token2_2",
						TokenObtainedAt: time.Date(2022, 1, 8, 0, 0, 0, 0, time.UTC),
						TokenExpiresAt:  time.Date(2022, 1, 12, 0, 0, 0, 0, time.UTC),
					})
					d.mockConfigSave()
					assert.True(t, d.runCommand.resetOneRunnerToken())
				})
				d.handleRunAtCall(t, time.Date(2022, 1, 8, 0, 0, 0, 0, time.UTC))
				d.handleResetTokenRequest(t, 2, common.UnknownSystemID)
				d.wait()

				runner := d.runCommand.configfile.Config().Runners[0]
				assert.Equal(t, "token1_1", runner.Token)
				assert.Equal(t, time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt)
				assert.Equal(t, time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt)

				runner = d.runCommand.configfile.Config().Runners[1]
				assert.Equal(t, "token2_2", runner.Token)
				assert.Equal(t, time.Date(2022, 1, 8, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt)
				assert.Equal(t, time.Date(2022, 1, 12, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt)
			},
		},
		"one expiring runner stop": {
			runners: []common.RunnerCredentials{
				{
					ID:              1,
					Token:           "token1",
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 9, 0, 0, 0, 0, time.UTC),
				},
			},
			testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) {
				d.pushToWaitGroup(func() {
					assert.False(t, d.runCommand.resetOneRunnerToken())
					d.assertResetTokenNotCalled(t)
					d.assertConfigSaveNotCalled(t)
				})
				event := d.awaitRunAtCall(t)
				assert.Equal(t, time.Date(2022, 1, 7, 0, 0, 0, 0, time.UTC), event.time)

				d.stop()
				d.wait()

				assert.True(t, event.task.cancelled)
				assert.False(t, event.task.finished)

				runner := d.runCommand.configfile.Config().Runners[0]
				assert.Equal(t, "token1", runner.Token)
				assert.Equal(t, time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt)
				assert.Equal(t, time.Date(2022, 1, 9, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt)
			},
		},
		"one expiring runner reload config": {
			runners: []common.RunnerCredentials{
				{
					ID:              1,
					Token:           "token1",
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 9, 0, 0, 0, 0, time.UTC),
				},
			},
			testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) {
				d.pushToWaitGroup(func() {
					assert.True(t, d.runCommand.resetOneRunnerToken())
					d.assertResetTokenNotCalled(t)
					d.assertConfigSaveNotCalled(t)
				})
				event := d.awaitRunAtCall(t)
				assert.Equal(t, time.Date(2022, 1, 7, 0, 0, 0, 0, time.UTC), event.time)

				d.reloadConfig()
				d.wait()

				assert.True(t, event.task.cancelled)
				assert.False(t, event.task.finished)

				runner := d.runCommand.configfile.Config().Runners[0]
				assert.Equal(t, "token1", runner.Token)
				assert.Equal(t, time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt)
				assert.Equal(t, time.Date(2022, 1, 9, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt)
			},
		},
		"one expiring runner rewrite and reload config": {
			runners: []common.RunnerCredentials{
				{
					ID:              1,
					Token:           "token1",
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 9, 0, 0, 0, 0, time.UTC),
				},
			},
			testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) {
				d.pushToWaitGroup(func() {
					assert.True(t, d.runCommand.resetOneRunnerToken())
					d.assertResetTokenNotCalled(t)
					d.assertConfigSaveNotCalled(t)
				})

				event := d.awaitRunAtCall(t)
				assert.Equal(t, time.Date(2022, 1, 7, 0, 0, 0, 0, time.UTC), event.time)

				d.setRunners([]common.RunnerCredentials{
					{
						ID:              1,
						Token:           "token2",
						TokenObtainedAt: time.Date(2022, 1, 8, 0, 0, 0, 0, time.UTC),
						TokenExpiresAt:  time.Date(2022, 1, 16, 0, 0, 0, 0, time.UTC),
					},
				})
				d.reloadConfig()
				d.wait()

				assert.True(t, event.task.cancelled)
				assert.False(t, event.task.finished)

				runner := d.runCommand.configfile.Config().Runners[0]
				assert.Equal(t, "token2", runner.Token)
				assert.Equal(t, time.Date(2022, 1, 8, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt)
				assert.Equal(t, time.Date(2022, 1, 16, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt)

				d.pushToWaitGroup(func() {
					d.mockResetToken(1, &common.ResetTokenResponse{
						Token:           "token3",
						TokenObtainedAt: time.Date(2022, 1, 14, 0, 0, 0, 0, time.UTC),
						TokenExpiresAt:  time.Date(2022, 1, 22, 0, 0, 0, 0, time.UTC),
					})
					d.mockConfigSave()
					assert.True(t, d.runCommand.resetOneRunnerToken())
				})
				d.handleRunAtCall(t, time.Date(2022, 1, 14, 0, 0, 0, 0, time.UTC))
				d.handleResetTokenRequest(t, 1, common.UnknownSystemID)
				d.wait()

				runner = d.runCommand.configfile.Config().Runners[0]
				assert.Equal(t, "token3", runner.Token)
				assert.Equal(t, time.Date(2022, 1, 14, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt)
				assert.Equal(t, time.Date(2022, 1, 22, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt)
			},
		},
		"one expiring runner rewrite and reload config race condition": {
			runners: []common.RunnerCredentials{
				{
					ID:              1,
					Token:           "token1",
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 9, 0, 0, 0, 0, time.UTC),
				},
			},
			testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) {
				d.pushToWaitGroup(func() {
					assert.True(t, d.runCommand.resetOneRunnerToken())
					d.assertResetTokenNotCalled(t)
					d.assertConfigSaveNotCalled(t)
				})
				d.setRunners([]common.RunnerCredentials{
					{
						ID:              1,
						Token:           "token2",
						TokenObtainedAt: time.Date(2022, 1, 8, 0, 0, 0, 0, time.UTC),
						TokenExpiresAt:  time.Date(2022, 1, 16, 0, 0, 0, 0, time.UTC),
					},
				})
				event := d.awaitRunAtCall(t)

				d.reloadConfig()
				d.wait()

				assert.True(t, event.task.cancelled)
				assert.False(t, event.task.finished)

				runner := d.runCommand.configfile.Config().Runners[0]
				assert.Equal(t, "token2", runner.Token)
				assert.Equal(t, time.Date(2022, 1, 8, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt)
				assert.Equal(t, time.Date(2022, 1, 16, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt)
			},
		},
		"one expiring runner error": {
			runners: []common.RunnerCredentials{
				{
					ID:              1,
					Token:           "token1",
					TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC),
					TokenExpiresAt:  time.Date(2022, 1, 9, 0, 0, 0, 0, time.UTC),
				},
			},
			testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) {
				d.pushToWaitGroup(func() {
					d.mockResetToken(1, nil)
					assert.True(t, d.runCommand.resetOneRunnerToken())
					d.assertConfigSaveNotCalled(t)
				})
				d.handleRunAtCall(t, time.Date(2022, 1, 7, 0, 0, 0, 0, time.UTC))
				d.handleResetTokenRequest(t, 1, common.UnknownSystemID)
				d.wait()

				runner := d.runCommand.configfile.Config().Runners[0]
				assert.Equal(t, "token1", runner.Token)
				assert.Equal(t, time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt)
				assert.Equal(t,
time.Date(2022, 1, 9, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt) }, }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { d := newResetRunnerTokenTestController(t) d.setRunners(tc.runners) tc.testProcedure(t, d) d.finish() }) } } func TestRunCommand_resetRunnerTokens(t *testing.T) { testCases := map[string]resetRunnerTokenTestCase{ "one non-expiring runner": { runners: []common.RunnerCredentials{ { ID: 1, Token: "token1", TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC), // 0001-01-01T00:00:00.0 is the "zero" value of time.Time and is used // by resetting mechanism to recognize runners that don't have expiration time assigned TokenExpiresAt: time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC), }, }, testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) { d.pushToWaitGroup(func() { d.runCommand.resetRunnerTokens() d.assertResetTokenNotCalled(t) d.assertConfigSaveNotCalled(t) }) d.stop() d.wait() }, }, "one expiring runner stop": { runners: []common.RunnerCredentials{ { ID: 1, Token: "token1", TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC), TokenExpiresAt: time.Date(2022, 1, 17, 0, 0, 0, 0, time.UTC), }, }, testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) { d.pushToWaitGroup(func() { d.runCommand.resetRunnerTokens() d.assertResetTokenNotCalled(t) d.assertConfigSaveNotCalled(t) }) event := d.awaitRunAtCall(t) d.stop() d.wait() assert.True(t, event.task.cancelled) assert.False(t, event.task.finished) runner := d.runCommand.configfile.Config().Runners[0] assert.Equal(t, "token1", runner.Token) assert.Equal(t, time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt) assert.Equal(t, time.Date(2022, 1, 17, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt) }, }, "one expiring runner with non-expiring response": { runners: []common.RunnerCredentials{ { ID: 1, Token: "token1", TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC), TokenExpiresAt: time.Date(2022, 1, 17, 0, 0, 0, 0, time.UTC), 
}, }, testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) { d.pushToWaitGroup(func() { d.mockResetToken(1, &common.ResetTokenResponse{ Token: "token2", TokenObtainedAt: time.Date(2022, 1, 13, 0, 0, 0, 0, time.UTC), TokenExpiresAt: time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC), }) d.mockConfigSave() d.runCommand.resetRunnerTokens() }) d.handleRunAtCall(t, time.Date(2022, 1, 13, 0, 0, 0, 0, time.UTC)) d.stop() d.handleResetTokenRequest(t, 1, common.UnknownSystemID) d.wait() runner := d.runCommand.configfile.Config().Runners[0] assert.Equal(t, "token2", runner.Token) assert.Equal(t, time.Date(2022, 1, 13, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt) assert.Equal(t, time.Date(1, 1, 1, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt) }, }, "one expiring runner with expiring response": { runners: []common.RunnerCredentials{ { ID: 1, Token: "token1", TokenObtainedAt: time.Date(2022, 1, 1, 0, 0, 0, 0, time.UTC), TokenExpiresAt: time.Date(2022, 1, 17, 0, 0, 0, 0, time.UTC), }, }, testProcedure: func(t *testing.T, d *resetRunnerTokenTestController) { d.pushToWaitGroup(func() { d.mockResetToken(1, &common.ResetTokenResponse{ Token: "token2", TokenObtainedAt: time.Date(2022, 1, 13, 0, 0, 0, 0, time.UTC), TokenExpiresAt: time.Date(2022, 1, 17, 0, 0, 0, 0, time.UTC), }) d.mockConfigSave() d.runCommand.resetRunnerTokens() }) d.handleRunAtCall(t, time.Date(2022, 1, 13, 0, 0, 0, 0, time.UTC)) d.handleResetTokenRequest(t, 1, common.UnknownSystemID) event := d.awaitRunAtCall(t) d.stop() d.wait() assert.True(t, event.task.cancelled) assert.False(t, event.task.finished) runner := d.runCommand.configfile.Config().Runners[0] assert.Equal(t, "token2", runner.Token) assert.Equal(t, time.Date(2022, 1, 13, 0, 0, 0, 0, time.UTC), runner.TokenObtainedAt) assert.Equal(t, time.Date(2022, 1, 17, 0, 0, 0, 0, time.UTC), runner.TokenExpiresAt) }, }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { d := newResetRunnerTokenTestController(t) d.setRunners(tc.runners) 
tc.testProcedure(t, d)
			d.finish()
		})
	}
}

// TestRunCommand_configReloadingRegression checks that the config-reload loop
// does not produce spurious reloads: two explicit reloadConfig() calls plus one
// mtime-triggered automatic reload must yield exactly 3 observed reloads.
func TestRunCommand_configReloadingRegression(t *testing.T) {
	// fake config
	configName := filepath.Join(t.TempDir(), "config-reload-test")
	require.NoError(t, os.WriteFile(configName, nil, 0o777))

	c := &RunCommand{
		ConfigFile:           configName,
		configfile:           configfile.New(configName),
		runInterruptSignal:   make(chan os.Signal, 1),
		reloadSignal:         make(chan os.Signal, 1),
		configReloaded:       make(chan int, 1),
		reloadConfigInterval: 10 * time.Millisecond,
	}

	ctx, cancel := context.WithTimeout(t.Context(), 5*time.Second)
	defer cancel()

	// Counting discovered configuration reloads.
	// The goroutine drains configReloaded while repeatedly driving
	// updateConfig() in the default branch; `done` signals its exit.
	var configReloadedCount atomic.Int64
	done := make(chan struct{})
	go func() {
		for {
			select {
			case <-ctx.Done():
				close(done)
				return
			case <-c.configReloaded:
				configReloadedCount.Add(1)
			default:
				c.updateConfig()
			}
		}
	}()

	// force reload twice
	require.NoError(t, c.reloadConfig())
	require.NoError(t, c.reloadConfig())

	// trigger automatic reload (by changing time of config file) and wait
	update := time.Now().Add(time.Second)
	require.NoError(t, os.Chtimes(configName, update, update))

	// sleep for 5 times the reload config interval to make sure we don't reload
	// more than we should
	time.Sleep(c.reloadConfigInterval * 5)
	cancel()
	// Drain any reload notification still buffered in the channel so it is
	// counted before the final assertion.
	for len(c.configReloaded) > 0 {
		<-c.configReloaded
		configReloadedCount.Add(1)
	}
	<-done
	assert.Equal(t, int64(3), configReloadedCount.Load())
}

// TestRunCommand_configReloading rewrites the config file contents (dropping
// log_level) and verifies both the reload count and that the global log level
// reverts to "info" once the debug setting disappears from the config.
func TestRunCommand_configReloading(t *testing.T) {
	// This test is flaky on Win21H2 platform
	// Skipping until https://gitlab.com/gitlab-org/gitlab-runner/-/issues/37920 is resolved.
	helper_test.SkipIfGitLabCIOn(t, helper_test.OSWindows)

	_, cleanup := test.NewHook()
	defer cleanup()

	config := `concurrent = 1
check_interval = 1
log_level = "debug"
shutdown_timeout = 0`
	configChanged := `concurrent = 1
check_interval = 1
shutdown_timeout = 0`

	configName := filepath.Join(t.TempDir(), "config-reload-test")
	require.NoError(t, os.WriteFile(configName, []byte(config), 0o777))

	c := &RunCommand{
		ConfigFile:           configName,
		configfile:           configfile.New(configName),
		runInterruptSignal:   make(chan os.Signal, 1),
		reloadSignal:         make(chan os.Signal, 1),
		configReloaded:       make(chan int, 1),
		reloadConfigInterval: 10 * time.Millisecond,
	}

	ctx, cancel := context.WithTimeout(t.Context(), 5*time.Second)
	defer cancel()

	// Counting discovered configuration reloads
	var configReloadedCount atomic.Int64
	var wg sync.WaitGroup
	wg.Add(1)
	go func() {
		for {
			select {
			case <-ctx.Done():
				wg.Done()
				return
			case <-c.configReloaded:
				configReloadedCount.Add(1)
			default:
				c.updateConfig()
			}
		}
	}()

	// force reload twice
	require.NoError(t, c.reloadConfig())
	require.NoError(t, c.reloadConfig())

	// trigger automatic reload (by changing time of config file) and wait
	file, err := os.OpenFile(configName, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o777)
	require.NoError(t, err)
	_, err = file.WriteString(configChanged)
	require.NoError(t, err)
	file.Close()

	// sleep for 15 times the reload config interval to make sure we don't reload
	// more than we should
	time.Sleep(c.reloadConfigInterval * 15)
	cancel()
	// Drain any buffered reload notification before asserting the count.
	for len(c.configReloaded) > 0 {
		<-c.configReloaded
		configReloadedCount.Add(1)
	}
	wg.Wait()

	assert.Equal(t, "info", logrus.GetLevel().String())
	assert.Equal(t, int64(3), configReloadedCount.Load())
}

// TestListenAddress exercises listenAddress() for addresses supplied either on
// the command line or via the config file, covering default-port fallback,
// empty input, and invalid address errors.
func TestListenAddress(t *testing.T) {
	type source string
	const (
		configurationFromCli    source = "from-cli"
		configurationFromConfig source = "from-config"
	)

	examples := map[string]struct {
		address         string
		setAddress      bool
		expectedAddress string
		errorIsExpected bool
	}{
		"address-set-without-port": {"localhost", true,
"localhost:9252", false}, "port-set-without-address": {":1234", true, ":1234", false}, "address-set-with-port": {"localhost:1234", true, "localhost:1234", false}, "address-is-empty": {"", true, "", false}, "address-is-invalid": {"localhost::1234", true, "", true}, "address-not-set": {"", false, "", false}, } for exampleName, example := range examples { for _, testType := range []source{configurationFromCli, configurationFromConfig} { t.Run(fmt.Sprintf("%s-%s", exampleName, testType), func(t *testing.T) { cfg := &common.Config{} var address string if example.setAddress { if testType == configurationFromCli { address = example.address } else { cfg.ListenAddress = example.address } } address, err := listenAddress(cfg, address) assert.Equal(t, example.expectedAddress, address) if example.errorIsExpected { assert.Error(t, err) } else { assert.NoError(t, err) } }) } } } func TestRequestBottleneckWarning(t *testing.T) { tests := []struct { name string config *common.Config expectWarning bool expectedWarnings []string // Specific warning messages to look for description string }{ { name: "worker_starvation", config: &common.Config{ Concurrent: 2, Runners: []*common.RunnerConfig{ {RunnerCredentials: common.RunnerCredentials{Token: "runner1"}}, {RunnerCredentials: common.RunnerCredentials{Token: "runner2"}}, {RunnerCredentials: common.RunnerCredentials{Token: "runner3"}}, }, }, expectWarning: true, expectedWarnings: []string{"Worker starvation bottleneck"}, description: "Should warn when concurrent < runners", }, { name: "request_bottleneck", config: &common.Config{ Concurrent: 4, Runners: []*common.RunnerConfig{ { RequestConcurrency: 1, Limit: 10, RunnerCredentials: common.RunnerCredentials{Token: "runner1"}, }, { RequestConcurrency: 1, Limit: 8, RunnerCredentials: common.RunnerCredentials{Token: "runner2"}, }, }, }, expectWarning: true, expectedWarnings: []string{"Request bottleneck"}, description: "Should warn about request bottleneck", }, { name: 
"build_limit_saturation", config: &common.Config{ Concurrent: 4, Runners: []*common.RunnerConfig{ { Limit: 2, RequestConcurrency: 1, RunnerCredentials: common.RunnerCredentials{Token: "runner1"}, }, { Limit: 1, RequestConcurrency: 1, RunnerCredentials: common.RunnerCredentials{Token: "runner2"}, }, }, }, expectWarning: true, expectedWarnings: []string{"Build limit bottleneck"}, description: "Should warn about build limit saturation", }, { name: "multiple_scenarios", config: &common.Config{ Concurrent: 4, Runners: []*common.RunnerConfig{ { RequestConcurrency: 1, Limit: 2, RunnerCredentials: common.RunnerCredentials{Token: "runner1"}, }, { RequestConcurrency: 1, Limit: 1, RunnerCredentials: common.RunnerCredentials{Token: "runner2"}, }, { RequestConcurrency: 2, Limit: 5, RunnerCredentials: common.RunnerCredentials{Token: "runner3"}, }, }, }, expectWarning: true, expectedWarnings: []string{"Request bottleneck", "Build limit bottleneck"}, description: "Should warn about multiple issues", }, { name: "healthy_configuration", config: &common.Config{ Concurrent: 6, Runners: []*common.RunnerConfig{ { RequestConcurrency: 3, Limit: 10, RunnerCredentials: common.RunnerCredentials{Token: "runner1"}, }, { RequestConcurrency: 2, Limit: 5, RunnerCredentials: common.RunnerCredentials{Token: "runner2"}, }, }, }, expectWarning: false, expectedWarnings: nil, description: "Should not warn for healthy configuration", }, { name: "adequate_concurrent", config: &common.Config{ Concurrent: 3, Runners: []*common.RunnerConfig{ { RequestConcurrency: 2, RunnerCredentials: common.RunnerCredentials{Token: "runner1"}, }, { RequestConcurrency: 2, RunnerCredentials: common.RunnerCredentials{Token: "runner2"}, }, { RequestConcurrency: 2, RunnerCredentials: common.RunnerCredentials{Token: "runner3"}, }, }, }, expectWarning: false, expectedWarnings: nil, description: "Should not warn when concurrent >= runners", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { hook, cleanup := 
test.NewHook() defer cleanup() logrus.SetLevel(logrus.WarnLevel) logrus.SetOutput(io.Discard) cmd := RunCommand{ configfile: configfile.New("", configfile.WithExistingConfig(tt.config), configfile.WithSystemID(common.UnknownSystemID)), } cmd.checkConfigConcurrency(tt.config) foundMainWarning := false for _, entry := range hook.AllEntries() { if strings.Contains(entry.Message, "CONFIGURATION:") && strings.Contains(entry.Message, "Long polling issues detected") { foundMainWarning = true break } } if !tt.expectWarning { assert.False(t, foundMainWarning, tt.description) return } assert.True(t, foundMainWarning, tt.description) for _, expectedWarning := range tt.expectedWarnings { foundSpecificWarning := false for _, entry := range hook.AllEntries() { if strings.Contains(entry.Message, expectedWarning) { foundSpecificWarning = true break } } assert.True(t, foundSpecificWarning, fmt.Sprintf("Should contain warning: %s", expectedWarning)) } }) } } func TestRunCommand_requestJob_HandlesUpdateAbort(t *testing.T) { runner := &common.RunnerConfig{ RunnerCredentials: common.RunnerCredentials{ Token: "test-token", }, } jobData := &spec.Job{ ID: 123, Token: "job-token", } network := common.NewMockNetwork(t) mockTrace := common.NewMockJobTrace(t) mockTrace.On("SetFailuresCollector", mock.Anything).Return() mockTrace.On("Finish").Return() // Mock RequestJob to return a job network.On("RequestJob", mock.Anything, *runner, mock.Anything).Return(jobData, true) // Mock ProcessJob to return a trace network.On("ProcessJob", *runner, mock.AnythingOfType("*common.JobCredentials")).Return(mockTrace, nil) // Mock UpdateJob to return UpdateAbort network.On("UpdateJob", *runner, mock.AnythingOfType("*common.JobCredentials"), mock.AnythingOfType("common.UpdateJobInfo")). 
Return(common.UpdateJobResult{State: common.UpdateAbort})

	cmd := &RunCommand{
		network: network,
	}

	trace, response, err := cmd.requestJob(runner, nil)

	// When UpdateJob returns UpdateAbort, requestJob should return nil
	assert.Nil(t, trace, "Should return nil trace when update is aborted")
	assert.Nil(t, response, "Should return nil response when update is aborted")
	assert.Nil(t, err, "Should return nil error when update is aborted")

	network.AssertExpectations(t)
	mockTrace.AssertExpectations(t)
}

// TestRunCommand_requestJob_HandlesCancelRequested verifies that requestJob
// drops the job (returning all nils) when the initial UpdateJob call succeeds
// but reports CancelRequested=true.
func TestRunCommand_requestJob_HandlesCancelRequested(t *testing.T) {
	runner := &common.RunnerConfig{
		RunnerCredentials: common.RunnerCredentials{
			Token: "test-token",
		},
	}

	jobData := &spec.Job{
		ID:    123,
		Token: "job-token",
	}

	network := common.NewMockNetwork(t)

	// Trace is expected to be set up and then finished when the job is dropped.
	mockTrace := common.NewMockJobTrace(t)
	mockTrace.On("SetFailuresCollector", mock.Anything).Return()
	mockTrace.On("Finish").Return()

	// Mock RequestJob to return a job
	network.On("RequestJob", mock.Anything, *runner, mock.Anything).Return(jobData, true)

	// Mock ProcessJob to return a trace
	network.On("ProcessJob", *runner, mock.AnythingOfType("*common.JobCredentials")).Return(mockTrace, nil)

	// Mock UpdateJob to return success but with CancelRequested=true
	network.On("UpdateJob", *runner, mock.AnythingOfType("*common.JobCredentials"), mock.AnythingOfType("common.UpdateJobInfo")).
Return(common.UpdateJobResult{State: common.UpdateSucceeded, CancelRequested: true})

	cmd := &RunCommand{
		network: network,
	}

	trace, response, err := cmd.requestJob(runner, nil)

	// When UpdateJob has CancelRequested=true, requestJob should return nil
	assert.Nil(t, trace, "Should return nil trace when job is being canceled")
	assert.Nil(t, response, "Should return nil response when job is being canceled")
	assert.Nil(t, err, "Should return nil error when job is being canceled")

	network.AssertExpectations(t)
	mockTrace.AssertExpectations(t)
}

// TestRunCommand_requestJob_ContinuesWhenUpdateSucceeds verifies the happy
// path: when the initial UpdateJob call succeeds (and no cancel is requested),
// requestJob returns the processed trace and the job payload.
func TestRunCommand_requestJob_ContinuesWhenUpdateSucceeds(t *testing.T) {
	runner := &common.RunnerConfig{
		RunnerCredentials: common.RunnerCredentials{
			Token: "test-token",
		},
	}

	jobData := &spec.Job{
		ID:    123,
		Token: "job-token",
	}

	// Use the generated constructor, consistent with the sibling requestJob
	// tests: it binds the mock to t so unexpected calls fail the test instead
	// of panicking, and registers a cleanup expectation check.
	mockTrace := common.NewMockJobTrace(t)
	mockTrace.On("SetFailuresCollector", mock.Anything).Return()

	network := common.NewMockNetwork(t)

	// Mock RequestJob to return a job
	network.On("RequestJob", mock.Anything, *runner, mock.Anything).Return(jobData, true)

	// Mock UpdateJob to return success
	network.On("UpdateJob", *runner, mock.AnythingOfType("*common.JobCredentials"), mock.AnythingOfType("common.UpdateJobInfo")).
Return(common.UpdateJobResult{State: common.UpdateSucceeded})

	// Mock ProcessJob to return a trace
	network.On("ProcessJob", *runner, mock.AnythingOfType("*common.JobCredentials")).Return(mockTrace, nil)

	cmd := &RunCommand{
		network: network,
	}

	trace, response, err := cmd.requestJob(runner, nil)

	// When UpdateJob succeeds, requestJob should continue and return the job
	assert.Equal(t, mockTrace, trace, "Should return the job trace when update succeeds")
	assert.Equal(t, jobData, response, "Should return the job response when update succeeds")
	assert.Nil(t, err, "Should return no error when update succeeds")

	network.AssertExpectations(t)
	mockTrace.AssertExpectations(t)
}

// TestRunCommand_requestJob_ReturnsNilWhenNoJob verifies that requestJob
// returns all nils when the server has no job to hand out; no ProcessJob or
// UpdateJob expectations are set, so any such call would fail the test.
func TestRunCommand_requestJob_ReturnsNilWhenNoJob(t *testing.T) {
	runner := &common.RunnerConfig{
		RunnerCredentials: common.RunnerCredentials{
			Token: "test-token",
		},
	}

	network := common.NewMockNetwork(t)

	// Mock RequestJob to return no job
	network.On("RequestJob", mock.Anything, *runner, mock.Anything).Return(nil, true)

	cmd := &RunCommand{
		network: network,
	}

	trace, response, err := cmd.requestJob(runner, nil)

	// When no job is available, requestJob should return nil without calling UpdateJob
	assert.Nil(t, trace, "Should return nil trace when no job available")
	assert.Nil(t, response, "Should return nil response when no job available")
	assert.Nil(t, err, "Should return nil error when no job available")

	network.AssertExpectations(t)
}

================================================
FILE: commands/register.go
================================================
package commands

import (
	"bufio"
	"errors"
	"fmt"
	"io"
	"os"
	"os/signal"
	"runtime"
	"strings"
	"time"

	"dario.cat/mergo"
	"github.com/sirupsen/logrus"
	"github.com/urfave/cli"

	"gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig"
	"gitlab.com/gitlab-org/gitlab-runner/commands/internal/configfile"
	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/executors"
	"gitlab.com/gitlab-org/gitlab-runner/network"
"gitlab.com/gitlab-org/gitlab-runner/shells"
	"gitlab.com/gitlab-org/labkit/fips"
)

// configTemplate holds a configuration template file that can be merged into
// the runner configuration produced by `register`.
type configTemplate struct {
	*common.Config

	ConfigFile string `long:"config" env:"TEMPLATE_CONFIG_FILE" description:"Path to the configuration template file"`
}

// Enabled reports whether a template file path was provided.
func (c *configTemplate) Enabled() bool {
	return c.ConfigFile != ""
}

// MergeTo loads the template file and merges its single [[runners]] entry into
// config. It returns an error when the file can't be loaded, when it doesn't
// contain exactly one [[runners]] entry, or when the merge itself fails.
func (c *configTemplate) MergeTo(config *common.RunnerConfig) error {
	err := c.loadConfigTemplate()
	if err != nil {
		return fmt.Errorf("couldn't load configuration template file: %w", err)
	}

	if len(c.Runners) != 1 {
		return errors.New("configuration template must contain exactly one [[runners]] entry")
	}

	// Drop any token present in the template before merging, so it cannot
	// override the token obtained during registration.
	c.Runners[0].Token = ""

	err = mergo.Merge(config, c.Runners[0])
	if err != nil {
		return fmt.Errorf("error while merging configuration with configuration template: %w", err)
	}

	return nil
}

// loadConfigTemplate reads the template file into c.Config.
func (c *configTemplate) loadConfigTemplate() error {
	config := common.NewConfig()

	err := config.LoadConfig(c.ConfigFile)
	if err != nil {
		return err
	}

	c.Config = config

	return nil
}

// RegisterCommand implements the `gitlab-runner register` command. Field tags
// map the options to CLI flags and environment variables.
type RegisterCommand struct {
	context           *cli.Context
	network           common.Network
	executorProviders executors.Providers
	reader            *bufio.Reader
	registered        bool // set once the runner was successfully registered/verified

	// timeNowFn is injectable for tests; defaults to time.Now.
	timeNowFn func() time.Time

	ConfigTemplate configTemplate `namespace:"template"`

	ConfigFile        string `short:"c" long:"config" env:"CONFIG_FILE" description:"Config file"`
	TagList           string `long:"tag-list" env:"RUNNER_TAG_LIST" description:"Tag list"`
	NonInteractive    bool   `short:"n" long:"non-interactive" env:"REGISTER_NON_INTERACTIVE" description:"Run registration unattended"`
	LeaveRunner       bool   `long:"leave-runner" env:"REGISTER_LEAVE_RUNNER" description:"Don't remove runner if registration fails"`
	RegistrationToken string `short:"r" long:"registration-token" env:"REGISTRATION_TOKEN" description:"Runner's registration token (deprecated, use --token)"`
	RunUntagged       bool   `long:"run-untagged" env:"REGISTER_RUN_UNTAGGED" description:"Register to run untagged builds; defaults to 'true' when 'tag-list' is empty"`
	Locked            bool   `long:"locked" env:"REGISTER_LOCKED"
description:"Lock Runner for current project, defaults to 'true'"`
	AccessLevel     string `long:"access-level" env:"REGISTER_ACCESS_LEVEL" description:"Set access_level of the runner to not_protected or ref_protected; defaults to not_protected"`
	MaximumTimeout  int    `long:"maximum-timeout" env:"REGISTER_MAXIMUM_TIMEOUT" description:"What is the maximum timeout (in seconds) that will be set for job when using this Runner"`
	Paused          bool   `long:"paused" env:"REGISTER_PAUSED" description:"Set Runner to be paused, defaults to 'false'"`
	MaintenanceNote string `long:"maintenance-note" env:"REGISTER_MAINTENANCE_NOTE" description:"Runner's maintenance note"`

	common.RunnerConfig
}

// NewRegisterCommand returns the cli.Command for registering a new runner.
func NewRegisterCommand(n common.Network, executorProviders executors.Providers) cli.Command {
	return common.NewCommand("register", "register a new runner", newRegisterCommand(n, executorProviders))
}

// AccessLevel is the runner's protection level as accepted by the server.
type AccessLevel string

const (
	NotProtected AccessLevel = "not_protected"
	RefProtected AccessLevel = "ref_protected"
)

const (
	defaultDockerWindowCacheDir = "c:\\cache"
)

// askOnce prints prompt (showing the current *result as the default) and reads
// one line from the interactive reader. It returns true when a usable value is
// available: either new non-empty input, the existing default, or an empty
// answer while allowEmpty is set. Panics on EOF in interactive mode and on any
// other read error.
func (s *RegisterCommand) askOnce(prompt string, result *string, allowEmpty bool) bool {
	println(prompt)

	if *result != "" {
		print("["+*result, "]: ")
	}

	if s.reader == nil {
		s.reader = bufio.NewReader(os.Stdin)
	}

	data, _, err := s.reader.ReadLine()
	if err == io.EOF && !s.NonInteractive {
		logrus.Panicln("Unexpected EOF. Did you mean to use --non-interactive?")
	}
	if err != nil {
		panic(err)
	}

	newResult := string(data)
	newResult = strings.TrimSpace(newResult)

	if newResult != "" {
		*result = newResult
		return true
	}

	if allowEmpty || *result != "" {
		return true
	}

	return false
}

// ask resolves the value for the CLI option `key`: it starts from the CLI
// context, and in interactive mode keeps prompting until askOnce accepts an
// answer. In non-interactive mode a missing required value is fatal.
func (s *RegisterCommand) ask(key, prompt string, allowEmptyOptional ...bool) string {
	allowEmpty := len(allowEmptyOptional) > 0 && allowEmptyOptional[0]

	result := s.context.String(key)
	result = strings.TrimSpace(result)

	if s.NonInteractive || prompt == "" {
		if result == "" && !allowEmpty {
			logrus.Panicln("The", key, "needs to be entered")
		}
		return result
	}

	for !s.askOnce(prompt, &result, allowEmpty) {
	}

	return result
}

// askExecutor prompts until a registered executor name is chosen; in
// non-interactive mode an invalid executor is fatal.
func (s *RegisterCommand) askExecutor() {
	var names []string
	for name := range s.executorProviders.All() {
		names = append(names, name)
	}
	executorNames := strings.Join(names, ", ")

	for {
		s.Executor = s.ask("executor", "Enter an executor: "+executorNames+":", true)
		if s.executorProviders.GetByName(s.Executor) != nil {
			return
		}

		message := "Invalid executor specified"
		if s.NonInteractive {
			logrus.Panicln(message)
		} else {
			logrus.Errorln(message)
		}
	}
}

// askDocker collects the Docker executor settings and, unless caching is
// disabled or a /cache volume already exists, adds the default /cache volume.
func (s *RegisterCommand) askDocker() {
	s.askBasicDocker("ruby:3.3")

	for _, volume := range s.Docker.Volumes {
		parts := strings.Split(volume, ":")
		if parts[len(parts)-1] == "/cache" {
			return
		}
	}

	if !s.Docker.DisableCache {
		s.Docker.Volumes = append(s.Docker.Volumes, "/cache")
	}
}

// askDockerWindows collects Docker settings for Windows containers and ensures
// the default Windows cache volume is present.
func (s *RegisterCommand) askDockerWindows() {
	s.askBasicDocker("mcr.microsoft.com/windows/servercore:1809")

	for _, volume := range s.Docker.Volumes {
		// This does not cover all the possibilities since we don't have access
		// to volume parsing package since it's internal.
if strings.Contains(volume, defaultDockerWindowCacheDir) {
			return
		}
	}

	s.Docker.Volumes = append(s.Docker.Volumes, defaultDockerWindowCacheDir)
}

// askBasicDocker ensures the Docker config exists and prompts for the default
// image, using exampleHelperImage only as an example in the prompt text.
func (s *RegisterCommand) askBasicDocker(exampleHelperImage string) {
	if s.Docker == nil {
		s.Docker = &common.DockerConfig{}
	}

	s.Docker.Image = s.ask(
		"docker-image",
		fmt.Sprintf("Enter the default Docker image (for example, %s):", exampleHelperImage),
	)
}

// askParallels prompts for the Parallels base VM name.
func (s *RegisterCommand) askParallels() {
	s.Parallels.BaseName = s.ask("parallels-base-name", "Enter the Parallels VM (for example, my-vm):")
}

// askVirtualBox prompts for the VirtualBox base VM name.
func (s *RegisterCommand) askVirtualBox() {
	s.VirtualBox.BaseName = s.ask("virtualbox-base-name", "Enter the VirtualBox VM (for example, my-vm):")
}

// askSSHServer prompts for the SSH host and (optional) port.
func (s *RegisterCommand) askSSHServer() {
	s.SSH.Host = s.ask("ssh-host", "Enter the SSH server address (for example, my.server.com):")
	s.SSH.Port = s.ask("ssh-port", "Enter the SSH server port (for example, 22):", true)
}

// askSSHLogin prompts for the SSH user plus an optional password and/or
// identity file.
func (s *RegisterCommand) askSSHLogin() {
	s.SSH.User = s.ask("ssh-user", "Enter the SSH user (for example, root):")
	s.SSH.Password = s.ask(
		"ssh-password",
		"Enter the SSH password (for example, docker.io):",
		true,
	)
	s.SSH.IdentityFile = s.ask(
		"ssh-identity-file",
		"Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa):",
		true,
	)
}

// verifyRunner verifies an existing runner authentication token against the
// server and records the verification result on the command.
func (s *RegisterCommand) verifyRunner() {
	// If a runner authentication token is specified in place of a registration token, let's accept it and process it as
	// an authentication token. This allows for an easier transition for users by simply replacing the
	// registration token with the new authentication token.
result := s.network.VerifyRunner(s.RunnerConfig, s.SystemID) if result == nil || result.ID == 0 { logrus.Panicln("Failed to verify the runner.") } s.ID = result.ID s.TokenObtainedAt = s.timeNowFn().UTC().Truncate(time.Second) s.TokenExpiresAt = result.TokenExpiresAt s.registered = true } func (s *RegisterCommand) askRunner(cfg *common.Config) { s.URL = s.ask("url", "Enter the GitLab instance URL (for example, https://gitlab.com/):") if s.Token != "" && !s.tokenIsRunnerToken() { logrus.Infoln("Token specified trying to verify runner...") logrus.Warningln("If you want to register use the '-r' instead of '-t'.") if s.network.VerifyRunner(s.RunnerConfig, s.SystemID) == nil { logrus.Panicln("Failed to verify the runner. You may be having network problems.") } return } if s.Token == "" || !s.tokenIsRunnerToken() { s.Token = s.ask("registration-token", "Enter the registration token:") } if !s.tokenIsRunnerToken() { s.Name = s.ask("name", "Enter a description for the runner:") s.doLegacyRegisterRunner() return } if r, err := cfg.RunnerByToken(s.Token); err == nil && r != nil { logrus.Warningln("A runner with this system ID and token has already been registered.") } // when a runner authentication token is specified as a registration token, certain arguments are reserved to the server s.ensureServerConfigArgsEmpty() s.verifyRunner() s.Name = s.ask("name", "Enter a name for the runner. 
This is stored only in the local config.toml file:") } func (s *RegisterCommand) doLegacyRegisterRunner() { s.TagList = s.ask("tag-list", "Enter tags for the runner (comma-separated):", true) s.MaintenanceNote = s.ask("maintenance-note", "Enter optional maintenance note for the runner:", true) if s.TagList == "" { s.RunUntagged = true } parameters := common.RegisterRunnerParameters{ Description: s.Name, MaintenanceNote: s.MaintenanceNote, Tags: s.TagList, Locked: s.Locked, AccessLevel: s.AccessLevel, RunUntagged: s.RunUntagged, MaximumTimeout: s.MaximumTimeout, Paused: s.Paused, } if s.Token != "" { logrus.Warningf( "Support for registration tokens and runner parameters in the 'register' command has been deprecated in " + "GitLab Runner 15.6 and will be replaced with support for authentication tokens. " + "For more information, see https://docs.gitlab.com/ci/runners/new_creation_workflow/", ) } result := s.network.RegisterRunner(s.RunnerConfig, parameters) // golangci-lint doesn't recognize logrus.Panicln() call as breaking the execution // flow which causes the following assignment to throw false-positive report for // 'SA5011: possible nil pointer dereference' //nolint:staticcheck if result == nil { logrus.Panicln("Failed to register the runner.") } s.ID = result.ID s.Token = result.Token s.TokenObtainedAt = s.timeNowFn().UTC().Truncate(time.Second) s.TokenExpiresAt = result.TokenExpiresAt s.registered = true } func (s *RegisterCommand) askExecutorOptions() { kubernetes := s.Kubernetes machine := s.Machine docker := s.Docker ssh := s.SSH parallels := s.Parallels virtualbox := s.VirtualBox custom := s.Custom s.Kubernetes = nil s.Machine = nil s.Docker = nil s.SSH = nil s.Parallels = nil s.VirtualBox = nil s.Custom = nil s.Referees = nil executorFns := map[string]func(){ "kubernetes": func() { s.Kubernetes = kubernetes }, "docker+machine": func() { s.Machine = machine s.Docker = docker s.askDocker() }, "docker": func() { s.Docker = docker s.askDocker() }, 
"docker-autoscaler": func() { s.Docker = docker s.askDocker() }, "docker-windows": func() { if s.RunnerConfig.Shell == "" { s.Shell = shells.SNPwsh } s.Docker = docker s.askDockerWindows() }, "ssh": func() { s.SSH = ssh s.askSSHServer() s.askSSHLogin() }, "parallels": func() { s.SSH = ssh s.Parallels = parallels s.askParallels() s.askSSHServer() }, "virtualbox": func() { s.SSH = ssh s.VirtualBox = virtualbox s.askVirtualBox() s.askSSHLogin() }, "shell": func() { if runtime.GOOS == osTypeWindows && s.RunnerConfig.Shell == "" { s.Shell = shells.SNPwsh } }, "custom": func() { s.Custom = custom }, } executorFn, ok := executorFns[s.Executor] if ok { executorFn() } } // Set helper_image_flavor to ubi-fips if fips is enabled. See // https://gitlab.com/gitlab-org/gitlab-runner/-/issues/38273 func setFipsHelperImageFlavor(cfg *common.RunnerConfig, fipsEnabled func() bool) { if cfg == nil || !fipsEnabled() { return } if cfg.Docker != nil && cfg.Docker.HelperImageFlavor == "" { cfg.Docker.HelperImageFlavor = "ubi-fips" } if cfg.Kubernetes != nil && cfg.Kubernetes.HelperImageFlavor == "" { cfg.Kubernetes.HelperImageFlavor = "ubi-fips" } } func (s *RegisterCommand) Execute(context *cli.Context) { userModeWarning(true) s.context = context validAccessLevels := []AccessLevel{NotProtected, RefProtected} if !accessLevelValid(validAccessLevels, AccessLevel(s.AccessLevel)) { logrus.Panicln("Given access-level is not valid. 
" + "Refer to gitlab-runner register -h for the correct options.") } s.mergeTemplate() cfg := configfile.New(s.ConfigFile) if err := cfg.Load(configfile.WithMutateOnLoad(func(config *common.Config) error { s.SystemID = cfg.SystemID() s.askRunner(config) if !s.LeaveRunner { defer s.unregisterRunnerFunc()() } s.askExecutor() s.askExecutorOptions() setFipsHelperImageFlavor(&s.RunnerConfig, fips.Enabled) config.Runners = append(config.Runners, &s.RunnerConfig) return nil })); err != nil { logrus.Panicln(err) } if err := cfg.Save(); err != nil { logrus.Panicln(err) } config := cfg.Config() if config.Concurrent < s.Limit { logrus.Warningf( "The specified runner job concurrency limit (%d) is larger than current global concurrency limit (%d). "+ "The global concurrent limit will not be increased and takes precedence.", s.Limit, config.Concurrent, ) } if config.Concurrent < s.RequestConcurrency { logrus.Warningf( "The specified runner request concurrency (%d) is larger than the current global concurrent limit (%d). "+ "The global concurrent limit will not be increased and takes precedence.", s.RequestConcurrency, config.Concurrent, ) } logrus.Printf( "Runner registered successfully. 
" + "Feel free to start it, but if it's running already the config should be automatically reloaded!\n") logrus.Printf("Configuration (with the authentication token) was saved in %q", s.ConfigFile) } func (s *RegisterCommand) unregisterRunnerFunc() func() { signals := make(chan os.Signal, 1) signal.Notify(signals, os.Interrupt) go func() { signal := <-signals s.unregisterRunner() logrus.Fatalf("RECEIVED SIGNAL: %v", signal) }() return func() { // De-register runner on panic if r := recover(); r != nil { if s.registered { s.unregisterRunner() } // pass panic to next defer panic(r) } } } func (s *RegisterCommand) unregisterRunner() { if s.tokenIsRunnerToken() { s.network.UnregisterRunnerManager(s.RunnerConfig, s.SystemID) } else { s.network.UnregisterRunner(s.RunnerConfig) } } func (s *RegisterCommand) mergeTemplate() { if !s.ConfigTemplate.Enabled() { return } logrus.Infof("Merging configuration from template file %q", s.ConfigTemplate.ConfigFile) err := s.ConfigTemplate.MergeTo(&s.RunnerConfig) if err != nil { logrus.WithError(err).Fatal("Could not handle configuration merging from template file") } } func (s *RegisterCommand) tokenIsRunnerToken() bool { return network.TokenIsCreatedRunnerToken(s.Token) } func (s *RegisterCommand) ensureServerConfigArgsEmpty() { if s.Locked && s.AccessLevel == "" && !s.RunUntagged && s.MaximumTimeout == 0 && !s.Paused && s.TagList == "" && s.MaintenanceNote == "" { return } if s.RegistrationToken == s.Token { logrus.Warningln( "You have specified an authentication token in the legacy parameter --registration-token. " + "This has triggered the 'legacy-compatible registration process' which has resulted in the " + "following command line parameters being ignored: --locked, --access-level, --run-untagged, " + "--maximum-timeout, --paused, --tag-list, and --maintenance-note. 
" + "For more information, see https://docs.gitlab.com/ci/runners/new_creation_workflow/#changes-to-the-gitlab-runner-register-command-syntax" + "These parameters and the legacy-compatible registration process will be removed " + "in a future GitLab Runner release. ", ) return } logrus.Fatalln( "Runner configuration other than name and executor configuration is reserved (specifically --locked, " + "--access-level, --run-untagged, --maximum-timeout, --paused, --tag-list, and --maintenance-note) " + "and cannot be specified when registering with a runner authentication token. " + "This configuration is specified on the GitLab server. " + "Please try again without specifying any of those arguments. " + "For more information, see https://docs.gitlab.com/ci/runners/new_creation_workflow/#changes-to-the-gitlab-runner-register-command-syntax", ) } func getHostname() string { hostname, _ := os.Hostname() return hostname } func newRegisterCommand(n common.Network, executorProviders executors.Providers) *RegisterCommand { return &RegisterCommand{ RunnerConfig: common.RunnerConfig{ Name: getHostname(), RunnerSettings: common.RunnerSettings{ Kubernetes: &common.KubernetesConfig{}, Cache: &cacheconfig.Config{}, Machine: &common.DockerMachine{}, Docker: &common.DockerConfig{}, SSH: &common.SshConfig{}, Parallels: &common.ParallelsConfig{}, VirtualBox: &common.VirtualBoxConfig{}, }, }, Locked: true, Paused: false, network: n, executorProviders: executorProviders, timeNowFn: time.Now, } } func accessLevelValid(levels []AccessLevel, givenLevel AccessLevel) bool { if givenLevel == "" { return true } for _, level := range levels { if givenLevel == level { return true } } return false } ================================================ FILE: commands/register_integration_test.go ================================================ //go:build integration package commands_test import ( "bufio" "fmt" "os" "regexp" "runtime" "strings" "testing" "time" "github.com/sirupsen/logrus" 
"github.com/sirupsen/logrus/hooks/test" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "github.com/urfave/cli" clihelpers "gitlab.com/gitlab-org/golang-cli-helpers" "gitlab.com/gitlab-org/gitlab-runner/commands" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/executors" "gitlab.com/gitlab-org/gitlab-runner/executors/custom" "gitlab.com/gitlab-org/gitlab-runner/executors/docker" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/machine" "gitlab.com/gitlab-org/gitlab-runner/executors/kubernetes" "gitlab.com/gitlab-org/gitlab-runner/executors/parallels" "gitlab.com/gitlab-org/gitlab-runner/executors/shell" "gitlab.com/gitlab-org/gitlab-runner/executors/ssh" "gitlab.com/gitlab-org/gitlab-runner/executors/virtualbox" "gitlab.com/gitlab-org/gitlab-runner/helpers" "gitlab.com/gitlab-org/gitlab-runner/shells" ) const osTypeWindows = "windows" var spaceReplacer = strings.NewReplacer(" ", "", "\t", "") type kv struct { key, value string } func TestAccessLevelSetting(t *testing.T) { tests := map[string]struct { accessLevel commands.AccessLevel failureExpected bool }{ "access level not defined": {}, "ref_protected used": { accessLevel: commands.RefProtected, }, "not_protected used": { accessLevel: commands.NotProtected, }, "unknown access level": { accessLevel: commands.AccessLevel("unknown"), failureExpected: true, }, } for testName, testCase := range tests { t.Run(testName, func(t *testing.T) { network := common.NewMockNetwork(t) if !testCase.failureExpected { parametersMocker := mock.MatchedBy(func(parameters common.RegisterRunnerParameters) bool { return commands.AccessLevel(parameters.AccessLevel) == testCase.accessLevel }) network.On("RegisterRunner", mock.Anything, parametersMocker). Return(&common.RegisterRunnerResponse{ Token: "test-runner-token", }). 
Once() } arguments := []string{ "--registration-token", "test-runner-token", "--access-level", string(testCase.accessLevel), } _, output, err := testRegisterCommandRun(t, network, nil, "", arguments...) if testCase.failureExpected { assert.EqualError(t, err, "command error: Given access-level is not valid. "+ "Refer to gitlab-runner register -h for the correct options.") assert.NotContains(t, output, "Runner registered successfully.") return } assert.NoError(t, err) assert.Contains(t, output, "Runner registered successfully.") }) } } func TestAskRunnerOverrideDefaultsForExecutors(t *testing.T) { executors := []string{ "kubernetes", "docker+machine", "docker", "ssh", "custom", "parallels", "virtualbox", "shell", } if runtime.GOOS == osTypeWindows { executors = append(executors, "docker-windows") } for _, executor := range executors { t.Run(executor, func(t *testing.T) { testAskRunnerOverrideDefaultsForExecutor(t, executor) }) } } func isValidToken(systemID string) bool { ok, _ := regexp.MatchString("^[sr]_[0-9a-zA-Z]{12}$", systemID) return ok } func TestAskRunnerUsingRunnerTokenOverrideDefaults(t *testing.T) { const executor = "docker" basicValidation := func(s *commands.RegisterCommand) { assert.Equal(t, "http://gitlab.example.com/", s.URL) assert.Equal(t, "glrt-testtoken", s.Token) assert.Equal(t, executor, s.RunnerSettings.Executor) } expectedParamsFn := func(p common.RunnerConfig) bool { return p.URL == "http://gitlab.example.com/" && p.Token == "glrt-testtoken" } tests := map[string]struct { answers []string arguments []string validate func(s *commands.RegisterCommand) expectedParams func(common.RunnerConfig) bool }{ "basic answers": { answers: append([]string{ "http://gitlab.example.com/", "glrt-testtoken", "name", }, executorAnswers(t, executor)...), validate: basicValidation, expectedParams: expectedParamsFn, }, "basic arguments, accepting provided": { answers: make([]string, 9), arguments: append( executorCmdLineArgs(t, executor), "--url", 
"http://gitlab.example.com/", "-r", "glrt-testtoken", "--name", "name", ), validate: basicValidation, expectedParams: expectedParamsFn, }, "basic arguments override": { answers: append( []string{"http://gitlab.example2.com/", "glrt-testtoken2", "new-name", executor}, executorOverrideAnswers(t, executor)..., ), arguments: append( executorCmdLineArgs(t, executor), "--url", "http://gitlab.example.com/", "-r", "glrt-testtoken", "--name", "name", ), validate: func(s *commands.RegisterCommand) { assert.Equal(t, "http://gitlab.example2.com/", s.URL) assert.Equal(t, "glrt-testtoken2", s.Token) assert.Equal(t, "new-name", s.Name) assert.Equal(t, executor, s.RunnerSettings.Executor) require.NotNil(t, s.RunnerSettings.Docker) assert.Equal(t, "nginx:latest", s.RunnerSettings.Docker.Image) }, expectedParams: func(p common.RunnerConfig) bool { return p.URL == "http://gitlab.example2.com/" && p.Token == "glrt-testtoken2" }, }, } for tn, tc := range tests { t.Run(tn, func(t *testing.T) { network := common.NewMockNetwork(t) network.On("VerifyRunner", mock.MatchedBy(tc.expectedParams), mock.MatchedBy(isValidToken)). Return(&common.VerifyRunnerResponse{ ID: 12345, Token: "glrt-testtoken", }). 
Once() cmd := commands.NewRegisterCommandForTest( bufio.NewReader(strings.NewReader(strings.Join(tc.answers, "\n")+"\n")), network, testExecutorProviders(), ) app := cli.NewApp() app.Commands = []cli.Command{ { Name: "register", Action: cmd.Execute, Flags: clihelpers.GetFlagsFromStruct(cmd), }, } hook := test.NewGlobal() args := append(tc.arguments, "--leave-runner") args, cleanTempFile := useTempConfigFile(t, args) defer cleanTempFile() err := app.Run(append([]string{"runner", "register"}, args...)) output := commands.GetLogrusOutput(t, hook) assert.NoError(t, err) tc.validate(cmd) assert.Contains(t, output, "Runner registered successfully.") }) } } func TestAskRunnerUsingRunnerTokenOnRegistrationTokenOverridingForbiddenDefaults(t *testing.T) { tests := map[string]interface{}{ "--access-level": "not_protected", "--run-untagged": true, "--maximum-timeout": 1, "--paused": true, "--tag-list": "tag", "--maintenance-note": "note", } for tn, tc := range tests { t.Run(tn, func(t *testing.T) { network := common.NewMockNetwork(t) network.On("VerifyRunner", mock.Anything, mock.MatchedBy(isValidToken)). Return(&common.VerifyRunnerResponse{ ID: 1, Token: "glrt-testtoken", }). 
				Once()

			answers := make([]string, 4)
			arguments := append(
				executorCmdLineArgs(t, "shell"),
				"--url", "http://gitlab.example.com/",
				"-r", "glrt-testtoken",
				tn, fmt.Sprintf("%v", tc),
			)

			cmd := commands.NewRegisterCommandForTest(
				bufio.NewReader(strings.NewReader(strings.Join(answers, "\n")+"\n")),
				network,
				testExecutorProviders(),
			)

			hook := test.NewGlobal()

			app := cli.NewApp()
			app.Commands = []cli.Command{
				{
					Name:   "register",
					Action: cmd.Execute,
					Flags:  clihelpers.GetFlagsFromStruct(cmd),
				},
			}

			_ = app.Run(append([]string{"runner", "register"}, arguments...))

			assert.Contains(
				t,
				commands.GetLogrusOutput(t, hook),
				"This has triggered the 'legacy-compatible registration process'",
			)
		})
	}
}

// TestAskRunnerUsingRunnerTokenOverridingForbiddenDefaults verifies that
// passing server-managed flags together with a glrt- token via --token (-t)
// aborts registration with a fatal "configuration is reserved" error.
func TestAskRunnerUsingRunnerTokenOverridingForbiddenDefaults(t *testing.T) {
	tests := map[string]interface{}{
		"--access-level":     "not_protected",
		"--run-untagged":     true,
		"--maximum-timeout":  1,
		"--paused":           true,
		"--tag-list":         "tag",
		"--maintenance-note": "note",
	}

	for tn, tc := range tests {
		t.Run(tn, func(t *testing.T) {
			// Convert logrus.Fatal into a panic so the test can intercept it.
			removeHooksFn := helpers.MakeFatalToPanic()
			defer removeHooksFn()
			network := common.NewMockNetwork(t)

			answers := make([]string, 4)
			arguments := append(
				executorCmdLineArgs(t, "shell"),
				"--url", "http://gitlab.example.com/",
				"-t", "glrt-testtoken",
				tn, fmt.Sprintf("%v", tc),
			)

			cmd := commands.NewRegisterCommandForTest(
				bufio.NewReader(strings.NewReader(strings.Join(answers, "\n")+"\n")),
				network,
				testExecutorProviders(),
			)

			hook := test.NewGlobal()

			app := cli.NewApp()
			app.Commands = []cli.Command{
				{
					Name:   "register",
					Action: cmd.Execute,
					Flags:  clihelpers.GetFlagsFromStruct(cmd),
				},
			}

			defer func() {
				var output string
				if r := recover(); r != nil {
					// log panics force exit
					if e, ok := r.(*logrus.Entry); ok {
						output = e.Message
					}
				}
				if output == "" {
					output = commands.GetLogrusOutput(t, hook)
				}
				assert.Contains(t, output, "Runner configuration other than name and executor configuration is reserved")
			}()

			_ = app.Run(append([]string{"runner", "register"}, arguments...))
			assert.Fail(t, "Should not reach this point")
		})
	}
}

// testRegisterCommandRun runs `register` non-interactively with the given
// environment, initial config-file content and arguments. It returns the
// resulting config-file content, the captured log output, and any command
// error (logrus fatals are converted into errors via recover).
func testRegisterCommandRun(
	t *testing.T,
	network common.Network,
	env []kv,
	initialConfig string,
	args ...string,
) (content, output string, err error) {
	for _, kv := range env {
		err := os.Setenv(kv.key, kv.value)
		if err != nil {
			return "", "", err
		}
	}
	defer func() {
		for _, kv := range env {
			_ = os.Unsetenv(kv.key)
		}
	}()

	hook := test.NewGlobal()
	defer func() {
		output = commands.GetLogrusOutput(t, hook)
		assert.NotContains(t, output, "problem with your config based on jsonschema annotations")

		if r := recover(); r != nil {
			// log panics forces exit
			if e, ok := r.(*logrus.Entry); ok {
				err = fmt.Errorf("command error: %s", e.Message)
			}
		}
	}()

	cmd := commands.NewRegisterCommandForTest(nil, network, testExecutorProviders())

	app := cli.NewApp()
	app.Commands = []cli.Command{
		{
			Name:   "register",
			Action: cmd.Execute,
			Flags:  clihelpers.GetFlagsFromStruct(cmd),
		},
	}

	configFile, err := os.CreateTemp("", "config.toml")
	require.NoError(t, err)
	_, err = configFile.WriteString(initialConfig)
	require.NoError(t, err)
	err = configFile.Close()
	require.NoError(t, err)
	defer os.Remove(configFile.Name())

	args = append([]string{
		"binary", "register",
		"-n",
		"--config", configFile.Name(),
		"--url", "http://gitlab.example.com/",
	}, args...)
if !contains(args, "--executor") { args = append(args, "--executor", "shell") } commandErr := app.Run(args) fileContent, err := os.ReadFile(configFile.Name()) require.NoError(t, err) err = commandErr return string(fileContent), output, err } func contains(args []string, s string) bool { for _, arg := range args { if arg == s { return true } } return false } func testAskRunnerOverrideDefaultsForExecutor(t *testing.T, executor string) { basicValidation := func(s *commands.RegisterCommand) { assertExecutorDefaultValues(t, executor, s) } tests := map[string]struct { answers []string arguments []string validate func(s *commands.RegisterCommand) expectedParams func(common.RegisterRunnerParameters) bool }{ "basic answers": { answers: append([]string{ "http://gitlab.example.com/", "test-registration-token", "name", "tag,list", "basic notes", }, executorAnswers(t, executor)...), validate: basicValidation, expectedParams: func(p common.RegisterRunnerParameters) bool { return p == common.RegisterRunnerParameters{ Description: "name", MaintenanceNote: "basic notes", Tags: "tag,list", Locked: true, Paused: false, } }, }, "basic arguments, accepting provided": { answers: make([]string, 11), arguments: append( executorCmdLineArgs(t, executor), "--url", "http://gitlab.example.com/", "-r", "test-registration-token", "--name", "name", "--tag-list", "tag,list", "--maintenance-note", "maintainer notes", "--paused", "--locked=false", ), validate: basicValidation, expectedParams: func(p common.RegisterRunnerParameters) bool { return p == common.RegisterRunnerParameters{ Description: "name", MaintenanceNote: "maintainer notes", Tags: "tag,list", Paused: true, } }, }, "basic arguments override": { answers: append([]string{"", "", "new-name", "", "maintainer notes", ""}, executorOverrideAnswers(t, executor)...), arguments: append( executorCmdLineArgs(t, executor), "--url", "http://gitlab.example.com/", "-r", "test-registration-token", "--name", "name", "--maintenance-note", "notes", 
"--tag-list", "tag,list", "--paused", "--locked=false", ), validate: func(s *commands.RegisterCommand) { assertExecutorOverridenValues(t, executor, s) }, expectedParams: func(p common.RegisterRunnerParameters) bool { return p == common.RegisterRunnerParameters{ Description: "new-name", MaintenanceNote: "maintainer notes", Tags: "tag,list", Paused: true, } }, }, "untagged implicit": { answers: append([]string{ "http://gitlab.example.com/", "test-registration-token", "name", "", "", }, executorAnswers(t, executor)...), validate: basicValidation, expectedParams: func(p common.RegisterRunnerParameters) bool { return p == common.RegisterRunnerParameters{ Description: "name", RunUntagged: true, Locked: true, Paused: false, } }, }, "untagged explicit": { answers: append([]string{ "http://gitlab.example.com/", "test-registration-token", "name", "", "", }, executorAnswers(t, executor)...), arguments: []string{"--run-untagged"}, validate: basicValidation, expectedParams: func(p common.RegisterRunnerParameters) bool { return p == common.RegisterRunnerParameters{ Description: "name", RunUntagged: true, Locked: true, Paused: false, } }, }, "untagged explicit with tags provided": { answers: append([]string{ "http://gitlab.example.com/", "test-registration-token", "name", "tag,list", "", }, executorAnswers(t, executor)...), arguments: []string{"--run-untagged"}, validate: basicValidation, expectedParams: func(p common.RegisterRunnerParameters) bool { return p == common.RegisterRunnerParameters{ Description: "name", Tags: "tag,list", RunUntagged: true, Locked: true, Paused: false, } }, }, } for tn, tc := range tests { t.Run(tn, func(t *testing.T) { network := common.NewMockNetwork(t) network.On("RegisterRunner", mock.Anything, mock.MatchedBy(tc.expectedParams)). Return(&common.RegisterRunnerResponse{ Token: "test-runner-token", }). 
Once() cmd := commands.NewRegisterCommandForTest( bufio.NewReader(strings.NewReader(strings.Join(tc.answers, "\n")+"\n")), network, testExecutorProviders(), ) app := cli.NewApp() app.Commands = []cli.Command{ { Name: "register", Action: cmd.Execute, Flags: clihelpers.GetFlagsFromStruct(cmd), }, } hook := test.NewGlobal() err := app.Run(append([]string{"runner", "register"}, tc.arguments...)) output := commands.GetLogrusOutput(t, hook) assert.NoError(t, err) tc.validate(cmd) assert.Contains(t, output, "Runner registered successfully.") }) } } func assertExecutorDefaultValues(t *testing.T, executor string, s *commands.RegisterCommand) { assert.Equal(t, "http://gitlab.example.com/", s.URL) assert.Equal(t, "test-runner-token", s.Token) assert.Equal(t, executor, s.RunnerSettings.Executor) switch executor { case "kubernetes": assert.NotNil(t, s.RunnerSettings.Kubernetes) case "custom": assert.NotNil(t, s.RunnerSettings.Custom) case "shell": assert.NotNil(t, s.RunnerSettings.Shell) if runtime.GOOS == osTypeWindows && s.RunnerConfig.Shell == "" { assert.Equal(t, "powershell", s.RunnerSettings.Shell) } case "docker": require.NotNil(t, s.RunnerSettings.Docker) assert.Equal(t, "busybox:latest", s.RunnerSettings.Docker.Image) case "docker-windows": require.NotNil(t, s.RunnerSettings.Docker) assert.Equal(t, "mcr.microsoft.com/windows/servercore:YYH1", s.RunnerSettings.Docker.Image) case "docker+machine": assert.NotNil(t, s.RunnerSettings.Machine) require.NotNil(t, s.RunnerSettings.Docker) assert.Equal(t, "busybox:latest", s.RunnerSettings.Docker.Image) case "ssh": assertDefaultSSHLogin(t, s.RunnerSettings.SSH) assertDefaultSSHServer(t, s.RunnerSettings.SSH) case "parallels": assertDefaultSSHServer(t, s.RunnerSettings.SSH) require.NotNil(t, s.RunnerSettings.Parallels) assert.Equal(t, executor+"-vm-name", s.RunnerSettings.Parallels.BaseName) case "virtualbox": assertDefaultSSHLogin(t, s.RunnerSettings.SSH) require.NotNil(t, s.RunnerSettings.VirtualBox) assert.Equal(t, 
executor+"-vm-name", s.RunnerSettings.VirtualBox.BaseName) default: assert.FailNow(t, "no assertions found for executor", executor) } } func assertDefaultSSHLogin(t *testing.T, sshCfg *common.SshConfig) { require.NotNil(t, sshCfg) assert.Equal(t, "user", sshCfg.User) assert.Equal(t, "password", sshCfg.Password) assert.Equal(t, "/home/user/.ssh/id_rsa", sshCfg.IdentityFile) } func assertDefaultSSHServer(t *testing.T, sshCfg *common.SshConfig) { require.NotNil(t, sshCfg) assert.Equal(t, "gitlab.example.com", sshCfg.Host) assert.Equal(t, "22", sshCfg.Port) } func assertExecutorOverridenValues(t *testing.T, executor string, s *commands.RegisterCommand) { assert.Equal(t, "http://gitlab.example.com/", s.URL) assert.Equal(t, "test-runner-token", s.Token) assert.Equal(t, executor, s.RunnerSettings.Executor) switch executor { case "kubernetes": assert.NotNil(t, s.RunnerSettings.Kubernetes) case "custom": assert.NotNil(t, s.RunnerSettings.Custom) case "shell": assert.NotNil(t, s.RunnerSettings.Shell) if runtime.GOOS == osTypeWindows && s.RunnerConfig.Shell == "" { assert.Equal(t, "powershell", s.RunnerSettings.Shell) } case "docker": require.NotNil(t, s.RunnerSettings.Docker) assert.Equal(t, "nginx:latest", s.RunnerSettings.Docker.Image) case "docker-windows": require.NotNil(t, s.RunnerSettings.Docker) assert.Equal(t, "mcr.microsoft.com/windows/servercore:YYH2", s.RunnerSettings.Docker.Image) case "docker+machine": assert.NotNil(t, s.RunnerSettings.Machine) require.NotNil(t, s.RunnerSettings.Docker) assert.Equal(t, "nginx:latest", s.RunnerSettings.Docker.Image) case "ssh": assertOverridenSSHLogin(t, s.RunnerSettings.SSH) assertOverridenSSHServer(t, s.RunnerSettings.SSH) case "parallels": assertOverridenSSHServer(t, s.RunnerSettings.SSH) require.NotNil(t, s.RunnerSettings.Parallels) assert.Equal(t, "override-"+executor+"-vm-name", s.RunnerSettings.Parallels.BaseName) case "virtualbox": assertOverridenSSHLogin(t, s.RunnerSettings.SSH) require.NotNil(t, 
s.RunnerSettings.VirtualBox) assert.Equal(t, "override-"+executor+"-vm-name", s.RunnerSettings.VirtualBox.BaseName) default: assert.FailNow(t, "no assertions found for executor", executor) } } func assertOverridenSSHLogin(t *testing.T, sshCfg *common.SshConfig) { require.NotNil(t, sshCfg) assert.Equal(t, "root", sshCfg.User) assert.Equal(t, "admin", sshCfg.Password) assert.Equal(t, "/root/.ssh/id_rsa", sshCfg.IdentityFile) } func assertOverridenSSHServer(t *testing.T, sshCfg *common.SshConfig) { require.NotNil(t, sshCfg) assert.Equal(t, "ssh.gitlab.example.com", sshCfg.Host) assert.Equal(t, "8822", sshCfg.Port) } func executorAnswers(t *testing.T, executor string) []string { values := map[string][]string{ "kubernetes": {executor}, "custom": {executor}, "shell": {executor}, "docker": {executor, "busybox:latest"}, "docker-windows": {executor, "mcr.microsoft.com/windows/servercore:YYH1"}, "docker+machine": {executor, "busybox:latest"}, "ssh": {executor, "gitlab.example.com", "22", "user", "password", "/home/user/.ssh/id_rsa"}, "parallels": {executor, "parallels-vm-name", "gitlab.example.com", "22"}, "virtualbox": {executor, "virtualbox-vm-name", "user", "password", "/home/user/.ssh/id_rsa"}, } answers, ok := values[executor] if !ok { assert.FailNow(t, "No answers found for executor", executor) } return answers } func executorOverrideAnswers(t *testing.T, executor string) []string { values := map[string][]string{ "kubernetes": {""}, "custom": {""}, "shell": {""}, "docker": {"nginx:latest"}, "docker-windows": {"mcr.microsoft.com/windows/servercore:YYH2"}, "docker+machine": {"nginx:latest"}, "ssh": {"ssh.gitlab.example.com", "8822", "root", "admin", "/root/.ssh/id_rsa"}, "parallels": {"override-parallels-vm-name", "ssh.gitlab.example.com", "8822"}, "virtualbox": {"override-virtualbox-vm-name", "root", "admin", "/root/.ssh/id_rsa"}, } answers, ok := values[executor] if !ok { assert.FailNow(t, "No override answers found for executor", executor) } return answers } func 
executorCmdLineArgs(t *testing.T, executor string) []string {
	values := map[string][]string{
		"kubernetes":     {"--executor", executor},
		"custom":         {"--executor", executor},
		"shell":          {"--executor", executor},
		"docker":         {"--executor", executor, "--docker-image", "busybox:latest"},
		"docker-windows": {"--executor", executor, "--docker-image", "mcr.microsoft.com/windows/servercore:YYH1"},
		"docker+machine": {"--executor", executor, "--docker-image", "busybox:latest"},
		"ssh": {
			"--executor", executor,
			"--ssh-host", "gitlab.example.com",
			"--ssh-port", "22",
			"--ssh-user", "user",
			"--ssh-password", "password",
			"--ssh-identity-file", "/home/user/.ssh/id_rsa",
		},
		"parallels": {
			"--executor", executor,
			"--ssh-host", "gitlab.example.com",
			"--ssh-port", "22",
			"--parallels-base-name", "parallels-vm-name",
		},
		"virtualbox": {
			"--executor", executor,
			"--ssh-host", "gitlab.example.com",
			"--ssh-user", "user",
			"--ssh-password", "password",
			"--ssh-identity-file", "/home/user/.ssh/id_rsa",
			"--virtualbox-base-name", "virtualbox-vm-name",
		},
	}

	args, ok := values[executor]
	if !ok {
		assert.FailNow(t, "No command line args found for executor", executor)
	}
	return args
}

// TestExecute_MergeConfigTemplate verifies that a --template-config file is
// merged into the registered runner entry, and that an unparsable template
// aborts registration.
func TestExecute_MergeConfigTemplate(t *testing.T) {
	var (
		configTemplateMergeInvalidConfiguration = `- , ;`

		configTemplateMergeAdditionalConfiguration = `
[[runners]]
  [runners.custom_build_dir]
    enabled = false
  [runners.kubernetes]
    [runners.kubernetes.volumes]
      [[runners.kubernetes.volumes.empty_dir]]
        name = "empty_dir"
        mount_path = "/path/to/empty_dir"
        medium = "Memory"
        size_limit = "1G"`

		// Expected saved config.toml; %q takes the hostname, %s the
		// token_obtained_at timestamp.
		// NOTE(review): interior indentation of this literal was reconstructed
		// from a whitespace-mangled source — confirm against the TOML encoder
		// output before relying on exact bytes.
		baseOutputConfigFmt = `concurrent = 1
check_interval = 0
connection_max_age = "15m0s"
shutdown_timeout = 0

[session_server]
  session_timeout = 1800

[[runners]]
  name = %q
  url = "http://gitlab.example.com/"
  id = 0
  token = "test-runner-token"
  token_obtained_at = %s
  token_expires_at = 0001-01-01T00:00:00Z
  executor = "shell"
  shell = "pwsh"
  [runners.cache]
    MaxUploadedArchiveSize = 0
    [runners.cache.s3]
      AssumeRoleMaxConcurrency = 0
    [runners.cache.gcs]
    [runners.cache.azure]
`
	)

	tests := map[string]struct {
		configTemplate         string
		networkAssertions      func(n *common.MockNetwork)
		errExpected            bool
		expectedFileContentFmt string
	}{
		"config template disabled": {
			configTemplate: "",
			networkAssertions: func(n *common.MockNetwork) {
				n.On("RegisterRunner", mock.Anything, mock.Anything).
					Return(&common.RegisterRunnerResponse{
						Token: "test-runner-token",
					}).
					Once()
			},
			errExpected:            false,
			expectedFileContentFmt: baseOutputConfigFmt,
		},
		"config template with no additional runner configuration": {
			configTemplate: "[[runners]]",
			networkAssertions: func(n *common.MockNetwork) {
				n.On("RegisterRunner", mock.Anything, mock.Anything).
					Return(&common.RegisterRunnerResponse{
						Token: "test-runner-token",
					}).
					Once()
			},
			errExpected:            false,
			expectedFileContentFmt: baseOutputConfigFmt,
		},
		"successful config template merge": {
			configTemplate: configTemplateMergeAdditionalConfiguration,
			networkAssertions: func(n *common.MockNetwork) {
				n.On("RegisterRunner", mock.Anything, mock.Anything).
					Return(&common.RegisterRunnerResponse{
						Token: "test-runner-token",
					}).
					Once()
			},
			errExpected: false,
			expectedFileContentFmt: `concurrent = 1
check_interval = 0
connection_max_age = "15m0s"
shutdown_timeout = 0

[session_server]
  session_timeout = 1800

[[runners]]
  name = %q
  url = "http://gitlab.example.com/"
  id = 0
  token = "test-runner-token"
  token_obtained_at = %s
  token_expires_at = 0001-01-01T00:00:00Z
  executor = "shell"
  shell = "pwsh"
  [runners.custom_build_dir]
    enabled = false
  [runners.cache]
    MaxUploadedArchiveSize = 0
    [runners.cache.s3]
      AssumeRoleMaxConcurrency = 0
    [runners.cache.gcs]
    [runners.cache.azure]
`,
		},
		"incorrect config template merge": {
			configTemplate:    configTemplateMergeInvalidConfiguration,
			networkAssertions: func(n *common.MockNetwork) {},
			errExpected:       true,
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			var err error

			if tt.errExpected {
				helpers.MakeFatalToPanic()
			}

			cfgTpl, cleanup := commands.PrepareConfigurationTemplateFile(t, tt.configTemplate)
			defer cleanup()

			network := common.NewMockNetwork(t)

			args := []string{
				"--shell", shells.SNPwsh,
				"--registration-token", "test-runner-token",
			}

			if tt.configTemplate != "" {
				args = append(args, "--template-config", cfgTpl)
			}

			tt.networkAssertions(network)

			fileContent, _, err := testRegisterCommandRun(t, network, nil, "", args...)
			if tt.errExpected {
				require.Error(t, err)
				return
			}
			require.NoError(t, err)

			name, err := os.Hostname()
			require.NoError(t, err)

			assert.Equal(t, fmt.Sprintf(tt.expectedFileContentFmt, name, commands.RegisterTimeNowDate.Format(time.RFC3339)), fileContent)
		})
	}
}

// TestUnregisterOnFailure verifies that a failed registration unregisters the
// runner (or runner manager, for glrt- tokens) unless --leave-runner is set.
func TestUnregisterOnFailure(t *testing.T) {
	tests := map[string]struct {
		token                 string
		leaveRunner           bool
		registrationFails     bool
		expectsLeftRegistered bool
	}{
		"ui created runner, verification succeeds, runner left registered": {
			token:                 "glrt-test-runner-token",
			leaveRunner:           false,
			registrationFails:     false,
			expectsLeftRegistered: true,
		},
		"ui created runner, verification fails, LeaveRunner is false, runner machine is unregistered": {
			token:                 "glrt-test-runner-token",
			leaveRunner:           false,
			registrationFails:     true,
			expectsLeftRegistered: false,
		},
		"ui created runner, verification fails, LeaveRunner is true, runner machine left registered": {
			token:                 "glrt-test-runner-token",
			leaveRunner:           true,
			registrationFails:     true,
			expectsLeftRegistered: true,
		},
		"registration succeeds, runner left registered": {
			token:                 "test-runner-token",
			leaveRunner:           false,
			registrationFails:     false,
			expectsLeftRegistered: true,
		},
		"registration fails, LeaveRunner is false, runner is unregistered": {
			token:                 "test-runner-token",
			leaveRunner:           false,
			registrationFails:     true,
			expectsLeftRegistered: false,
		},
		"registration fails, LeaveRunner is true, runner left registered": {
			token:                 "test-runner-token",
			leaveRunner:           true,
			registrationFails:     true,
			expectsLeftRegistered: true,
		},
	}

	for testName, testCase := range tests {
		t.Run(testName, func(t *testing.T) {
			runnerUICreated := strings.HasPrefix(testCase.token, "glrt-")

			network := common.NewMockNetwork(t)
			if runnerUICreated {
				network.On("VerifyRunner", mock.Anything, mock.MatchedBy(isValidToken)).
					Return(&common.VerifyRunnerResponse{
						ID:    1,
						Token: testCase.token,
					}).
					Once()
			} else {
				network.On("RegisterRunner", mock.Anything, mock.Anything).
					Return(&common.RegisterRunnerResponse{
						Token: testCase.token,
					}).
					Once()
			}
			if !testCase.expectsLeftRegistered {
				credsMocker := mock.MatchedBy(func(credentials common.RunnerConfig) bool {
					return credentials.Token == testCase.token
				})

				if runnerUICreated {
					network.On("UnregisterRunnerManager", credsMocker, mock.Anything).
						Return(true).
						Once()
				} else {
					network.On("UnregisterRunner", credsMocker).
						Return(true).
						Once()
				}
			}

			var arguments []string
			if testCase.leaveRunner {
				arguments = append(arguments, "--leave-runner")
			}

			arguments, cleanTempFile := useTempConfigFile(t, arguments)
			defer cleanTempFile()

			answers := []string{"https://gitlab.com/", testCase.token, "description"}
			if !runnerUICreated {
				answers = append(answers, "", "")
			}
			if testCase.registrationFails {
				// A failing registration panics via logrus; swallow it here.
				defer func() { _ = recover() }()
			} else {
				answers = append(answers, "custom") // should not result in more answers required
			}
			cmd := commands.NewRegisterCommandForTest(
				bufio.NewReader(strings.NewReader(strings.Join(answers, "\n")+"\n")),
				network,
				testExecutorProviders(),
			)

			app := cli.NewApp()
			app.Commands = []cli.Command{
				{
					Name:   "register",
					Action: cmd.Execute,
					Flags:  clihelpers.GetFlagsFromStruct(cmd),
				},
			}

			err := app.Run(append([]string{"runner", "register"}, arguments...))
			assert.False(t, testCase.registrationFails)
			assert.NoError(t, err)
		})
	}
}

// useTempConfigFile appends a --config flag pointing at a fresh temporary
// file and returns the extended arguments plus a cleanup function.
func useTempConfigFile(t *testing.T, arguments []string) ([]string, func()) {
	configFile, err := os.CreateTemp("", "config.toml")
	require.NoError(t, err)
	err = configFile.Close()
	require.NoError(t, err)

	arguments = append(arguments, "--config", configFile.Name())

	return arguments, func() {
		os.Remove(configFile.Name())
	}
}

// TestNameIsNotRequestedOnServerFailureRegisterCommandWithAuthToken verifies
// that when runner verification fails, registration aborts immediately with
// "Failed to verify the runner." and never prompts for further answers.
func TestNameIsNotRequestedOnServerFailureRegisterCommandWithAuthToken(t *testing.T) {
	network := common.NewMockNetwork(t)
	network.On("VerifyRunner", mock.Anything, mock.MatchedBy(isValidToken)).Return(nil).Once()

	var arguments []string
	arguments, cleanTempFile := useTempConfigFile(t, arguments)
	defer cleanTempFile()

	answers := []string{"https://gitlab.com/", "glrt-test-runner-token"}

	hook := test.NewGlobal()
	defer
	func() {
		var output string
		if r := recover(); r != nil {
			// log panics force exit
			if e, ok := r.(*logrus.Entry); ok {
				output = e.Message
			}
		}
		if output == "" {
			output = commands.GetLogrusOutput(t, hook)
		}
		assert.Equal(t, "Failed to verify the runner.", output)
	}()

	cmd := commands.NewRegisterCommandForTest(
		bufio.NewReader(strings.NewReader(strings.Join(answers, "\n")+"\n")),
		network,
		testExecutorProviders(),
	)

	app := cli.NewApp()
	app.Commands = []cli.Command{
		{
			Name:   "register",
			Action: cmd.Execute,
			Flags:  clihelpers.GetFlagsFromStruct(cmd),
		},
	}

	_ = app.Run(append([]string{"runner", "register"}, arguments...))

	assert.Fail(t, "Should not reach this point")
}

// TestRegisterCommand covers assorted register scenarios end to end: token
// vs registration-token handling, feature flags, Windows shell defaults,
// Kubernetes security-context environment variables, and S3 cache settings.
func TestRegisterCommand(t *testing.T) {
	type testCase struct {
		condition       func() bool
		token           string
		arguments       []string
		environment     []kv
		expectedConfigs []string
	}

	testCases := map[string]testCase{
		"runner ID is included in config": {
			token: "glrt-test-runner-token",
			arguments: []string{
				"--token", "glrt-test-runner-token",
				"--name", "test-runner",
			},
			expectedConfigs: []string{`id = 12345`, `token = "glrt-test-runner-token"`},
		},
		"registration token is accepted": {
			token: "test-runner-token",
			arguments: []string{
				"--registration-token", "test-runner-token",
				"--name", "test-runner",
			},
			expectedConfigs: []string{`id = 12345`, `token = "test-runner-token"`},
		},
		"authentication token is accepted in --registration-token": {
			token: "glrt-test-runner-token",
			arguments: []string{
				"--registration-token", "glrt-test-runner-token",
				"--name", "test-runner",
			},
			expectedConfigs: []string{`id = 12345`, `token = "glrt-test-runner-token"`},
		},
		"feature flags are included in config": {
			token: "glrt-test-runner-token",
			arguments: []string{
				"--token", "glrt-test-runner-token",
				"--name", "test-runner",
				"--feature-flags", "FF_TEST_1:true",
				"--feature-flags", "FF_TEST_2:false",
			},
			// Comparison strips spaces/tabs, so only line breaks matter here.
			expectedConfigs: []string{`[runners.feature_flags]
    FF_TEST_1 = true
    FF_TEST_2 = false`},
		},
		"shell defaults to pwsh on Windows with shell executor": {
			condition: func() bool { return runtime.GOOS == osTypeWindows },
			token:     "glrt-test-runner-token",
			arguments: []string{
				"--token", "glrt-test-runner-token",
				"--name", "test-runner",
				"--executor", "shell",
			},
			expectedConfigs: []string{`shell = "pwsh"`},
		},
		"shell defaults to pwsh on Windows with docker-windows executor": {
			condition: func() bool { return runtime.GOOS == osTypeWindows },
			token:     "glrt-test-runner-token",
			arguments: []string{
				"--token", "glrt-test-runner-token",
				"--name", "test-runner",
				"--executor", "docker-windows",
				"--docker-image", "abc",
			},
			expectedConfigs: []string{`shell = "pwsh"`},
		},
		"shell can be overridden to powershell on Windows with shell executor": {
			condition: func() bool { return runtime.GOOS == osTypeWindows },
			token:     "glrt-test-runner-token",
			arguments: []string{
				"--token", "glrt-test-runner-token",
				"--name", "test-runner",
				"--executor", "shell",
				"--shell", "powershell",
			},
			expectedConfigs: []string{`shell = "powershell"`},
		},
		"shell can be overridden to powershell on Windows with docker-windows executor": {
			condition: func() bool { return runtime.GOOS == osTypeWindows },
			token:     "glrt-test-runner-token",
			arguments: []string{
				"--token", "glrt-test-runner-token",
				"--name", "test-runner",
				"--executor", "docker-windows",
				"--shell", "powershell",
				"--docker-image", "abc",
			},
			expectedConfigs: []string{`shell = "powershell"`},
		},
		"kubernetes security context namespace": {
			token: "glrt-test-runner-token",
			arguments: []string{
				"--token", "glrt-test-runner-token",
				"--executor", "kubernetes",
			},
			environment: []kv{
				{
					key:   "KUBERNETES_BUILD_CONTAINER_SECURITY_CONTEXT_PRIVILEGED",
					value: "true",
				},
				{
					key:   "KUBERNETES_HELPER_CONTAINER_SECURITY_CONTEXT_RUN_AS_USER",
					value: "1000",
				},
				{
					key:   "KUBERNETES_SERVICE_CONTAINER_SECURITY_CONTEXT_RUN_AS_NON_ROOT",
					value: "true",
				},
				{
					key:   "KUBERNETES_SERVICE_CONTAINER_SECURITY_CONTEXT_CAPABILITIES_ADD",
					value: "NET_RAW, NET_RAW1",
				},
			},
			expectedConfigs: []string{`
    [runners.kubernetes.build_container_security_context]
      privileged = true`, `
    [runners.kubernetes.helper_container_security_context]
      run_as_user = 1000`, `
    [runners.kubernetes.service_container_security_context]
      run_as_non_root = true`, `
    [runners.kubernetes.service_container_security_context.capabilities]
      add = ["NET_RAW, NET_RAW1"]`,
			},
		},
		"s3 cache AuthenticationType arg": {
			token: "glrt-test-runner-token",
			arguments: []string{
				"--token", "glrt-test-runner-token",
				"--cache-s3-authentication_type=iam",
			},
			expectedConfigs: []string{`
  [runners.cache.s3]
    AuthenticationType = "iam"
`},
		},
		"s3 cache AuthenticationType env": {
			token: "glrt-test-runner-token",
			arguments: []string{
				"--token", "glrt-test-runner-token",
			},
			environment: []kv{
				{
					key:   "CACHE_S3_AUTHENTICATION_TYPE",
					value: "iam",
				},
			},
			expectedConfigs: []string{`
  [runners.cache.s3]
    AuthenticationType = "iam"
`},
		},
	}

	for tn, tc := range testCases {
		t.Run(tn, func(t *testing.T) {
			if tc.condition != nil && !tc.condition() {
				t.Skip()
			}

			network := common.NewMockNetwork(t)
			if strings.HasPrefix(tc.token, "glrt-") {
				network.On("VerifyRunner", mock.Anything, mock.MatchedBy(isValidToken)).
					Return(&common.VerifyRunnerResponse{
						ID:    12345,
						Token: tc.token,
					}).
					Once()
			} else {
				network.On("RegisterRunner", mock.Anything, mock.Anything).
					Return(&common.RegisterRunnerResponse{
						ID:    12345,
						Token: tc.token,
					}).
					Once()
			}

			gotConfig, _, err := testRegisterCommandRun(t, network, tc.environment, "", tc.arguments...)
			require.NoError(t, err)

			for _, expectedConfig := range tc.expectedConfigs {
				assert.Contains(t, spaceReplacer.Replace(gotConfig), spaceReplacer.Replace(expectedConfig))
			}
		})
	}
}

// TestRegisterWithAuthenticationTokenTwice — registering twice with the same
// authentication token (continues beyond this chunk).
func TestRegisterWithAuthenticationTokenTwice(t *testing.T) {
	token := "glrt-test-runner-token"

	arguments := []string{
		"--token", token,
		"--name", "test-runner",
	}

	network := common.NewMockNetwork(t)
	network.On("VerifyRunner", mock.Anything, mock.MatchedBy(isValidToken)).
		Return(&common.VerifyRunnerResponse{
			ID:    12345,
			Token: token,
		}).
Times(2) config, output, err := testRegisterCommandRun(t, network, []kv{}, "", arguments...) require.NoError(t, err) require.NotContains(t, output, "A runner with this system ID and token has already been registered.") // Second time should result in a warning _, output, err = testRegisterCommandRun(t, network, []kv{}, config, arguments...) require.NoError(t, err) require.Contains(t, output, "A runner with this system ID and token has already been registered.") } func TestRegisterTokenExpiresAt(t *testing.T) { type testCase struct { token string expiration time.Time expectedConfig string } testCases := map[string]testCase{ "no expiration": { token: "test-runner-token", expectedConfig: `token = "test-runner-token" token_obtained_at = %s token_expires_at = 0001-01-01T00:00:00Z`, }, "token expiration": { token: "test-runner-token", expiration: time.Date(2594, 7, 21, 15, 42, 53, 0, time.UTC), expectedConfig: `token = "test-runner-token" token_obtained_at = %s token_expires_at = 2594-07-21T15:42:53Z`, }, "no expiration with authentication token": { token: "glrt-test-runner-token", expectedConfig: `token = "glrt-test-runner-token" token_obtained_at = %s token_expires_at = 0001-01-01T00:00:00Z`, }, "token expiration with authentication token": { token: "glrt-test-runner-token", expiration: time.Date(2594, 7, 21, 15, 42, 53, 0, time.UTC), expectedConfig: `token = "glrt-test-runner-token" token_obtained_at = %s token_expires_at = 2594-07-21T15:42:53Z`, }, } for tn, tc := range testCases { t.Run(tn, func(t *testing.T) { network := common.NewMockNetwork(t) if strings.HasPrefix(tc.token, "glrt-") { network.On("VerifyRunner", mock.Anything, mock.MatchedBy(isValidToken)). Return(&common.VerifyRunnerResponse{ ID: 12345, Token: tc.token, TokenExpiresAt: tc.expiration, }). Once() } else { network.On("RegisterRunner", mock.Anything, mock.Anything). Return(&common.RegisterRunnerResponse{ Token: tc.token, TokenExpiresAt: tc.expiration, }). 
Once() } gotConfig, _, err := testRegisterCommandRun(t, network, []kv{}, "", "--registration-token", tc.token, "--name", "test-runner") require.NoError(t, err) assert.Contains( t, spaceReplacer.Replace(gotConfig), spaceReplacer.Replace(fmt.Sprintf(tc.expectedConfig, commands.RegisterTimeNowDate.Format(time.RFC3339))), ) }) } } func testExecutorProviders() *executors.ProviderRegistry { dockerProvider := docker.NewProvider() runnerCommand := "gitlab-runner" return executors.NewProviderRegistry(map[string]common.ExecutorProvider{ "custom": custom.NewProvider(runnerCommand), "docker": dockerProvider, "docker+machine": machine.NewProvider(dockerProvider), "docker-windows": docker.NewWindowsProvider(), "parallels": parallels.NewProvider(), "shell": shell.NewProvider(runnerCommand), "ssh": ssh.NewProvider(), "virtualbox": virtualbox.NewProvider(), common.ExecutorKubernetes: kubernetes.NewProvider(), }) } ================================================ FILE: commands/register_test.go ================================================ //go:build !integration package commands import ( "errors" "flag" "fmt" "testing" "dario.cat/mergo" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/common" ) func setupDockerRegisterCommand(dockerConfig *common.DockerConfig) *RegisterCommand { fs := flag.NewFlagSet("", flag.ExitOnError) ctx := cli.NewContext(cli.NewApp(), fs, nil) fs.String("docker-image", "ruby:3.3", "") s := &RegisterCommand{ context: ctx, NonInteractive: true, } s.Docker = dockerConfig return s } func TestRegisterDefaultDockerCacheVolume(t *testing.T) { s := setupDockerRegisterCommand(&common.DockerConfig{ Volumes: []string{}, }) s.askDocker() assert.Equal(t, 1, len(s.Docker.Volumes)) assert.Equal(t, "/cache", s.Docker.Volumes[0]) } func TestDoNotRegisterDefaultDockerCacheVolumeWhenDisableCache(t *testing.T) { s := setupDockerRegisterCommand(&common.DockerConfig{ Volumes: []string{}, 
DisableCache: true, }) s.askDocker() assert.Len(t, s.Docker.Volumes, 0) } func TestRegisterCustomDockerCacheVolume(t *testing.T) { s := setupDockerRegisterCommand(&common.DockerConfig{ Volumes: []string{"/cache"}, }) s.askDocker() assert.Equal(t, 1, len(s.Docker.Volumes)) assert.Equal(t, "/cache", s.Docker.Volumes[0]) } func TestRegisterCustomMappedDockerCacheVolume(t *testing.T) { s := setupDockerRegisterCommand(&common.DockerConfig{ Volumes: []string{"/my/cache:/cache"}, }) s.askDocker() assert.Equal(t, 1, len(s.Docker.Volumes)) assert.Equal(t, "/my/cache:/cache", s.Docker.Volumes[0]) } func TestConfigTemplate_Enabled(t *testing.T) { tests := map[string]struct { path string expectedValue bool }{ "configuration file defined": { path: "/path/to/file", expectedValue: true, }, "configuration file not defined": { path: "", expectedValue: false, }, } for tn, tc := range tests { t.Run(tn, func(t *testing.T) { configTemplate := &configTemplate{ConfigFile: tc.path} assert.Equal(t, tc.expectedValue, configTemplate.Enabled()) }) } } var ( configTemplateMergeToInvalidConfiguration = `- , ;` configTemplateMergeToEmptyConfiguration = `` configTemplateMergeToTwoRunnerSectionsConfiguration = ` [[runners]] [[runners]]` configTemplateMergeToOverwritingConfiguration = ` [[runners]] token = "different_token" executor = "docker" limit = 100` configTemplateMergeToAdditionalConfiguration = ` [[runners]] [runners.kubernetes] [runners.kubernetes.volumes] [[runners.kubernetes.volumes.empty_dir]] name = "empty_dir" mount_path = "/path/to/empty_dir" medium = "Memory" size_limit = "1G"` configTemplateMergeToBaseConfiguration = &common.RunnerConfig{ RunnerCredentials: common.RunnerCredentials{ Token: "test-runner-token", }, RunnerSettings: common.RunnerSettings{ Executor: "shell", }, } ) func TestConfigTemplate_MergeTo(t *testing.T) { tests := map[string]struct { templateContent string config *common.RunnerConfig expectedError error assertConfiguration func(t *testing.T, config 
*common.RunnerConfig) }{ "invalid template file": { templateContent: configTemplateMergeToInvalidConfiguration, config: configTemplateMergeToBaseConfiguration, expectedError: errors.New("couldn't load configuration template file: decoding configuration file: toml: line 1: expected '.' or '=', but got ',' instead"), }, "no runners in template": { templateContent: configTemplateMergeToEmptyConfiguration, config: configTemplateMergeToBaseConfiguration, expectedError: errors.New("configuration template must contain exactly one [[runners]] entry"), }, "multiple runners in template": { templateContent: configTemplateMergeToTwoRunnerSectionsConfiguration, config: configTemplateMergeToBaseConfiguration, expectedError: errors.New("configuration template must contain exactly one [[runners]] entry"), }, "template doesn't overwrite existing settings": { templateContent: configTemplateMergeToOverwritingConfiguration, config: configTemplateMergeToBaseConfiguration, assertConfiguration: func(t *testing.T, config *common.RunnerConfig) { assert.Equal(t, configTemplateMergeToBaseConfiguration.Token, config.RunnerCredentials.Token) assert.Equal(t, configTemplateMergeToBaseConfiguration.Executor, config.RunnerSettings.Executor) assert.Equal(t, 100, config.Limit) }, expectedError: nil, }, "template doesn't overwrite token if none provided in base": { templateContent: configTemplateMergeToOverwritingConfiguration, config: &common.RunnerConfig{}, assertConfiguration: func(t *testing.T, config *common.RunnerConfig) { assert.Equal(t, "", config.Token) }, }, "template adds additional content": { templateContent: configTemplateMergeToAdditionalConfiguration, config: configTemplateMergeToBaseConfiguration, assertConfiguration: func(t *testing.T, config *common.RunnerConfig) { k8s := config.RunnerSettings.Kubernetes require.NotNil(t, k8s) require.NotEmpty(t, k8s.Volumes.EmptyDirs) assert.Len(t, k8s.Volumes.EmptyDirs, 1) emptyDir := k8s.Volumes.EmptyDirs[0] assert.Equal(t, "empty_dir", 
emptyDir.Name) assert.Equal(t, "/path/to/empty_dir", emptyDir.MountPath) assert.Equal(t, "Memory", emptyDir.Medium) assert.Equal(t, "1G", emptyDir.SizeLimit) }, expectedError: nil, }, "error on merging": { templateContent: configTemplateMergeToAdditionalConfiguration, expectedError: fmt.Errorf( "error while merging configuration with configuration template: %w", mergo.ErrNotSupported, ), }, } for tn, tc := range tests { t.Run(tn, func(t *testing.T) { file, cleanup := PrepareConfigurationTemplateFile(t, tc.templateContent) defer cleanup() configTemplate := &configTemplate{ConfigFile: file} err := configTemplate.MergeTo(tc.config) if tc.expectedError != nil { assert.ErrorContains(t, err, tc.expectedError.Error()) return } assert.NoError(t, err) tc.assertConfiguration(t, tc.config) }) } } func TestSetFipsHelperImageFlavor(t *testing.T) { tests := map[string]struct { fipsEnabled bool dockerConfig *common.DockerConfig k8sConfig *common.KubernetesConfig expectedDockerFlavor string expectedK8sFlavor string }{ "Docker, fips disabled, no flavor, no changes": { dockerConfig: &common.DockerConfig{}, }, "Docker, fips disabled, existing flavor, no changes": { dockerConfig: &common.DockerConfig{HelperImageFlavor: "blammo"}, expectedDockerFlavor: "blammo", }, "Docker, fips enabled, no flavor, update config": { fipsEnabled: true, dockerConfig: &common.DockerConfig{}, expectedDockerFlavor: "ubi-fips", }, "Docker, fips enabled, existing flavor, no changes": { fipsEnabled: true, dockerConfig: &common.DockerConfig{HelperImageFlavor: "blammo"}, expectedDockerFlavor: "blammo", }, "Kubernetes, fips disabled, no flavor, no changes": { k8sConfig: &common.KubernetesConfig{}, }, "Kubernetes, fips disabled, existing flavor, no changes": { k8sConfig: &common.KubernetesConfig{HelperImageFlavor: "blammo"}, expectedK8sFlavor: "blammo", }, "Kubernetes, fips enabled, no flavor, update config": { fipsEnabled: true, k8sConfig: &common.KubernetesConfig{}, expectedK8sFlavor: "ubi-fips", }, "Kubernetes, 
fips enabled, existing flavor, no changes": { fipsEnabled: true, k8sConfig: &common.KubernetesConfig{HelperImageFlavor: "blammo"}, expectedK8sFlavor: "blammo", }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { // Create a test runner config cfg := &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Docker: tt.dockerConfig, Kubernetes: tt.k8sConfig, }, } setFipsHelperImageFlavor(cfg, func() bool { return tt.fipsEnabled }) if cfg.Docker != nil { assert.Equal(t, tt.expectedDockerFlavor, cfg.Docker.HelperImageFlavor) } if cfg.Kubernetes != nil { assert.Equal(t, tt.expectedK8sFlavor, cfg.Kubernetes.HelperImageFlavor) } }) } } ================================================ FILE: commands/register_windows_test.go ================================================ //go:build !integration package commands import ( "fmt" "testing" "github.com/stretchr/testify/assert" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/executors" "gitlab.com/gitlab-org/gitlab-runner/executors/shell" "gitlab.com/gitlab-org/gitlab-runner/network" "gitlab.com/gitlab-org/gitlab-runner/shells" ) func TestRegisterDefaultWindowsDockerCacheVolume(t *testing.T) { testCases := map[string]struct { userDefinedVolumes []string expectedVolumes []string }{ "user did not define anything": { userDefinedVolumes: []string{}, expectedVolumes: []string{defaultDockerWindowCacheDir}, }, "user defined an extra volume": { userDefinedVolumes: []string{"c:\\Users\\SomeUser\\config.json:c:\\config.json"}, expectedVolumes: []string{defaultDockerWindowCacheDir, "c:\\Users\\SomeUser\\config.json:c:\\config.json"}, }, "user defined volume binding to default cache dir": { userDefinedVolumes: []string{fmt.Sprintf("c:\\Users\\SomeUser\\cache:%s", defaultDockerWindowCacheDir)}, expectedVolumes: []string{fmt.Sprintf("c:\\Users\\SomeUser\\cache:%s", defaultDockerWindowCacheDir)}, }, "user defined cache as source leads to incorrect parsing of volume and never adds cache 
volume": { userDefinedVolumes: []string{"c:\\cache:c:\\User\\ContainerAdministrator\\cache"}, expectedVolumes: []string{"c:\\cache:c:\\User\\ContainerAdministrator\\cache"}, }, } for name, testCase := range testCases { t.Run(name, func(t *testing.T) { s := setupDockerRegisterCommand(&common.DockerConfig{ Volumes: testCase.userDefinedVolumes, }) s.askDockerWindows() assert.ElementsMatch(t, testCase.expectedVolumes, s.Docker.Volumes) }) } } func TestDefaultWindowsShell(t *testing.T) { tests := []struct { shell string expectedShell string }{ { shell: "powershell", expectedShell: shells.SNPowershell, }, { shell: "pwsh", expectedShell: shells.SNPwsh, }, { shell: "", expectedShell: shells.SNPwsh, }, } for _, tt := range tests { t.Run(tt.shell, func(t *testing.T) { n := network.NewGitLabClient() cmd := newRegisterCommand(n, executors.NewProviderRegistry(map[string]common.ExecutorProvider{ "shell": shell.NewProvider("gitlab-runner"), })) cmd.Shell = tt.shell cmd.Executor = "shell" cmd.askExecutorOptions() assert.Equal(t, tt.expectedShell, cmd.Shell) }) } } ================================================ FILE: commands/reset_token.go ================================================ package commands import ( "log" "github.com/sirupsen/logrus" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/commands/internal/configfile" "gitlab.com/gitlab-org/gitlab-runner/common" ) type ResetTokenCommand struct { *common.RunnerCredentials network common.Network ConfigFile string `short:"c" long:"config" env:"CONFIG_FILE" description:"Config file"` Name string `short:"n" long:"name" description:"Name of the runner whose token you wish to reset (as defined in the configuration file)"` URL string `short:"u" long:"url" description:"URL of the runner whose token you wish to reset (as defined in the configuration file)"` ID int64 `short:"i" long:"id" description:"ID of the runner whose token you wish to reset (as defined in the configuration file)"` AllRunners bool 
`long:"all-runners" description:"Reset all runner authentication tokens"`
	// PAT optionally authorizes the reset in place of the runner's old token.
	PAT string `long:"pat" description:"Personal access token to use in lieu of runner's old authentication token"`
}

// NewResetTokenCommand wires a ResetTokenCommand to the given network client
// and exposes it as the "reset-token" CLI command.
func NewResetTokenCommand(n common.Network) cli.Command {
	return common.NewCommand("reset-token", "reset a runner's token", &ResetTokenCommand{
		network: n,
	})
}

// resetAllRunnerTokens resets the authentication token of every runner in the
// configuration, logging (but not aborting on) individual failures.
func (c *ResetTokenCommand) resetAllRunnerTokens(cfg *common.Config) {
	logrus.Warningln("Resetting all runner authentication tokens")
	for _, r := range cfg.Runners {
		if !common.ResetToken(c.network, r, "", c.PAT) {
			logrus.WithField("name", r.Name).Errorln("Failed to reset runner authentication token")
		}
	}
}

// resetSingleRunnerToken resets the token of the runner selected via --name
// or --url/--id. Failures use logrus.Fatalln (which exits), so the
// `return false` statements after them are effectively unreachable and kept
// defensively.
func (c *ResetTokenCommand) resetSingleRunnerToken(cfg *common.Config) bool {
	runnerCredentials, err := c.getRunnerCredentials(cfg)
	if err != nil {
		logrus.WithError(err).Fatalln("Couldn't get runner credentials")
	}

	if runnerCredentials == nil {
		logrus.Fatalln("No runner provided")
		return false
	}

	// Reset Token of the runner
	if !common.ResetToken(c.network, runnerCredentials, "", c.PAT) {
		logrus.WithFields(logrus.Fields{
			"name": c.Name,
			"id":   c.ID,
		}).Fatalln("Failed to reset runner authentication token")
		return false
	}

	return true
}

// getRunnerCredentials looks the runner up by name when --name was given,
// otherwise by URL and ID.
func (c *ResetTokenCommand) getRunnerCredentials(cfg *common.Config) (*common.RunnerConfig, error) {
	if c.Name != "" {
		runnerConfig, err := cfg.RunnerByName(c.Name)
		if err != nil {
			return nil, err
		}
		return runnerConfig, nil
	}

	runnerConfig, err := cfg.RunnerByURLAndID(c.URL, c.ID)
	if err != nil {
		return nil, err
	}
	return runnerConfig, nil
}

// Execute loads the configuration, resets either all runner tokens or the
// selected runner's token while the config is loaded, then saves the result
// back to disk.
func (c *ResetTokenCommand) Execute(_context *cli.Context) {
	userModeWarning(true)

	cfg := configfile.New(c.ConfigFile)

	if err := cfg.Load(configfile.WithMutateOnLoad(func(cfg *common.Config) error {
		if c.AllRunners {
			c.resetAllRunnerTokens(cfg)
		} else {
			c.resetSingleRunnerToken(cfg)
		}
		return nil
	})); err != nil {
		logrus.WithError(err).Fatalln("Failed to load configuration")
	}

	if err := cfg.Save(); err != nil {
		logrus.WithError(err).Fatalln("Failed to update configuration")
	}

	log.Println("Updated")
}

================================================
FILE: commands/service.go
================================================
package commands

import (
	"fmt"
	"os"
	"os/user"
	"path/filepath"
	"runtime"

	"github.com/kardianos/service"
	"github.com/sirupsen/logrus"
	"github.com/urfave/cli"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/homedir"
	service_helpers "gitlab.com/gitlab-org/gitlab-runner/helpers/service"
)

const (
	defaultServiceName = "gitlab-runner"
	defaultDescription = "GitLab Runner"
)

// NullService is a no-op service.Interface implementation used when the
// process only controls the system service (install/start/stop/...) rather
// than running as it.
type NullService struct{}

func (n *NullService) Start(s service.Service) error { return nil }

func (n *NullService) Stop(s service.Service) error { return nil }

// runServiceInstall validates install preconditions (root installs must name
// a user), makes sure a config file exists on first install, then delegates
// to service.Control "install".
func runServiceInstall(s service.Service, c *cli.Context) error {
	if c.String("user") == "" && c.String("init-user") == "" && os.Getuid() == 0 {
		logrus.Fatal("Please specify user that will run gitlab-runner service")
	}

	if configFile := c.String("config"); configFile != "" {
		// try to load existing config
		config := common.NewConfig()
		err := config.LoadConfig(configFile)
		if err != nil {
			return err
		}

		// save config for the first time
		if !config.Loaded {
			err = config.SaveConfig(configFile)
			if err != nil {
				return err
			}
		}
	}
	return service.Control(s, "install")
}

// runServiceStatus prints a human-readable service status; a non-running
// service is reported on stderr and the process exits with code 1.
func runServiceStatus(displayName string, s service.Service) {
	status, err := s.Status()

	description := ""
	switch status {
	case service.StatusRunning:
		description = "Service is running"
	case service.StatusStopped:
		description = "Service has stopped"
	default:
		description = "Service status unknown"
		if err != nil {
			description = err.Error()
		}
	}

	if status != service.StatusRunning {
		fmt.Fprintf(os.Stderr, "%s: %s\n", displayName, description)
		os.Exit(1)
	}

	fmt.Printf("%s: %s\n", displayName, description)
}

// getUserHomeDir resolves the home directory of the named OS user; it panics
// on lookup failure since callers cannot proceed without it.
// NOTE(review): cut at the end of this chunk — `return` continues below.
func getUserHomeDir(username string) string {
	u, err := user.Lookup(username)
	if err != nil {
		panic(fmt.Sprintf("Failed to get home for user %q: %s", username, err.Error()))
	}
	return
u.HomeDir
}

// GetServiceArguments builds the argument list appended to the service's
// "run" invocation: --config, optional --working-directory/--service, and
// --syslog when appropriate for the platform.
func GetServiceArguments(c *cli.Context) (arguments []string) {
	// Update the default config-file path if it was not actually set and --init-user was specified...
	config := c.String("config")
	if !c.IsSet("config") && c.String("init-user") != "" {
		config = filepath.Join(getUserHomeDir(c.String("init-user")), "config.toml")
	}
	arguments = append(arguments, "--config", config)

	applyStrArg(c, "working-directory", false, func(val string) {
		arguments = append(arguments, "--working-directory", val)
	})
	applyStrArg(c, "service", false, func(val string) {
		arguments = append(arguments, "--service", val)
	})

	// syslogging doesn't make sense for systemd systems as those log straight to journald
	syslog := !c.IsSet("syslog") || c.Bool("syslog")
	if service.Platform() == "linux-systemd" && !c.IsSet("syslog") {
		syslog = false
	}
	if syslog {
		arguments = append(arguments, "--syslog")
	}

	return
}

// createServiceConfig assembles the kardianos/service configuration for the
// "run" command, then applies OS-specific tweaks.
func createServiceConfig(c *cli.Context) *service.Config {
	config := &service.Config{
		Name:        c.String("service"),
		DisplayName: c.String("service"),
		Description: defaultDescription,
		Arguments:   append([]string{"run"}, GetServiceArguments(c)...),
	}

	// setup os specific service config
	setupOSServiceConfig(c, config)

	return config
}

// RunServiceControl dispatches the service subcommand named in the CLI
// context ("install" and "status" are handled specially; everything else goes
// straight to service.Control).
func RunServiceControl(c *cli.Context) {
	// 'user' and 'init-user' select mutually exclusive mechanisms.
	if c.String("user") != "" && c.String("init-user") != "" {
		logrus.Fatal("Only one of 'user' or 'init-user' can be specified.")
	}

	svcConfig := createServiceConfig(c)

	s, err := service_helpers.New(&NullService{}, svcConfig)
	if err != nil {
		logrus.Fatal(err)
	}

	switch c.Command.Name {
	case "install":
		err = runServiceInstall(s, c)
	case "status":
		runServiceStatus(svcConfig.DisplayName, s)
	default:
		err = service.Control(s, c.Command.Name)
	}

	if err != nil {
		logrus.Fatal(err)
	}
}

// GetFlags returns the flags shared by every service subcommand; non-root
// users additionally get --user-service.
func GetFlags() []cli.Flag {
	flags := []cli.Flag{
		cli.StringFlag{
			Name:  "service, n",
			Value: defaultServiceName,
			Usage: "Specify service name to use",
		},
	}

	if os.Getuid() > 0 {
		flags = append(flags,
			cli.BoolFlag{
				Name:  "user-service",
				Usage: "Manage gitlab-runner as a user service (systemd only)",
			},
		)
	}

	return flags
}

// GetInstallFlags returns GetFlags plus install-only flags. The user/password
// flags depend on the platform: Windows asks for user and password, while a
// root install on other systems asks for user or init-user.
func GetInstallFlags() []cli.Flag {
	installFlags := GetFlags()
	installFlags = append(
		installFlags,
		cli.StringFlag{
			Name:  "working-directory, d",
			Value: homedir.New().GetWDOrEmpty(),
			Usage: "Specify custom root directory where all data are stored",
		},
		cli.StringFlag{
			Name:  "config, c",
			Value: GetDefaultConfigFile(),
			Usage: "Specify custom config file",
		},
		cli.BoolFlag{
			Name:  "syslog",
			Usage: "Setup system logging integration",
		},
	)

	if runtime.GOOS == osTypeWindows {
		installFlags = append(
			installFlags,
			cli.StringFlag{
				Name:  "user, u",
				Value: "",
				Usage: "Specify user-name to secure the runner",
			},
			cli.StringFlag{
				Name:  "password, p",
				Value: "",
				Usage: "Specify user password to install service (required)",
			})
	} else if os.Getuid() == 0 {
		installFlags = append(installFlags,
			cli.StringFlag{
				Name:  "user, u",
				Value: "",
				Usage: "Specify user-name to secure the runner",
			},
			cli.StringFlag{
				Name:  "init-user, i",
				Value: "",
				Usage: "Specify user-name to secure the runner in the init script or systemd unit file",
			})
	}

	return installFlags
}

// NewServiceCommands exposes the service-control subcommands on the CLI; only
// "install" takes the extended install flag set.
func NewServiceCommands() []cli.Command {
	flags := GetFlags()
	installFlags := GetInstallFlags()

	return []cli.Command{
		common.NewCommand("install", "install service", common.CommanderFunc(RunServiceControl), installFlags...),
		common.NewCommand("uninstall", "uninstall service", common.CommanderFunc(RunServiceControl), flags...),
		common.NewCommand("start", "start service", common.CommanderFunc(RunServiceControl), flags...),
		common.NewCommand("stop", "stop service", common.CommanderFunc(RunServiceControl), flags...),
		common.NewCommand("restart", "restart service", common.CommanderFunc(RunServiceControl), flags...),
		common.NewCommand("status", "get status of a service", common.CommanderFunc(RunServiceControl), flags...),
	}
}

// applyStrArg applies the named string-typed runtime argument to the service configuration in whatever way the `apply`
// function dictates.
func applyStrArg(c *cli.Context, argname string, rootonly bool, apply func(val string)) {
	argval := c.String(argname)
	if argval == "" {
		// Unset/empty flags are simply skipped.
		return
	}

	if rootonly && os.Getuid() != 0 {
		logrus.Fatalf("The --%s is not supported for non-root users", argname)
	}

	apply(argval)
}

================================================
FILE: commands/service_darwin.go
================================================
package commands

import (
	"os"

	"github.com/kardianos/service"
	"github.com/urfave/cli"
)

// setupOSServiceConfig applies macOS launchd-specific options: keep the
// service alive, start at load, and install as a user service when not root.
func setupOSServiceConfig(c *cli.Context, config *service.Config) {
	config.Option = service.KeyValue{
		"KeepAlive":   true,
		"RunAtLoad":   true,
		"UserService": os.Getuid() != 0,
	}

	applyStrArg(c, "user", true, func(val string) {
		config.Arguments = append(config.Arguments, "--user", val)
	})
	applyStrArg(c, "init-user", true, func(val string) {
		config.UserName = val
	})
}

================================================
FILE: commands/service_integration_test.go
================================================
//go:build integration

package commands_test

import (
	"fmt"
	"slices"
	"testing"

	"github.com/kardianos/service"
	"github.com/stretchr/testify/assert"
	"github.com/urfave/cli"

	"gitlab.com/gitlab-org/gitlab-runner/commands"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/homedir"
)

// newTestGetServiceArgumentsCommand returns a cli action that asserts
// GetServiceArguments contains every expected argument.
func newTestGetServiceArgumentsCommand(t *testing.T, expectedArgs []string) func(*cli.Context) {
	return func(c *cli.Context) {
		arguments := commands.GetServiceArguments(c)

		for _, arg := range expectedArgs {
			assert.Contains(t, arguments, arg)
		}
	}
}

// testServiceCommandRun runs the given action as a "test-command" CLI command
// with the install flag set and the provided extra arguments.
func testServiceCommandRun(command func(*cli.Context), args ...string) {
	app := cli.NewApp()
	app.Commands = []cli.Command{
		{
			Name:   "test-command",
			Action: command,
			Flags:  commands.GetInstallFlags(),
		},
	}

	args = append([]string{"binary", "test-command"}, args...)
	_ = app.Run(args)
}

type getServiceArgumentsTestCase struct {
	cliFlags     []string
	expectedArgs []string
}

// TestGetServiceArguments checks the service "run" argument list produced for
// various flag combinations; the expected presence of --syslog depends on the
// platform (see the adjustment inside the loop).
func TestGetServiceArguments(t *testing.T) {
	tests := []getServiceArgumentsTestCase{
		{
			expectedArgs: []string{
				"--working-directory", homedir.New().GetWDOrEmpty(),
				"--config", commands.GetDefaultConfigFile(),
				"--service", "gitlab-runner",
			},
		},
		{
			cliFlags: []string{
				"--config", "/tmp/config.toml",
			},
			expectedArgs: []string{
				"--working-directory", homedir.New().GetWDOrEmpty(),
				"--config", "/tmp/config.toml",
				"--service", "gitlab-runner",
			},
		},
		{
			cliFlags: []string{
				"--working-directory", "/tmp",
			},
			expectedArgs: []string{
				"--working-directory", "/tmp",
				"--config", commands.GetDefaultConfigFile(),
				"--service", "gitlab-runner",
			},
		},
		{
			cliFlags: []string{
				"--service", "gitlab-runner-service-name",
			},
			expectedArgs: []string{
				"--working-directory", homedir.New().GetWDOrEmpty(),
				"--config", commands.GetDefaultConfigFile(),
				"--service", "gitlab-runner-service-name",
			},
		},
		{
			cliFlags: []string{
				"--syslog=true",
			},
			expectedArgs: []string{
				"--working-directory", homedir.New().GetWDOrEmpty(),
				"--config", commands.GetDefaultConfigFile(),
				"--service", "gitlab-runner",
			},
		},
		{
			cliFlags: []string{
				"--syslog=false",
			},
			expectedArgs: []string{
				"--working-directory", homedir.New().GetWDOrEmpty(),
				"--config", commands.GetDefaultConfigFile(),
				"--service", "gitlab-runner",
			},
		},
	}

	for id, testCase := range tests {
		t.Run(fmt.Sprintf("case-%d", id), func(t *testing.T) {
			// Outside systemd, syslog is on by default unless explicitly
			// disabled.
			if service.Platform() != "linux-systemd" && !slices.Contains(testCase.cliFlags, "--syslog=false") {
				testCase.expectedArgs = append(testCase.expectedArgs, "--syslog")
			}

			testServiceCommandRun(newTestGetServiceArgumentsCommand(t, testCase.expectedArgs), testCase.cliFlags...)
		})
	}
}

================================================
FILE: commands/service_linux.go
================================================
package commands

import (
	"github.com/kardianos/service"
	"github.com/urfave/cli"

	service_helpers "gitlab.com/gitlab-org/gitlab-runner/helpers/service"
)

// setupOSServiceConfig applies Linux-specific service options for systemd and
// SysV init systems.
func setupOSServiceConfig(c *cli.Context, config *service.Config) {
	applyStrArg(c, "user", true, func(val string) {
		config.Arguments = append(config.Arguments, "--user", val)
	})
	applyStrArg(c, "init-user", true, func(val string) {
		config.UserName = val
	})

	switch service.Platform() {
	case "linux-systemd":
		config.Dependencies = []string{
			"After=network.target",
		}
		config.Option = service.KeyValue{
			"Restart":     "always",
			"UserService": c.IsSet("user-service"),
		}
	case "unix-systemv":
		script := service_helpers.SysvScript()
		if script != "" {
			config.Option = service.KeyValue{
				"SysvScript": script,
			}
		}
	}
}

================================================
FILE: commands/service_portable.go
================================================
//go:build !linux && !darwin && !windows

package commands

import (
	"github.com/kardianos/service"
	"github.com/urfave/cli"
)

// setupOSServiceConfig is a stub for platforms with no OS-specific service
// configuration.
func setupOSServiceConfig(c *cli.Context, config *service.Config) {
	// not supported
}

================================================
FILE: commands/service_windows.go
================================================
package commands

import (
	"github.com/kardianos/service"
	"github.com/urfave/cli"
)

// setupOSServiceConfig applies Windows service options: the account name and
// password the service runs under.
func setupOSServiceConfig(c *cli.Context, config *service.Config) {
	config.Option = service.KeyValue{
		"Password": c.String("password"),
	}
	config.UserName = c.String("user")
}

================================================
FILE: commands/single.go
================================================
package commands

import (
	"context"
	"os"
	"os/signal"
	"sync/atomic"
	"syscall"
	"time"

	"github.com/sirupsen/logrus"
	"github.com/urfave/cli"

	"gitlab.com/gitlab-org/gitlab-runner/helpers/process"
"gitlab.com/gitlab-org/gitlab-runner/commands/internal/configfile" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/executors" ) type RunSingleCommand struct { common.RunnerConfig network common.Network executorProviders executors.Providers WaitTimeout int `long:"wait-timeout" description:"How long to wait in seconds before receiving the first job"` lastBuild time.Time runForever bool MaxBuilds int `long:"max-builds" description:"How many builds to process before exiting"` finished atomic.Bool interruptSignals chan os.Signal ConfigFile string `short:"c" long:"config" env:"CONFIG_FILE" description:"Config file"` RunnerName string `short:"r" long:"runner" description:"Runner name from the config file to use instead of command-line arguments"` shutdownTimeout int `long:"shutdown-timeout" description:"Number of seconds after which the forceful shutdown operation will timeout and process will exit"` } func waitForInterrupts( finished *atomic.Bool, abortSignal chan os.Signal, doneSignal chan int, interruptSignals chan os.Signal, shutdownTimeout time.Duration, ) { if interruptSignals == nil { interruptSignals = make(chan os.Signal) } signal.Notify(interruptSignals, os.Interrupt, syscall.SIGTERM, syscall.SIGQUIT) interrupt := <-interruptSignals if finished != nil { finished.Store(true) } // request stop, but wait for force exit for interrupt == syscall.SIGQUIT { logrus.Warningln("Requested quit, waiting for builds to finish") interrupt = <-interruptSignals } logrus.Warningln("Requested exit:", interrupt) go func() { for { abortSignal <- interrupt } }() select { case newSignal := <-interruptSignals: logrus.Fatalln("forced exit:", newSignal) case <-time.After(shutdownTimeout): logrus.Fatalln("shutdown timed out") case <-doneSignal: } } // Things to do after a build func (r *RunSingleCommand) postBuild() { if r.MaxBuilds > 0 { r.MaxBuilds-- } r.lastBuild = time.Now() } func (r *RunSingleCommand) processBuild(data common.ExecutorData, 
	abortSignal chan os.Signal, provider common.ExecutorProvider) error {
	jobData, healthy := r.network.RequestJob(context.Background(), r.RunnerConfig, nil)
	if !healthy {
		logrus.Println("Runner is not healthy!")
		// Back off before polling again, but remain interruptible.
		select {
		case <-time.After(common.NotHealthyCheckInterval * time.Second):
		case <-abortSignal:
		}
		return nil
	}

	if jobData == nil {
		// No job available right now; wait one check interval.
		select {
		case <-time.After(common.CheckInterval):
		case <-abortSignal:
		}
		return nil
	}

	config := common.NewConfig()
	newBuild, err := common.NewBuild(*jobData, &r.RunnerConfig, abortSignal, data, provider)
	if err != nil {
		return err
	}

	jobCredentials := &common.JobCredentials{
		ID:    jobData.ID,
		Token: jobData.Token,
	}

	trace, err := r.network.ProcessJob(r.RunnerConfig, jobCredentials)
	if err != nil {
		return err
	}

	trace.SetDebugModeEnabled(newBuild.IsDebugModeEnabled())

	// Mark the job as running; bail out early if GitLab tells us to abort
	// or that a cancellation was requested.
	updateResult := r.network.UpdateJob(r.RunnerConfig, jobCredentials, common.UpdateJobInfo{
		ID:    jobCredentials.ID,
		State: common.Running,
	})
	if updateResult.State == common.UpdateAbort || updateResult.CancelRequested {
		trace.Finish()
		return nil
	}

	defer func() {
		err := trace.Success()
		logTerminationError(logrus.StandardLogger(), "Success", err)
	}()

	// Set up OpenTelemetry tracing for the job, if the job carries a
	// tracing feature payload.
	log := logrus.WithFields(nil)
	tracingFeature := jobData.Features.Tracing
	tr, stop := tracer(log, tracingFeature)
	defer func() {
		stopErr := stop()
		if stopErr != nil {
			log.WithError(stopErr).Warn("Error stopping trace provider")
		}
	}()

	ctx := tracerContext(context.Background(), log, tracingFeature)
	ctx, span := tr.Start(ctx, spanNameJobExecution)
	defer span.End()
	// Record the final job status on the span once the build has run.
	defer func() {
		span.SetAttributes(spanAttrJobStatus.String(newBuild.CurrentState().String()))
	}()
	setJobSpanAttributes(span, newBuild, &r.RunnerConfig)
	_ = ctx // we'll need it later

	err = newBuild.Run(config, trace)

	r.postBuild()

	return err
}

// checkFinishedConditions flips the finished flag when the build limit has
// been reached or no job arrived within WaitTimeout seconds.
func (r *RunSingleCommand) checkFinishedConditions() {
	if r.MaxBuilds < 1 && !r.runForever {
		logrus.Println("This runner has processed its build limit, so now exiting")
		r.finished.Store(true)
	}
	if r.WaitTimeout > 0 &&
		int(time.Since(r.lastBuild).Seconds()) > r.WaitTimeout {
		logrus.Println("This runner has not received a job in", r.WaitTimeout, "seconds, so now exiting")
		r.finished.Store(true)
	}
}

// HandleArgs resolves the runner configuration: when --runner is given the
// named runner is loaded from the config file (fatal on failure), otherwise
// the CLI-provided fields are used. URL, token and executor are mandatory.
func (r *RunSingleCommand) HandleArgs() {
	if r.RunnerName != "" {
		cfg := configfile.New(r.ConfigFile)
		if err := cfg.Load(); err != nil {
			logrus.Fatalf("Error loading config: %v", err)
		}
		runner, err := cfg.Config().RunnerByName(r.RunnerName)
		if err != nil {
			logrus.Fatalf("Error loading runner by name: %v", err)
		}
		r.RunnerConfig = *runner
	}

	if r.URL == "" {
		logrus.Fatalln("Missing URL")
	}
	if r.Token == "" {
		logrus.Fatalln("Missing Token")
	}
	if r.Executor == "" {
		logrus.Fatalln("Missing Executor")
	}
}

// Execute is the cli entry point for `run-single`: it validates arguments,
// resolves the executor provider, then loops requesting and processing jobs
// until a termination signal or a finish condition stops it.
func (r *RunSingleCommand) Execute(c *cli.Context) {
	err := process.EnsureSubprocessTerminationOnExit()
	if err != nil {
		logrus.WithError(err).Warn("Failed to wrap process in job object")
	}

	r.HandleArgs()

	executorProvider := r.executorProviders.GetByName(r.Executor)
	if executorProvider == nil {
		logrus.Fatalln("Unknown executor:", r.Executor)
	}

	// Providers that manage background resources need explicit Init/Shutdown.
	managedProvider, ok := executorProvider.(common.ManagedExecutorProvider)
	if ok {
		managedProvider.Init()
	}

	if r.RunnerConfig.SystemID == "" {
		systemID, err := configfile.GenerateUniqueSystemID()
		if err != nil {
			logrus.WithError(err).Fatal("Failed to generate random system ID")
		}
		r.RunnerConfig.SystemID = systemID
	}

	logrus.Println("Starting runner for", r.URL, "with token", r.ShortDescription(), "...")

	abortSignal := make(chan os.Signal)
	doneSignal := make(chan int, 1)
	// MaxBuilds == 0 means no build limit at all.
	r.runForever = r.MaxBuilds == 0

	go waitForInterrupts(&r.finished, abortSignal, doneSignal, r.interruptSignals, r.getShutdownTimeout())

	r.lastBuild = time.Now()

	// Main job loop: acquire executor capacity, process one job, release.
	for !r.finished.Load() {
		data, err := executorProvider.Acquire(&r.RunnerConfig)
		if err != nil {
			logrus.Warningln("Executor update:", err)
		}

		pErr := r.processBuild(data, abortSignal, executorProvider)
		if pErr != nil {
			logrus.WithError(pErr).Error("Failed to process build")
		}
		r.checkFinishedConditions()
		executorProvider.Release(&r.RunnerConfig, data)
	}

	// Tell waitForInterrupts we finished on our own, then shut down any
	// managed provider within the shutdown timeout.
	doneSignal <- 0

	providerShutdownCtx, shutdownProvider := context.WithTimeout(context.Background(), r.getShutdownTimeout())
	defer shutdownProvider()

	if managedProvider != nil {
		managedProvider.Shutdown(providerShutdownCtx, nil)
	}
}

// getShutdownTimeout returns the configured --shutdown-timeout (seconds) as
// a duration, falling back to the project-wide default when unset.
func (r *RunSingleCommand) getShutdownTimeout() time.Duration {
	if r.shutdownTimeout > 0 {
		return time.Duration(r.shutdownTimeout) * time.Second
	}

	return common.DefaultShutdownTimeout
}

// NewRunSingleCommand wires the run-single command with its network client
// and executor provider registry.
func NewRunSingleCommand(n common.Network, executorProviders executors.Providers) cli.Command {
	return common.NewCommand("run-single", "start single runner", &RunSingleCommand{
		network:           n,
		executorProviders: executorProviders,
	})
}
================================================ FILE: commands/single_test.go ================================================
//go:build !integration

package commands

import (
	"context"
	"io"
	"os"
	"syscall"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/commands/internal/configfile"
	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/gitlab-runner/executors"
)

// Register a permissive mock shell once for the whole test binary so builds
// created in these tests can generate scripts.
func init() {
	s := common.MockShell{}
	s.On("GetName").Return("script-shell")
	s.On("IsDefault").Return(false).Maybe()
	s.On("GenerateScript", mock.Anything, mock.Anything, mock.Anything).Return("script", nil)
	common.RegisterShell(&s)
}

// jobSimulation is a hook invoked while a mocked job is being processed.
type jobSimulation func(mock.Arguments)

func TestSingleRunnerSigquit(t *testing.T) {
	var sendQuitSignal func()

	job := func(_ mock.Arguments) {
		sendQuitSignal()
		// simulate some real work while sigquit gets handled
		time.Sleep(time.Second)
	}

	single := mockingExecutionStack(t, "test-sigquit", 1, job)

	sendQuitSignal = func() {
		single.interruptSignals <- syscall.SIGQUIT
	}

	single.Execute(nil)
}

func TestSingleRunnerMaxBuilds(t *testing.T) {
	maxBuilds := 7

	single := mockingExecutionStack(t, "test-max-build",
		maxBuilds, nil)

	single.Execute(nil)
}

func TestConfigFile(t *testing.T) {
	// create config file
	config_file, err := os.CreateTemp("", "gitlab-runner-test")
	require.NoError(t, err)
	filename := config_file.Name()
	defer os.Remove(filename)

	// fill config file with multiple runners
	_, err = config_file.WriteString(`[[runners]]
name = "runner"
token= "t1"
url = "https://example.com/"
executor = "shell"
[[runners]]
name = "runner2"
token = "t2"
url = "https://example.com/"
executor = "shell"`)
	require.NoError(t, err)
	err = config_file.Close()
	require.NoError(t, err)

	// create command config for runner2
	config := RunSingleCommand{ConfigFile: filename, RunnerName: "runner2"}
	config.HandleArgs()

	assert.Equal(t, "t2", config.Token)
}

// newRunSingleCommand builds a RunSingleCommand with placeholder credentials
// and a freshly generated system ID, suitable for driving the mocked stack.
func newRunSingleCommand(executorName string, network common.Network) *RunSingleCommand {
	systemID, _ := configfile.GenerateUniqueSystemID()

	return &RunSingleCommand{
		network: network,
		RunnerConfig: common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				Executor: executorName,
			},
			RunnerCredentials: common.RunnerCredentials{
				URL:   "http://example.com",
				Token: "_test_token_",
			},
			SystemID: systemID,
		},
		interruptSignals: make(chan os.Signal),
	}
}

// mockingExecutionStack wires mock network, provider and executor so that
// exactly maxBuilds jobs run successfully; job (if non-nil) is invoked while
// each job is processed.
func mockingExecutionStack(
	t *testing.T,
	executorName string,
	maxBuilds int,
	job jobSimulation,
) *RunSingleCommand {
	// mocking the whole stack
	e := common.NewMockExecutor(t)
	p := common.NewMockExecutorProvider(t)
	mockNetwork := common.NewMockNetwork(t)

	// Network
	jobData := spec.Job{}
	_, cancel := context.WithCancel(t.Context())
	jobTrace := common.Trace{Writer: io.Discard}
	jobTrace.SetCancelFunc(cancel)
	jobTrace.SetAbortFunc(cancel)
	mockNetwork.On("RequestJob", mock.Anything, mock.Anything, mock.Anything).Return(&jobData, true).Times(maxBuilds)
	// Mock UpdateJob to return success for existing tests
	mockNetwork.On("UpdateJob", mock.Anything, mock.Anything, mock.Anything).Return(common.UpdateJobResult{State: common.UpdateSucceeded}).Times(maxBuilds)
	processJob := mockNetwork.On("ProcessJob",
		mock.Anything, mock.Anything).Return(&jobTrace, nil).Times(maxBuilds)
	if job != nil {
		processJob.Run(job)
	}

	// ExecutorProvider
	p.On("GetFeatures", mock.Anything).Return(nil).Times(maxBuilds)
	p.On("Create").Return(e).Times(maxBuilds)
	p.On("Acquire", mock.Anything).Return(common.NewMockExecutorData(t), nil).Times(maxBuilds)
	p.On("Release", mock.Anything, mock.Anything).Return(nil).Times(maxBuilds)

	// Executor
	e.On("Prepare", mock.Anything, mock.Anything, mock.Anything).Return(nil).Times(maxBuilds)
	e.On("Finish", nil).Times(maxBuilds)
	e.On("Cleanup").Times(maxBuilds)

	// Run script successfully
	e.On("Shell").Return(&common.ShellScriptInfo{Shell: "script-shell"})
	e.On("Run", mock.Anything).Return(nil)

	single := newRunSingleCommand(executorName, mockNetwork)
	single.executorProviders = executors.NewProviderRegistry(map[string]common.ExecutorProvider{executorName: p})
	single.MaxBuilds = maxBuilds

	t.Cleanup(cancel)

	return single
}

func TestRunSingleCommand_processBuild_HandlesUpdateAbort(t *testing.T) {
	runner := &common.RunnerConfig{
		RunnerCredentials: common.RunnerCredentials{
			Token: "test-token",
		},
	}
	jobData := &spec.Job{
		ID:    123,
		Token: "job-token",
	}

	p := common.NewMockExecutorProvider(t)
	network := common.NewMockNetwork(t)
	mockTrace := common.NewMockJobTrace(t)
	mockTrace.On("SetDebugModeEnabled", false).Return()
	mockTrace.On("Finish").Return()

	// Mock RequestJob to return a job
	network.On("RequestJob", mock.Anything, *runner, mock.Anything).Return(jobData, true)

	// Mock ProcessJob to return a trace
	network.On("ProcessJob", *runner, mock.AnythingOfType("*common.JobCredentials")).Return(mockTrace, nil)

	// Mock UpdateJob to return UpdateAbort
	network.On("UpdateJob", *runner, mock.AnythingOfType("*common.JobCredentials"), mock.AnythingOfType("common.UpdateJobInfo")).
		Return(common.UpdateJobResult{State: common.UpdateAbort})

	cmd := &RunSingleCommand{
		RunnerConfig: *runner,
		network:      network,
	}

	err := cmd.processBuild(common.NewMockExecutorData(t), make(chan os.Signal), p)

	// When UpdateJob returns UpdateAbort, processBuild should return nil (no error)
	assert.Nil(t, err, "Should return no error when update is aborted")
	network.AssertExpectations(t)
	mockTrace.AssertExpectations(t)
}

func TestRunSingleCommand_processBuild_HandlesCancelRequested(t *testing.T) {
	runner := &common.RunnerConfig{
		RunnerCredentials: common.RunnerCredentials{
			Token: "test-token",
		},
	}
	jobData := &spec.Job{
		ID:    123,
		Token: "job-token",
	}

	p := common.NewMockExecutorProvider(t)
	network := common.NewMockNetwork(t)
	mockTrace := common.NewMockJobTrace(t)
	mockTrace.On("SetDebugModeEnabled", false).Return()
	mockTrace.On("Finish").Return()

	// Mock RequestJob to return a job
	network.On("RequestJob", mock.Anything, *runner, mock.Anything).Return(jobData, true)

	// Mock ProcessJob to return a trace
	network.On("ProcessJob", *runner, mock.AnythingOfType("*common.JobCredentials")).Return(mockTrace, nil)

	// Mock UpdateJob to return success but with CancelRequested=true
	network.On("UpdateJob", *runner, mock.AnythingOfType("*common.JobCredentials"), mock.AnythingOfType("common.UpdateJobInfo")).
		Return(common.UpdateJobResult{State: common.UpdateSucceeded, CancelRequested: true})

	cmd := &RunSingleCommand{
		RunnerConfig: *runner,
		network:      network,
	}

	err := cmd.processBuild(common.NewMockExecutorData(t), make(chan os.Signal), p)

	// When UpdateJob has CancelRequested=true, processBuild should return nil (no error)
	assert.Nil(t, err, "Should return no error when job is being canceled")
	network.AssertExpectations(t)
	mockTrace.AssertExpectations(t)
}
================================================ FILE: commands/steps/steps.go ================================================
package steps

import (
	"bufio"
	"context"
	"errors"
	"fmt"
	"io"
	"io/fs"
	"net"
	"os"
	"os/exec"
	"os/signal"
	"path/filepath"
	"syscall"

	"github.com/urfave/cli"
	"golang.org/x/sync/errgroup"
	"google.golang.org/grpc"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/functions/concrete"
	"gitlab.com/gitlab-org/gitlab-runner/functions/script_legacy"
	"gitlab.com/gitlab-org/step-runner/pkg/api"
	"gitlab.com/gitlab-org/step-runner/pkg/api/proxy"
	"gitlab.com/gitlab-org/step-runner/pkg/di"
	"gitlab.com/gitlab-org/step-runner/proto"
)

const (
	SubCommandName = "steps"
)

// readyMessage is the line printed on stderr once the step-runner server
// is accepting connections on the given socket.
func readyMessage(sockPath string) string {
	return fmt.Sprintf("step-runner is listening on socket %s", sockPath)
}

// IOStreams bundles the stdio streams used by Serve and Proxy so tests can
// substitute pipes/buffers for the real os.Stdin/Stdout/Stderr.
type IOStreams struct {
	Stdin  io.Reader
	Stdout io.Writer
	Stderr io.Writer
}

// Bootstrap copies the current executable to destination and, when present
// in the image, the /ca-certs.pem bundle and the /git directory next to it.
// It is used to seed the build container with the helper binary.
func Bootstrap(destination string) error {
	source, err := os.Executable()
	if err != nil {
		return fmt.Errorf("failed to get source path: %w", err)
	}

	if err := os.MkdirAll(filepath.Dir(destination), 0o755); err != nil {
		return err
	}

	if err := copyFile(source, destination, 0o755); err != nil {
		return fmt.Errorf("failed to copy binary: %w", err)
	}

	// CA bundle and git are optional: only copied if they exist in the image.
	sslSource := "/ca-certs.pem"
	if _, err := os.Stat(sslSource); err == nil {
		sslDest := filepath.Join(filepath.Dir(destination), "ca-certs.pem")
		if err := copyFile(sslSource, sslDest, 0o644); err != nil {
			return fmt.Errorf("failed to copy ssl certs: %w", err)
		}
	}

	gitSource := "/git"
	if _, err := os.Stat(gitSource); err == nil {
		gitDest := filepath.Join(filepath.Dir(destination), "git")
		if err := copyDir(gitSource, gitDest); err != nil {
			return fmt.Errorf("failed to copy git directory: %w", err)
		}
	}

	return nil
}

// copyDir recursively copies src to dst, preserving file modes, recreating
// directories and re-creating symlinks with their original targets.
func copyDir(src, dst string) error {
	return filepath.WalkDir(src, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		rel, err := filepath.Rel(src, path)
		if err != nil {
			return err
		}
		target := filepath.Join(dst, rel)
		info, err := d.Info()
		if err != nil {
			return err
		}
		if info.Mode()&os.ModeSymlink != 0 {
			// Recreate the symlink rather than following it.
			link, err := os.Readlink(path)
			if err != nil {
				return err
			}
			return os.Symlink(link, target)
		}
		if d.IsDir() {
			return os.MkdirAll(target, info.Mode())
		}
		return copyFile(path, target, info.Mode())
	})
}

// copyFile copies src to dst and sets dst's mode. The explicit out.Close()
// before Chmod surfaces write errors; the deferred Close is a safety net.
func copyFile(src, dst string, mode os.FileMode) error {
	in, err := os.Open(src)
	if err != nil {
		return err
	}
	defer func() { _ = in.Close() }()

	out, err := os.Create(dst)
	if err != nil {
		return err
	}
	defer func() { _ = out.Close() }()

	if _, err := io.Copy(out, in); err != nil {
		return err
	}
	if err := out.Close(); err != nil {
		return err
	}
	return os.Chmod(dst, mode)
}

// Serve starts the step-runner gRPC service on a unix socket and, when
// cmdAndArgs is given, additionally runs that command with the provided
// stdio streams; the server stops when the command exits or ctx is canceled.
//
//nolint:gocognit
func Serve(ctx context.Context, sockPath string, ioStreams IOStreams, cmdAndArgs ...string) error {
	listener, err := net.ListenUnix("unix", api.SocketAddr(sockPath))
	if err != nil {
		return fmt.Errorf("opening socket: %w", err)
	}
	defer listener.Close()

	service, err := di.NewContainer(
		di.WithStepFunc("script_legacy", script_legacy.Spec(), script_legacy.Run),
		di.WithStepFunc("concrete", concrete.Spec(), concrete.Run),
	).StepRunnerService()
	if err != nil {
		return fmt.Errorf("initializing step-runner: %w", err)
	}

	srv := grpc.NewServer()
	proto.RegisterStepRunnerServer(srv, service)

	ctx, cancel := context.WithCancel(ctx)
	defer cancel()

	wg, ctx := errgroup.WithContext(ctx)

	// Stop the gRPC server gracefully once the context is canceled.
	go func() {
		<-ctx.Done()
		srv.GracefulStop()
	}()

	wg.Go(func() error {
		if err := srv.Serve(listener); err != nil {
			return fmt.Errorf("server error: %w", err)
		}
		return nil
	})
	fmt.Fprintln(os.Stderr, readyMessage(sockPath))

	if len(cmdAndArgs) > 0 {
		wg.Go(func() error {
			// on script exit, we cancel() so that the step-runner serve also terminates
			defer cancel()

			stdin := bufio.NewReader(ioStreams.Stdin)

			stdinCheck := make(chan error, 1)
			go func() {
				_, err := stdin.Peek(1)
				stdinCheck <- err
			}()

			// block until either:
			// - cancellation
			// - data on stdin
			//
			// this prevents us running a command with no script to execute, and therefore returning
			// an error on cancellation even if there's no work performed.
			select {
			case err := <-stdinCheck:
				if errors.Is(err, io.EOF) {
					return nil
				}
			case <-ctx.Done():
				return nil
			}

			cmd := exec.CommandContext(ctx, cmdAndArgs[0], cmdAndArgs[1:]...)
			cmd.Stdin = stdin
			cmd.Stdout = ioStreams.Stdout
			cmd.Stderr = ioStreams.Stderr

			// error is not wrapped intentionally:
			// os.ExitError needs to be returned unwrapped.
			return cmd.Run()
		})
	}

	return wg.Wait()
}

// Proxy connects the given stdin/stdout streams to the step-runner server
// listening on sockPath.
func Proxy(sockPath string, io IOStreams) error {
	conn, err := net.DialUnix("unix", nil, api.SocketAddr(sockPath))
	if err != nil {
		return fmt.Errorf("dialing: %w", err)
	}
	defer conn.Close()

	return proxy.Proxy(io.Stdin, io.Stdout, conn)
}

// NewCommand builds the hidden `steps` command with its bootstrap, serve
// and proxy subcommands.
func NewCommand() cli.Command {
	const sockFlag = "socket"
	defaultSockPath := api.DefaultSocketPath()

	subcommands := []cli.Command{
		{
			Name:  "bootstrap",
			Usage: "bootstrap the gitlab-runner-helper to the build container",
			Action: func(cliCtx *cli.Context) error {
				destination := cliCtx.Args().First()
				if destination == "" {
					return fmt.Errorf("destination argument must be provided")
				}
				return Bootstrap(destination)
			},
		},
		{
			Name:  "serve",
			Usage: "start the CI Functions server",
			Action: func(cliCtx *cli.Context) error {
				// Serve until SIGTERM/SIGINT; remaining args are the
				// optional command to run alongside the server.
				ctx, stopNotify := signal.NotifyContext(context.Background(), syscall.SIGTERM, syscall.SIGINT)
				defer stopNotify()
				io := IOStreams{
					Stdin:  os.Stdin,
					Stdout: os.Stdout,
					Stderr: os.Stderr,
				}
				return Serve(ctx, cliCtx.String(sockFlag), io, cliCtx.Args()...)
			},
			Flags: []cli.Flag{
				cli.StringFlag{Name: sockFlag, Value: defaultSockPath},
			},
		},
		{
			Name:  "proxy",
			Usage: "connect stdin/stdout to the CI Functions server",
			Action: func(cliCtx *cli.Context) error {
				io := IOStreams{
					Stdin:  os.Stdin,
					Stdout: os.Stdout,
				}
				return Proxy(cliCtx.String(sockFlag), io)
			},
			Flags: []cli.Flag{
				cli.StringFlag{Name: sockFlag, Value: defaultSockPath},
			},
		},
	}

	return common.NewCommandWithSubcommands(
		SubCommandName,
		"manage server that can run CI Functions (internal)",
		common.CommanderFunc(func(ctx *cli.Context) { _ = cli.ShowAppHelp(ctx) }),
		true,
		subcommands,
	)
}
================================================ FILE: commands/steps/steps_test.go ================================================
//go:build !integration

package steps_test

import (
	"bytes"
	"cmp"
	"context"
	"fmt"
	"io"
	"net"
	"os"
	"os/exec"
	"path/filepath"
	"runtime"
	"slices"
	"strconv"
	"sync"
	"sync/atomic"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"github.com/urfave/cli"

	"gitlab.com/gitlab-org/gitlab-runner/commands/steps"
	"gitlab.com/gitlab-org/step-runner/proto"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
)

const (
	waitDeadline = 5 * time.Second
	waitTick     = 100 * time.Millisecond

	// Modes the test binary can re-execute itself in (see TestMain).
	externalMode = "external-mode"
	appMode      = "app-mode"

	// Sleep arguments (in seconds) for the re-executed external binary.
	dontSleep       = "0"
	sleepSomeTime   = "2"
	sleepReallyLong = "300"
)

// TestMain lets the test binary double as the "external command" and as a
// CLI app: when re-invoked with a known mode argument it dispatches to the
// corresponding helper instead of running the test suite.
func TestMain(m *testing.M) {
	if len(os.Args) > 1 {
		cmds := map[string]func(...string) int{
			externalMode: beExternalBinary,
			appMode:      beCliApp,
		}
		mode := os.Args[1]
		if cmd, ok := cmds[mode]; ok {
			// Redirect coverage output of the re-executed binary into a
			// temp dir so it doesn't pollute the real coverage data.
			mainTmpDir := os.Getenv("_MAIN_TMP_DIR")
			fakeCoverDir, err := os.MkdirTemp(mainTmpDir, mode)
			if err != nil {
				panic("creating fake cover dir: " + err.Error())
			}
			os.Setenv("GOCOVERDIR", fakeCoverDir)
			args := slices.Clone(os.Args[2:])
			os.Exit(cmd(args...))
		}
	}

	mainTmpDir, err := os.MkdirTemp("", "")
	if err != nil {
		panic("creating main temp dir: " + err.Error())
	}
	os.Setenv("_MAIN_TMP_DIR", mainTmpDir)

	rc := m.Run()

	err =
		os.RemoveAll(mainTmpDir)
	if err != nil {
		panic("deleting main temp dir: " + err.Error())
	}

	os.Exit(rc)
}

func TestBootstrap(t *testing.T) {
	dir := t.TempDir()
	dest := filepath.Join(dir, "file")

	require.NoFileExists(t, dest)
	require.NoError(t, steps.Bootstrap(dest))
	require.FileExists(t, dest)
}

func TestServe(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name            string
		sockName        string
		cmdAndArgs      []string
		stdin           string
		explicitCancel  bool
		expectedStdout  string
		expectedStderr  string
		expectListening bool
		expectErr       string
		exitCode        int
	}{
		{
			name:            "valid socket name",
			sockName:        "some.sock",
			expectListening: true,
		},
		{
			name:      "invalid socket name",
			sockName:  filepath.Join("subdir", "not-existent", "fails.sock"),
			expectErr: "opening socket: listen unix",
		},
		{
			name:            "with a successful command",
			sockName:        "some.sock",
			cmdAndArgs:      []string{os.Args[0], externalMode, dontSleep, "foo", "bar", "0"},
			stdin:           "some stdin",
			expectedStdout:  "stdin: some stdin\nstdout: foo\n",
			expectedStderr:  "stderr: bar\n",
			expectListening: true,
		},
		{
			name:           "with a failing command",
			sockName:       "some.sock",
			cmdAndArgs:     []string{os.Args[0], externalMode, dontSleep, "foo", "bar", "42"},
			stdin:          "some stdin",
			expectedStdout: "stdin: some stdin\nstdout: foo\n",
			expectedStderr: "stderr: bar\n",
			expectErr:      "exit status 42",
			exitCode:       42,
		},
		{
			name:            "with a successful longer-running command",
			sockName:        "some.sock",
			cmdAndArgs:      []string{os.Args[0], externalMode, sleepSomeTime, "foo", "bar", "0"},
			stdin:           "some stdin",
			expectedStdout:  "stdin: some stdin\nstdout: foo\n",
			expectedStderr:  "stderr: bar\n",
			expectListening: true,
		},
		{
			name:            "with a failing longer-running command",
			sockName:        "some.sock",
			cmdAndArgs:      []string{os.Args[0], externalMode, sleepSomeTime, "foo", "bar", "43"},
			stdin:           "some stdin",
			expectedStdout:  "stdin: some stdin\nstdout: foo\n",
			expectedStderr:  "stderr: bar\n",
			expectErr:       "exit status 43",
			expectListening: true,
		},
		{
			name:            "with context being canceled from the outside",
			sockName:        "some.sock",
			cmdAndArgs:      []string{os.Args[0], externalMode, sleepReallyLong, "", "", "42"},
			stdin:           "some stdin",
			explicitCancel:  true,
			expectListening: true,
			expectErr: func() string {
				if runtime.GOOS == "windows" {
					return "exit status 1"
				}
				return "signal: killed"
			}(),
		},
		{
			name:            "serve and explicit cancel",
			sockName:        "some.sock",
			cmdAndArgs:      []string{os.Args[0], externalMode, sleepReallyLong, "", "", "42"},
			explicitCancel:  true,
			expectListening: true,
			expectErr:       "",
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			t.Parallel()

			sockPath := filepath.Join(shortTempDir(t), tc.sockName)
			ctx, shutDown := context.WithCancel(t.Context())
			t.Cleanup(shutDown)

			ioStreams, stdin, stdout, stderr := testIOStreams()

			serveErr := make(chan error)
			go func() {
				serveErr <- steps.Serve(ctx, sockPath, ioStreams, tc.cmdAndArgs...)
			}()
			// The socket must be removed once Serve shuts down.
			t.Cleanup(func() {
				assert.EventuallyWithT(t, func(c *assert.CollectT) {
					assert.NoFileExists(c, sockPath)
				}, waitDeadline, waitTick, "listening socket not cleaned up")
			})

			if tc.expectListening {
				assert.EventuallyWithT(t, func(c *assert.CollectT) {
					assert.FileExists(c, sockPath)
				}, waitDeadline, waitTick, "no listening socket found")

				client := stepsClient(t, sockPath)
				status, err := client.Status(t.Context(), &proto.StatusRequest{})
				assert.NoError(t, err, "getting steps runner status")
				assert.Len(t, status.Jobs, 0, "job count")
			}

			if tc.stdin != "" {
				_, err := stdin.Write([]byte(tc.stdin))
				require.NoError(t, err, "writing to stdin pipe to external binary")
			}
			require.NoError(t, stdin.Close(), "closing stdin pipe to external binary")

			if eo := tc.expectedStdout; eo != "" {
				assert.EventuallyWithT(t, func(c *assert.CollectT) {
					assert.Equal(c, eo, stdout.String())
				}, waitDeadline, waitTick, "stdout")
			}
			if ee := tc.expectedStderr; ee != "" {
				assert.EventuallyWithT(t, func(c *assert.CollectT) {
					assert.Equal(c, ee, stderr.String())
				}, waitDeadline, waitTick, "stderr")
			}

			if tc.explicitCancel {
				time.Sleep(time.Second)
				shutDown()
			}

			var err error
			// if explicit cancel, or expected error, we're expecting serve to return
			// otherwise, we let it run and it'll stop running when the test performs cleanup
			if tc.explicitCancel || tc.expectErr != "" {
				err = <-serveErr
			}

			if tc.expectErr != "" {
				require.ErrorContains(t, err, tc.expectErr)
			} else {
				require.NoError(t, err)
			}

			if tc.exitCode != 0 {
				exitErr, ok := err.(*exec.ExitError)
				require.True(t, ok, "must return ExitError directly, not wrapped")
				require.Equal(t, tc.exitCode, exitErr.ExitCode())
			}
		})
	}
}

func TestProxy(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name            string
		skipOnOS        []string
		sockPath        string
		toSend          []string
		close           bool
		closeErr        error
		expectToReceive string
		expectShutdown  bool
		expectedErr     string
	}{
		{
			name:            "proxies",
			toSend:          []string{"hello", "there"},
			expectToReceive: "hello\nthere\n",
		},
		{
			// On windows the proxy does not shut down when the output writer is closed, it does not close the Proxy.
			skipOnOS:        []string{"windows"},
			name:            "stops proxying when input is closed",
			toSend:          []string{"hello", "there"},
			close:           true,
			expectToReceive: "hello\nthere\n",
			expectShutdown:  true,
		},
		{
			// On windows the proxy does not shut down when the output writer is closed, it does not close the Proxy.
			skipOnOS:        []string{"windows"},
			name:            "stops proxying when input is closed with error",
			toSend:          []string{"hello", "there"},
			close:           true,
			closeErr:        fmt.Errorf("oh no something went south"),
			expectToReceive: "hello\nthere\n",
			expectShutdown:  true,
			expectedErr:     "oh no something went south",
		},
		{
			name:           "does not proxy when socket is invalid",
			sockPath:       filepath.Join("does", "not", "exist.sock"),
			expectShutdown: true,
			expectedErr:    socketErrs.Get(t, "dialInvalidSocket"),
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			t.Parallel()

			if slices.Contains(tc.skipOnOS, runtime.GOOS) {
				t.Skipf("not supported on any of %q", tc.skipOnOS)
			}

			// Use the test case's socket path if set, else spin up an echo server.
			sockPath := cmp.Or(tc.sockPath, echoServer(t))
			ioStreams, outWriter, in, _ := testIOStreams()

			var proxyHasShutDown atomic.Bool
			go func() {
				err := steps.Proxy(sockPath, ioStreams)
				proxyHasShutDown.Store(true)
				if ee := tc.expectedErr; ee != "" {
					assert.ErrorContains(t, err, ee)
				} else {
					assert.NoError(t, err, "proxy error")
				}
			}()

			for _, msg := range tc.toSend {
				_, err := fmt.Fprintln(outWriter, msg)
				assert.NoError(t, err, "writing data")
			}
			assert.EventuallyWithT(t, func(c *assert.CollectT) {
				assert.Equal(c, tc.expectToReceive, in.String())
			}, waitDeadline, waitTick, "data received from proxy is not as expected")

			if tc.close {
				outWriter.CloseWithError(tc.closeErr)
			}

			assert.EventuallyWithT(t, func(c *assert.CollectT) {
				assert.Equal(c, tc.expectShutdown, proxyHasShutDown.Load())
			}, waitDeadline, waitTick, "proxy running state not as expected")
		})
	}
}

func TestCli(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name             string
		args             []string
		expectedStdoutRE string
	}{
		{
			name:             "steps command is hidden",
			args:             []string{"--help"},
			expectedStdoutRE: `\nCOMMANDS:\n[ ]+help[^\n]+\n\nGLOBAL OPTIONS:\n`,
		},
		{
			name:             "steps subcommands are visible",
			args:             []string{"steps", "--help"},
			expectedStdoutRE: `\nCOMMANDS:\n[ ]+bootstrap[^\n]+\n[ ]+serve[^\n]+\n[ ]+proxy[^\n]+\n\nOPTIONS:\n`,
		},
		{
			name:             "uses and shows the correct default socket path for serve",
			args:             []string{"steps", "serve", "--help"},
			expectedStdoutRE: `\n[ ]+--socket value[ ]+\(default: "[^"]+/step-runner.sock"\)\n`,
		},
		{
			name:             "uses and shows the correct default socket path for proxy",
			args:             []string{"steps", "proxy", "--help"},
			expectedStdoutRE: `\n[ ]+--socket value[ ]+\(default: "[^"]+/step-runner.sock"\)\n`,
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			t.Parallel()

			stdout := &bytes.Buffer{}

			// Re-execute this test binary in CLI-app mode (see TestMain).
			args := []string{appMode}
			args = append(args, tc.args...)
			cli := exec.Command(os.Args[0], args...)
			cli.Stdout = stdout

			err := cli.Run()
			assert.NoError(t, err, "error running CLI")

			if re := tc.expectedStdoutRE; re == "" {
				assert.Empty(t, stdout.String(), "stdout should be empty")
			} else {
				assert.Regexp(t, re, stdout.String(), "stdout not as expected")
			}
		})
	}
}

// beCliApp runs the test binary mimicking a CLI app with the steps command set up.
// With that, we can check on certain aspects of how commands are registered.
func beCliApp(args ...string) int {
	app := cli.NewApp()
	app.Commands = []cli.Command{
		steps.NewCommand(),
	}
	app.CommandNotFound = func(ctx *cli.Context, s string) {
		fmt.Fprintf(os.Stderr, "command not found: %s", s)
		os.Exit(-2)
	}

	runArgs := []string{"fakeArgv0"}
	runArgs = append(runArgs, args...)
	if err := app.Run(runArgs); err != nil {
		return -1
	}
	return 0
}

// beExternalBinary runs the test binary mimicking an external binary.
// It expects the following args:
// - sleepTime (mandatory) - how long to sleep before doing anything
// - stdout (optional) - the data to print to stdout
// - stderr (optional) - the data to print to stderr
// - exitCode (optional) - the code to exit with
//
// The first thing it does is to read from stdin, until that stream is closed, and only then continues. It also prints
// the data it received from stdin on stdout.
func beExternalBinary(args ...string) int {
	// Echo stdin back on stdout first; this blocks until stdin is closed.
	stdin, err := io.ReadAll(os.Stdin)
	if err != nil {
		panic("reading stdin: " + err.Error())
	}
	fmt.Fprintln(os.Stdout, "stdin: "+string(stdin))

	sleepTime, err := strconv.Atoi(args[0])
	if err != nil {
		panic("parsing sleep: " + err.Error())
	}
	time.Sleep(time.Duration(sleepTime) * time.Second)

	rc := 0
	l := len(args)
	// Fallthroughs: more args means more outputs; each case also emits
	// everything the shorter-arg cases emit.
	switch {
	case l >= 4:
		var err error
		rc, err = strconv.Atoi(args[3])
		if err != nil {
			panic("parsing return code: " + err.Error())
		}
		fallthrough
	case l >= 3:
		fmt.Fprintln(os.Stderr, "stderr: "+args[2])
		fallthrough
	case l >= 2:
		fmt.Fprintln(os.Stdout, "stdout: "+args[1])
	}

	return rc
}

// testIOStreams returns IOStreams backed by a stdin pipe and thread-safe
// stdout/stderr buffers, plus handles to drive and inspect them.
func testIOStreams() (steps.IOStreams, *io.PipeWriter, *syncBuffer, *syncBuffer) {
	stdinReader, stdinWriter := io.Pipe()
	stdout, stderr := &syncBuffer{}, &syncBuffer{}
	return steps.IOStreams{
		Stdin:  stdinReader,
		Stdout: stdout,
		Stderr: stderr,
	}, stdinWriter, stdout, stderr
}

// osErrs abstracts away different errors on different OSs
type osErrs map[string]map[string]string

// Get returns the OS-specific error string for symbolicName, falling back
// to the "" key as the default; it fails the test when neither exists.
func (oe osErrs) Get(t *testing.T, symbolicName string) string {
	errs, ok := oe[symbolicName]
	require.True(t, ok, "no errors for %q", symbolicName)

	os := runtime.GOOS
	if e, ok := errs[os]; ok {
		return e
	}
	if e, ok := errs[""]; ok {
		return e
	}

	require.FailNow(t, "no %q error for %s", symbolicName, os)
	return ""
}

var socketErrs = osErrs{
	"listenInvalidSocket": {
		"windows": "bind: A socket operation encountered a dead network.",
		"":        "bind: no such file or directory",
	},
	"dialInvalidSocket": {
		"windows": "connect: A socket operation encountered a dead network.",
		"":        "connect: no such file or directory",
	},
}

// shortTempDir is a stand-in for t.TempDir, which aims to produce shorter path names.
// Unix sockets on Windows have a max path len of 108 chars, so we need to be stingy.
func shortTempDir(t *testing.T) string { dir, err := os.MkdirTemp("", "glr-sr-*") require.NoError(t, err, "creating temp dir") t.Cleanup(func() { err := os.RemoveAll(dir) require.NoError(t, err, "deleting temp dir") }) return dir } func stepsClient(t *testing.T, sockPath string) proto.StepRunnerClient { cliConn, err := grpc.NewClient("unix:"+sockPath, grpc.WithTransportCredentials(insecure.NewCredentials())) require.NoError(t, err) return proto.NewStepRunnerClient(cliConn) } func echoServer(t *testing.T) string { t.Helper() sockPath := filepath.Join(shortTempDir(t), "test.sock") l, err := net.Listen("unix", sockPath) require.NoError(t, err, "creating listener") t.Cleanup(func() { require.NoError(t, l.Close(), "closing listener") }) go func() { for { conn, err := l.Accept() if err != nil { continue } go func(conn net.Conn) { defer conn.Close() _, err = io.Copy(conn, conn) assert.NoError(t, err, "echoing data") }(conn) } }() return sockPath } type syncBuffer struct { sync.Mutex buf bytes.Buffer } func (sb *syncBuffer) Write(p []byte) (int, error) { sb.Lock() defer sb.Unlock() return sb.buf.Write(p) } var _ io.Writer = &syncBuffer{} func (sb *syncBuffer) String() string { sb.Lock() defer sb.Unlock() return sb.buf.String() } ================================================ FILE: commands/testdata/.runner_system_id ================================================ s_760931104d8c ================================================ FILE: commands/testdata/test-config.toml ================================================ concurrent = 2 check_interval = 3 log_level = "info" [[runners]] name = "test-docker-runner" url = "https://gitlab.example.com/" token = "test-token1" executor = "docker" environment = ["ENV=test"] [[runners]] name = "test-shell-runner-1" url = "https://gitlab.example.com/" token = "test-token2" executor = "shell" environment = ["ENV=test-local"] [[runners]] name = "test-shell-runner-2" url = "https://gitlab.example.com/" token = "test-token3" executor = 
"shell" environment = ["ENV=test-local"] ================================================ FILE: commands/tracing.go ================================================ package commands import ( "context" "net/http" "net/url" "time" "github.com/sirupsen/logrus" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers/observability" "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/exporters/otlp/otlptrace" "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc" "go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp" "go.opentelemetry.io/otel/sdk/resource" tracesdk "go.opentelemetry.io/otel/sdk/trace" semconv "go.opentelemetry.io/otel/semconv/v1.40.0" oteltrace "go.opentelemetry.io/otel/trace" "go.opentelemetry.io/otel/trace/noop" "golang.org/x/oauth2" "golang.org/x/oauth2/google" "google.golang.org/api/idtoken" "google.golang.org/api/option" "google.golang.org/grpc" ) const ( tracerName = "gitlab-ci-runner" spanNameJobExecution = "job_execution" spanAttrJobID attribute.Key = "ci.job.id" spanAttrProjectID attribute.Key = "ci.project.id" spanAttrPipelineID attribute.Key = "ci.pipeline.id" spanAttrPipelineSource attribute.Key = "ci.pipeline.source" spanAttrRunnerID attribute.Key = "ci.runner.id" spanAttrRunnerExecutor attribute.Key = "ci.runner.executor" spanAttrJobStatus attribute.Key = "ci.job.status" ) func tracerContext(ctx context.Context, log *logrus.Entry, tracingFeature *spec.Tracing) context.Context { if tracingFeature == nil { return ctx } traceID, err := oteltrace.TraceIDFromHex(tracingFeature.TraceID) if err != nil { log.WithError(err).Warn("Failed to parse trace ID") return ctx } spanID, err := oteltrace.SpanIDFromHex(tracingFeature.SpanParentID) if err != nil { log.WithError(err).Warn("Failed to parse span ID") return ctx } return oteltrace.ContextWithSpanContext(ctx, oteltrace.NewSpanContext(oteltrace.SpanContextConfig{ TraceID: traceID, 
		SpanID:     spanID,
		TraceFlags: oteltrace.FlagsSampled, // we got the trace feature set, so presumably the server wants the Runner to trace this job.
		Remote:     true,
	}))
}

// tracer returns a Tracer for job spans plus a stop function that flushes and
// shuts down the underlying provider. When tracing is not requested (nil
// feature or no OTEL endpoints), or no endpoint yields a usable exporter, a
// no-op tracer and a no-op stop function are returned instead.
func tracer(log *logrus.Entry, tracingFeature *spec.Tracing) (oteltrace.Tracer, func() error) {
	if tracingFeature == nil || len(tracingFeature.OTELEndpoints) == 0 {
		return noop.Tracer{}, nopStop
	}

	tp := traceProviderForURLs(log, tracingFeature.OTELEndpoints)
	if tp == nil {
		return noop.Tracer{}, nopStop
	}

	tpStop := func() error {
		//nolint:contextcheck
		// Bounded shutdown: give the provider up to 5s to flush remaining spans.
		ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
		defer cancel()

		return tp.Shutdown(ctx)
	}

	return tp.Tracer(tracerName), tpStop
}

// nopStop is the stop function returned when no tracer provider was set up.
func nopStop() error {
	return nil
}

// setJobSpanAttributes records job, pipeline, and runner identifiers on the
// given span, using the ci.* attribute keys declared above.
func setJobSpanAttributes(span oteltrace.Span, build *common.Build, runner *common.RunnerConfig) {
	span.SetAttributes(
		spanAttrJobID.Int64(build.ID),
		spanAttrProjectID.Int64(build.JobInfo.ProjectID),
		spanAttrPipelineID.String(build.Variables.Value("CI_PIPELINE_ID")),
		spanAttrPipelineSource.String(build.Variables.Value("CI_PIPELINE_SOURCE")),
		spanAttrRunnerID.Int64(runner.ID),
		spanAttrRunnerExecutor.String(runner.Executor),
	)
}

// traceProviderForURLs builds a TracerProvider that exports to every endpoint
// which yields a working exporter; endpoints that fail to produce one are
// logged (by exporterForEndpoint) and skipped. Returns nil when no endpoint
// produced an exporter.
func traceProviderForURLs(log *logrus.Entry, endpoints []spec.OTELEndpoint) *tracesdk.TracerProvider {
	var exporters []tracesdk.SpanExporter
	for _, e := range endpoints {
		if exp := exporterForEndpoint(log, &e); exp != nil {
			exporters = append(exporters, exp)
		}
	}

	var exporter tracesdk.SpanExporter
	switch len(exporters) {
	case 0:
		return nil
	case 1:
		exporter = exporters[0]
	default:
		// Fan each span out to all configured exporters.
		exporter = &observability.MultiSpanExporter{
			Exporters: exporters,
		}
	}

	return tracesdk.NewTracerProvider(
		tracesdk.WithResource(constructOTELResource()),
		tracesdk.WithBatcher(exporter),
		tracesdk.WithSampler(tracesdk.AlwaysSample()), // we got the tracing configuration - we must trace!
) } //nolint:gocognit func exporterForEndpoint(log *logrus.Entry, e *spec.OTELEndpoint) tracesdk.SpanExporter { u, err := url.Parse(e.URL) if err != nil { log.WithError(err).Warn("Error parsing OTEL URL") return nil } var otlpHTTPOptions []otlptracehttp.Option var otlpGRPCOptions []otlptracegrpc.Option switch u.Scheme { case "http": otlpHTTPOptions = []otlptracehttp.Option{ otlptracehttp.WithEndpoint(u.Host), otlptracehttp.WithURLPath(u.Path), otlptracehttp.WithInsecure(), } case "https": otlpHTTPOptions = []otlptracehttp.Option{ otlptracehttp.WithEndpoint(u.Host), otlptracehttp.WithURLPath(u.Path), } case "grpc": otlpGRPCOptions = []otlptracegrpc.Option{ otlptracegrpc.WithEndpoint(u.Host), // gRPC ignores the URL path, don't bother setting it. otlptracegrpc.WithInsecure(), } case "grpcs": otlpGRPCOptions = []otlptracegrpc.Option{ otlptracegrpc.WithEndpoint(u.Host), // gRPC ignores the URL path, don't bother setting it. } default: log.Warn("Unsupported scheme in URL: ", u.Scheme) return nil } if e.Auth != nil { switch e.Auth.Type { case "http_bearer_gcp_oidc": oidcCfg := e.Auth.HTTPBearerGCPOIDC if oidcCfg == nil { log.Warn("Missing http_bearer_gcp_oidc field for tracing URL: ", e.URL) return nil } credentials, err := google.FindDefaultCredentials(context.Background()) if err != nil { log.WithError(err).Warn("Error finding default GCP credentials for tracing URL: ", e.URL) return nil } ts, err := idtoken.NewTokenSource(context.Background(), oidcCfg.Audience, option.WithCredentials(credentials)) if err != nil { log.WithError(err).Warn("Error creating token source") return nil } ts = oauth2.ReuseTokenSource(nil, ts) switch u.Scheme { case "http", "https": otlpHTTPOptions = append(otlpHTTPOptions, otlptracehttp.WithHTTPClient(&http.Client{ Transport: &oauth2.Transport{ Base: http.DefaultTransport, Source: ts, }, CheckRedirect: func(req *http.Request, via []*http.Request) error { return http.ErrUseLastResponse }, }), ) default: // gRPC otlpGRPCOptions = 
append(otlpGRPCOptions, otlptracegrpc.WithDialOption(grpc.WithPerRPCCredentials(&perRPCCredentialsFromTokenSource{ src: ts, })), ) } default: log.Warnf("Unsupported authentication type %q for OTLP endpoint: %s", e.Auth.Type, e.URL) return nil } } var c otlptrace.Client if len(otlpHTTPOptions) > 0 { c = otlptracehttp.NewClient(otlpHTTPOptions...) } else { c = otlptracegrpc.NewClient(otlpGRPCOptions...) } exporter, err := otlptrace.New(context.Background(), c) if err != nil { log.WithError(err).Warn("Error constructing OTLP exporter") return nil } return exporter } func constructOTELResource() *resource.Resource { // Do not use resource.Default() as it doesn't provide anything particularly useful but leads to problems. // See https://github.com/open-telemetry/opentelemetry-go/issues/3769 and https://github.com/letsencrypt/boulder/pull/7712. return resource.NewWithAttributes( semconv.SchemaURL, semconv.ServiceName("runner"), semconv.ServiceVersion(common.AppVersion.Version), ) } type perRPCCredentialsFromTokenSource struct { src oauth2.TokenSource } func (p *perRPCCredentialsFromTokenSource) GetRequestMetadata(ctx context.Context, uri ...string) (map[string]string, error) { t, err := p.src.Token() if err != nil { return nil, err } return map[string]string{ "authorization": t.Type() + " " + t.AccessToken, // metadata keys must be lowercase }, nil } func (p *perRPCCredentialsFromTokenSource) RequireTransportSecurity() bool { return false // it should work for insecure connections. 
} ================================================ FILE: commands/tracing_test.go ================================================ //go:build !integration package commands import ( "testing" "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" oteltrace "go.opentelemetry.io/otel/trace" "gitlab.com/gitlab-org/gitlab-runner/common/spec" ) func TestTracerContext(t *testing.T) { log := logrus.WithFields(nil) t.Run("nil tracing feature returns original context", func(t *testing.T) { baseCtx := t.Context() ctx := tracerContext(baseCtx, log, nil) assert.Equal(t, baseCtx, ctx) }) t.Run("invalid trace ID returns original context", func(t *testing.T) { baseCtx := t.Context() ctx := tracerContext(baseCtx, log, &spec.Tracing{ TraceID: "not-a-valid-hex", }) assert.Equal(t, baseCtx, ctx) }) t.Run("valid trace ID and span parent ID sets both", func(t *testing.T) { baseCtx := t.Context() traceID := "0000000000000000000000000000abcd" spanID := "000000000000abcd" ctx := tracerContext(baseCtx, log, &spec.Tracing{ TraceID: traceID, SpanParentID: spanID, }) sc := oteltrace.SpanFromContext(ctx).SpanContext() assert.Equal(t, traceID, sc.TraceID().String()) assert.Equal(t, spanID, sc.SpanID().String()) }) } func TestTraceProviderForURLs(t *testing.T) { log := logrus.WithFields(nil) t.Run("no endpoints returns nil", func(t *testing.T) { tp := traceProviderForURLs(log, nil) assert.Nil(t, tp) }) t.Run("invalid URL returns nil", func(t *testing.T) { endpoints := []spec.OTELEndpoint{{URL: "://invalid"}} tp := traceProviderForURLs(log, endpoints) assert.Nil(t, tp) }) t.Run("unsupported scheme returns nil", func(t *testing.T) { endpoints := []spec.OTELEndpoint{{URL: "ftp://localhost:4318"}} tp := traceProviderForURLs(log, endpoints) assert.Nil(t, tp) }) for _, scheme := range []string{"http", "https", "grpc", "grpcs"} { t.Run("scheme "+scheme+" without auth returns non-nil", func(t *testing.T) { endpoints := []spec.OTELEndpoint{{URL: scheme + 
"://localhost:4318/v1/traces"}} tp := traceProviderForURLs(log, endpoints) require.NotNil(t, tp) _ = tp.Shutdown(t.Context()) }) } t.Run("unsupported auth type returns nil", func(t *testing.T) { endpoints := []spec.OTELEndpoint{{ URL: "http://localhost:4318", Auth: &spec.OTELEndpointAuth{Type: "unsupported_type"}, }} tp := traceProviderForURLs(log, endpoints) assert.Nil(t, tp) }) t.Run("http_bearer_gcp_oidc with nil config returns nil", func(t *testing.T) { endpoints := []spec.OTELEndpoint{{ URL: "http://localhost:4318", Auth: &spec.OTELEndpointAuth{ Type: "http_bearer_gcp_oidc", HTTPBearerGCPOIDC: nil, }, }} tp := traceProviderForURLs(log, endpoints) assert.Nil(t, tp) }) t.Run("multiple endpoints with one skipped returns non-nil", func(t *testing.T) { endpoints := []spec.OTELEndpoint{ {URL: "http://localhost:4318"}, {URL: "ftp://invalid"}, } tp := traceProviderForURLs(log, endpoints) require.NotNil(t, tp) _ = tp.Shutdown(t.Context()) }) t.Run("multiple valid endpoints returns non-nil", func(t *testing.T) { endpoints := []spec.OTELEndpoint{ {URL: "http://localhost:4318"}, {URL: "grpc://localhost:4317"}, } tp := traceProviderForURLs(log, endpoints) require.NotNil(t, tp) _ = tp.Shutdown(t.Context()) }) } ================================================ FILE: commands/unregister.go ================================================ package commands import ( "errors" "fmt" "github.com/sirupsen/logrus" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/commands/internal/configfile" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/network" ) type UnregisterCommand struct { common.RunnerCredentials network common.Network ConfigFile string `short:"c" long:"config" env:"CONFIG_FILE" description:"Config file"` Name string `toml:"name" json:"name" short:"n" long:"name" description:"Name of the runner you wish to unregister"` AllRunners bool `toml:"all_runners" json:"all-runners" long:"all-runners" description:"Unregister all runners"` 
} func NewUnregisterCommand(n common.Network) cli.Command { return common.NewCommand("unregister", "unregister specific runner", &UnregisterCommand{ network: n, }) } func (c *UnregisterCommand) unregisterAllRunners(cfg *common.Config) ([]*common.RunnerConfig, error) { logrus.Warningln("Unregistering all runners") var errs error var runners []*common.RunnerConfig for _, r := range cfg.Runners { if !c.unregisterRunner(*r, r.SystemID) { errs = errors.Join(errs, fmt.Errorf("failed to unregister runner %q", r.Name)) // If unregister fails, leave the runner in the config runners = append(runners, r) } } return runners, errs } func (c *UnregisterCommand) unregisterSingleRunner(cfg *common.Config) ([]*common.RunnerConfig, error) { var runnerConfig *common.RunnerConfig var err error switch { case c.Name != "" && c.Token != "": runnerConfig, err = cfg.RunnerByNameAndToken(c.Name, c.Token) case c.Token != "": runnerConfig, err = cfg.RunnerByToken(c.Token) case c.Name != "": runnerConfig, err = cfg.RunnerByName(c.Name) default: return nil, errors.New("at least one of --name or --token must be specified") } if err != nil { return nil, fmt.Errorf("get runner by token or name: %w", err) } c.RunnerCredentials = runnerConfig.RunnerCredentials // Unregister given Token and URL of the runner if !c.unregisterRunner(*runnerConfig, runnerConfig.SystemID) { return nil, fmt.Errorf("failed to unregister runner %q", c.Name) } var runners []*common.RunnerConfig for _, otherRunner := range cfg.Runners { if otherRunner.RunnerCredentials != c.RunnerCredentials { runners = append(runners, otherRunner) } } return runners, nil } func (c *UnregisterCommand) unregisterRunner(r common.RunnerConfig, systemID string) bool { if network.TokenIsCreatedRunnerToken(r.Token) { return c.network.UnregisterRunnerManager(r, systemID) } return c.network.UnregisterRunner(r) } func (c *UnregisterCommand) Execute(context *cli.Context) { userModeWarning(false) cfg := configfile.New(c.ConfigFile) var changed bool if 
err := cfg.Load(configfile.WithMutateOnLoad(func(cfg *common.Config) error { var runners []*common.RunnerConfig var err error if c.AllRunners { runners, err = c.unregisterAllRunners(cfg) if err != nil { logrus.WithError(err).Errorln("Failed to unregister runners") } } else { runners, err = c.unregisterSingleRunner(cfg) if err != nil { return fmt.Errorf("unregister runner: %w", err) } } changed = len(cfg.Runners) != len(runners) if changed { cfg.Runners = runners } return nil })); err != nil { logrus.WithError(err).Fatalln("failed to unregister runner") } // check if anything changed if !changed { return } // save config file if err := cfg.Save(); err != nil { logrus.Fatalln("Failed to update", c.ConfigFile, err) } logrus.Println("Updated", c.ConfigFile) } ================================================ FILE: commands/unregister_test.go ================================================ //go:build !integration package commands import ( "fmt" "os" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/commands/internal/configfile" "gitlab.com/gitlab-org/gitlab-runner/common" ) const ( testRunner1 = "test-runner-1" testRunner2 = "test-runner-2" testToken1 = "test-token-1" testToken2 = "test-token-2" ) var ( testRunnerConfig1 = common.RunnerConfig{ Name: testRunner1, RunnerCredentials: common.RunnerCredentials{Token: testToken1}, } testRunnerConfig2 = common.RunnerConfig{ Name: testRunner2, RunnerCredentials: common.RunnerCredentials{Token: testToken2}, } ) func TestUnregisterCommand_unregisterAllRunner(t *testing.T) { testCases := []struct { name string cfgs []*common.RunnerConfig setup func(tb testing.TB) common.Network expectedRunners []*common.RunnerConfig expectedErr string }{ { name: "successfully unregister all runners", cfgs: []*common.RunnerConfig{ &testRunnerConfig1, &testRunnerConfig2, }, setup: func(tb testing.TB) common.Network { 
tb.Helper() mn := common.NewMockNetwork(t) mn.On( "UnregisterRunner", testRunnerConfig1, ).Once().Return(true) mn.On( "UnregisterRunner", testRunnerConfig2, ).Once().Return(true) return mn }, }, { name: "successfully unregister some runners", cfgs: []*common.RunnerConfig{ &testRunnerConfig1, &testRunnerConfig2, }, setup: func(tb testing.TB) common.Network { tb.Helper() mn := common.NewMockNetwork(t) mn.On( "UnregisterRunner", testRunnerConfig1, ).Once().Return(true) mn.On( "UnregisterRunner", testRunnerConfig2, ).Once().Return(false) return mn }, expectedRunners: []*common.RunnerConfig{ &testRunnerConfig2, }, expectedErr: `failed to unregister runner "test-runner-2"`, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { cmd := UnregisterCommand{network: tc.setup(t)} runners, err := cmd.unregisterAllRunners(&common.Config{Runners: tc.cfgs}) if tc.expectedErr != "" { assert.Error(t, err) assert.ErrorContains(t, err, tc.expectedErr) } else { assert.NoError(t, err) } assert.Equal(t, tc.expectedRunners, runners) }) } } func TestUnregisterCommand_unregisterSingleRunner(t *testing.T) { testCases := []struct { name string cfg *common.Config runnerName string runnerConfig common.RunnerConfig setup func(tb testing.TB) common.Network expectedRunners []*common.RunnerConfig expectedErr string }{ { name: "unregister with runner creds", cfg: &common.Config{ Runners: []*common.RunnerConfig{ &testRunnerConfig1, &testRunnerConfig2, }, }, runnerConfig: testRunnerConfig1, setup: func(tb testing.TB) common.Network { tb.Helper() mn := common.NewMockNetwork(t) mn.On( "UnregisterRunner", testRunnerConfig1, ).Return(true) return mn }, expectedRunners: []*common.RunnerConfig{ &testRunnerConfig2, }, }, { name: "unregister with runner name", cfg: &common.Config{ Runners: []*common.RunnerConfig{ &testRunnerConfig1, &testRunnerConfig2, }, }, runnerName: testRunner1, setup: func(tb testing.TB) common.Network { tb.Helper() mn := common.NewMockNetwork(t) mn.On( 
"UnregisterRunner", testRunnerConfig1, ).Return(true) return mn }, expectedRunners: []*common.RunnerConfig{ &testRunnerConfig2, }, }, { name: "unregister with runner name and creds", cfg: &common.Config{ Runners: []*common.RunnerConfig{ &testRunnerConfig1, &testRunnerConfig2, }, }, runnerName: testRunner2, runnerConfig: testRunnerConfig2, setup: func(tb testing.TB) common.Network { tb.Helper() mn := common.NewMockNetwork(t) mn.On( "UnregisterRunner", testRunnerConfig2, ).Return(true) return mn }, expectedRunners: []*common.RunnerConfig{ &testRunnerConfig1, }, }, { name: "name not found", cfg: &common.Config{}, runnerName: "not-found-runner", setup: func(tb testing.TB) common.Network { tb.Helper() return common.NewMockNetwork(t) }, expectedErr: "could not find a runner with the name 'not-found-runner'", }, { name: "token not found", cfg: &common.Config{}, runnerConfig: common.RunnerConfig{ RunnerCredentials: common.RunnerCredentials{Token: "not-found-token"}, }, setup: func(tb testing.TB) common.Network { tb.Helper() return common.NewMockNetwork(t) }, expectedErr: "could not find a runner with the token 'not-found'", }, { name: "missing name or token", cfg: &common.Config{}, setup: func(tb testing.TB) common.Network { tb.Helper() return common.NewMockNetwork(t) }, expectedErr: "at least one of --name or --token must be specified", }, { name: "unregister failure", cfg: &common.Config{ Runners: []*common.RunnerConfig{ &testRunnerConfig1, &testRunnerConfig2, }, }, runnerConfig: testRunnerConfig1, runnerName: testRunner1, setup: func(tb testing.TB) common.Network { tb.Helper() mn := common.NewMockNetwork(t) mn.On( "UnregisterRunner", testRunnerConfig1, ).Return(false) return mn }, expectedErr: `failed to unregister runner "test-runner-1"`, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { cmd := UnregisterCommand{ network: tc.setup(t), Name: tc.runnerName, RunnerCredentials: tc.runnerConfig.RunnerCredentials, } runners, err := 
cmd.unregisterSingleRunner(tc.cfg) if tc.expectedErr != "" { assert.Error(t, err) assert.ErrorContains(t, err, tc.expectedErr) assert.Nil(t, runners) } else { assert.NoError(t, err) assert.Equal(t, tc.expectedRunners, runners) } }) } } func TestUnregisterCommand_unregisterRunner(t *testing.T) { testCases := []struct { name string setup func(tb testing.TB) common.Network token string systemID string expected bool }{ { name: "unregister runner manager success", setup: func(tb testing.TB) common.Network { tb.Helper() mn := common.NewMockNetwork(t) mn.On( "UnregisterRunnerManager", mock.Anything, "test-system-id", ).Return(true) return mn }, token: "glrt-test-token", systemID: "test-system-id", expected: true, }, { name: "unregister runner manager failure", setup: func(tb testing.TB) common.Network { tb.Helper() mn := common.NewMockNetwork(t) mn.On( "UnregisterRunnerManager", mock.Anything, "test-system-id", ).Return(false) return mn }, token: "glrt-test-token", systemID: "test-system-id", expected: false, }, { name: "unregister runner success", setup: func(tb testing.TB) common.Network { tb.Helper() mn := common.NewMockNetwork(t) mn.On( "UnregisterRunner", mock.Anything, ).Return(true) return mn }, token: "test-token", expected: true, }, { name: "unregister runner failure", setup: func(tb testing.TB) common.Network { tb.Helper() mn := common.NewMockNetwork(t) mn.On( "UnregisterRunner", mock.Anything, ).Return(false) return mn }, token: "test-token", expected: false, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { cmd := UnregisterCommand{network: tc.setup(t)} result := cmd.unregisterRunner(common.RunnerConfig{RunnerCredentials: common.RunnerCredentials{Token: tc.token}}, tc.systemID) assert.Equal(t, tc.expected, result) }) } } func TestUnregisterCommand_Execute(t *testing.T) { testCases := []struct { name string removeAllRunners bool runnerName string setup func(tb testing.TB) common.Network removedRunners []string remainingRunners []string }{ 
{ name: "success removing single runner", runnerName: "test-docker-runner", setup: func(tb testing.TB) common.Network { tb.Helper() mn := common.NewMockNetwork(t) mn.On( "UnregisterRunner", mock.Anything, ).Return(true) return mn }, removedRunners: []string{"test-docker-runner"}, remainingRunners: []string{"test-shell-runner-1", "test-shell-runner-2"}, }, { name: "success removing all runners", setup: func(tb testing.TB) common.Network { tb.Helper() mn := common.NewMockNetwork(t) mn.On( "UnregisterRunner", mock.Anything, ).Return(true) return mn }, removeAllRunners: true, removedRunners: []string{"test-docker-runner", "test-shell-runner-1", "test-shell-runner-2"}, }, { name: "partial failure removing all runners", setup: func(tb testing.TB) common.Network { tb.Helper() mn := common.NewMockNetwork(t) mn.On( "UnregisterRunner", mock.Anything, ).Once().Return(true) mn.On( "UnregisterRunner", mock.Anything, ).Once().Return(true) mn.On( "UnregisterRunner", mock.Anything, ).Once().Return(false) return mn }, removeAllRunners: true, remainingRunners: []string{"test-shell-runner-2"}, removedRunners: []string{"test-docker-runner", "test-shell-runner-1"}, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { oldConfig, err := os.ReadFile("./testdata/test-config.toml") require.NoError(t, err) t.Cleanup(func() { require.NoError(t, os.WriteFile("./testdata/test-config.toml", oldConfig, 0o600)) }) cmd := &UnregisterCommand{ network: tc.setup(t), ConfigFile: "./testdata/test-config.toml", Name: tc.runnerName, AllRunners: tc.removeAllRunners, } cmd.Execute(&cli.Context{}) postExecuteConfig := configfile.New("./testdata/test-config.toml") err = postExecuteConfig.Load() require.NoError(t, err) for _, runnerName := range tc.removedRunners { _, err = postExecuteConfig.Config().RunnerByName(runnerName) assert.Error(t, err) assert.ErrorContains(t, err, fmt.Sprintf("could not find a runner with the name '%s'", runnerName)) } assert.Len(t, 
postExecuteConfig.Config().Runners, len(tc.remainingRunners)) for _, runnerName := range tc.remainingRunners { _, err = postExecuteConfig.Config().RunnerByName(runnerName) assert.NoError(t, err) } }) } } ================================================ FILE: commands/user_mode_warning.go ================================================ package commands import ( "os" "runtime" "github.com/sirupsen/logrus" ) func userModeWarning(withRun bool) { logrus.WithFields(logrus.Fields{ "GOOS": runtime.GOOS, "uid": os.Getuid(), }).Debugln("Checking runtime mode") // everything is supported on windows if runtime.GOOS == osTypeWindows { return } systemMode := os.Getuid() == 0 // We support services on Linux, Windows and Darwin noServices := runtime.GOOS != osTypeLinux && runtime.GOOS != osTypeDarwin // We don't support services installed as an User on Linux noUserService := !systemMode && runtime.GOOS == osTypeLinux if systemMode { logrus.Infoln("Running in system-mode.") } else { logrus.Warningln("Running in user-mode.") } if withRun { if noServices { logrus.Warningln("You need to manually start builds processing:") logrus.Warningln("$ gitlab-runner run") } else if noUserService { logrus.Warningln("The user-mode requires you to manually start builds processing:") logrus.Warningln("$ gitlab-runner run") } } if !systemMode { logrus.Warningln("Use sudo for system-mode:") logrus.Warningln("$ sudo gitlab-runner...") } logrus.Infoln("") } ================================================ FILE: commands/verify.go ================================================ package commands import ( "errors" "github.com/sirupsen/logrus" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/commands/internal/configfile" "gitlab.com/gitlab-org/gitlab-runner/common" ) type VerifyCommand struct { common.RunnerCredentials network common.Network ConfigFile string `short:"c" long:"config" env:"CONFIG_FILE" description:"Config file"` Name string `toml:"name" json:"name" short:"n" long:"name" 
description:"Name of the runner you wish to verify"` DeleteNonExisting bool `long:"delete" description:"Delete no longer existing runners?"` } func NewVerifyCommand(n common.Network) cli.Command { return common.NewCommand("verify", "verify all registered runners", &VerifyCommand{ network: n, }) } //nolint:gocognit func (c *VerifyCommand) Execute(context *cli.Context) { userModeWarning(true) var hasSelector = c.Name != "" || c.RunnerCredentials.URL != "" || c.RunnerCredentials.Token != "" cfg := configfile.New(c.ConfigFile) var unverified int if err := cfg.Load(configfile.WithMutateOnLoad(func(cfg *common.Config) error { var ok []*common.RunnerConfig var verified int for _, runner := range cfg.Runners { if !hasSelector || runner.Name == c.Name || runner.RunnerCredentials.SameAs(&c.RunnerCredentials) { verified++ if c.network.VerifyRunner(*runner, runner.SystemID) == nil { unverified++ continue } } ok = append(ok, runner) } // update config runners cfg.Runners = ok if hasSelector && verified == 0 { return errors.New("no runner matches the filtering parameters") } return nil })); err != nil { logrus.Fatalln(err) } // check if anything changed if unverified == 0 { return } if !c.DeleteNonExisting { logrus.Fatalln("Failed to verify runners") return } // save config file if err := cfg.Save(); err != nil { logrus.Fatalln("Failed to update", c.ConfigFile, err) } logrus.Println("Updated", c.ConfigFile) } ================================================ FILE: commands/wrapper.go ================================================ package commands import ( "context" "errors" "fmt" "net" "net/url" "os" "os/signal" "syscall" "time" "github.com/sirupsen/logrus" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/helpers/runner_wrapper" "gitlab.com/gitlab-org/gitlab-runner/helpers/runner_wrapper/api/server" ) const ( defaultWrapperGRPCListen = "tcp://localhost:7777" ) var ( errFailedToParseGRPCAddress = errors.New("failed to 
parse grpc-listen address") errUnsupportedGRPCAddressScheme = errors.New("unsupported grpc-listen address scheme") ) type logHook struct{} func (h *logHook) Levels() []logrus.Level { return logrus.AllLevels } func (h *logHook) Fire(e *logrus.Entry) error { e.Message = "[WRAPPER] " + e.Message return nil } type RunnerWrapperCommand struct { GRPCListen string `long:"grpc-listen"` ProcessTerminationTimeout time.Duration `long:"process-termination-timeout"` } func NewRunnerWrapperCommand() cli.Command { return common.NewCommand( "wrapper", "start multi runner service wrapped with gRPC manager server", &RunnerWrapperCommand{ GRPCListen: defaultWrapperGRPCListen, ProcessTerminationTimeout: runner_wrapper.DefaultTerminationTimeout, }, ) } func (c *RunnerWrapperCommand) Execute(cctx *cli.Context) { logrus.AddHook(new(logHook)) log := logrus.WithField("wrapper", true) grpcLog := log.WithField("grpc-listen-addr", c.GRPCListen) path, err := os.Executable() if err != nil { log.WithError(err).Fatal("Failed to get executable path") } l, err := c.createListener() if err != nil { grpcLog.WithError(err).Fatal("Failed to create listener") } ctx, cancelFn := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM, syscall.SIGQUIT) defer cancelFn() w := runner_wrapper.New(log, path, cctx.Args()) w.SetTerminationTimeout(c.ProcessTerminationTimeout) srv := server.New(grpcLog, w) go srv.Listen(l) err = w.Run(ctx) if err != nil { log.WithError(err).Fatal("Failed while executing wrapped command") } srv.Stop() log.Info("All wrapper tasks finished. 
See you!") } func (c *RunnerWrapperCommand) createListener() (net.Listener, error) { uri, err := url.ParseRequestURI(c.GRPCListen) if err != nil { return nil, fmt.Errorf("%w: %w", errFailedToParseGRPCAddress, err) } switch uri.Scheme { case "unix": return net.Listen("unix", uri.Path) case "tcp": return net.Listen("tcp", uri.Host) default: return nil, fmt.Errorf("%w: %s", errUnsupportedGRPCAddressScheme, uri.Scheme) } } ================================================ FILE: commands/wrapper_test.go ================================================ //go:build !integration package commands import ( "fmt" "net" "os" "path/filepath" "runtime" "testing" "github.com/stretchr/testify/assert" ) func TestRunnerWrapperCommand_createListener(t *testing.T) { testSocketPath := filepath.Join(t.TempDir(), "test.sock") skipOnWindows := func(t *testing.T) { if runtime.GOOS == "windows" { t.Skip("Test doesn't work reliably on Windows (unix socket usage)") } } tests := map[string]struct { skip func(t *testing.T) grpcAddress string expectedNetwork string expectedAddress string assertError func(t *testing.T, err error) }{ "empty address": { grpcAddress: "", assertError: func(t *testing.T, err error) { assert.ErrorIs(t, err, errFailedToParseGRPCAddress) }, }, "proper unix socket with full scheme - unix://": { skip: skipOnWindows, grpcAddress: fmt.Sprintf("unix://%s", testSocketPath), expectedNetwork: "unix", expectedAddress: testSocketPath, }, "proper unix socket - unix:": { skip: skipOnWindows, grpcAddress: fmt.Sprintf("unix:%s", testSocketPath), expectedNetwork: "unix", expectedAddress: testSocketPath, }, "invalid unix socket": { skip: skipOnWindows, grpcAddress: fmt.Sprintf("unix:/%s", testSocketPath), assertError: func(t *testing.T, err error) { var eerr *net.OpError if assert.ErrorAs(t, err, &eerr) { assert.Equal(t, "unix", eerr.Net) assert.Contains(t, testSocketPath, eerr.Addr.String()) var eeerr *os.SyscallError if assert.ErrorAs(t, eerr, &eeerr) { assert.Equal(t, "bind", 
eeerr.Syscall)
					}
				}
			},
		},
		"proper tcp socket": {
			grpcAddress:     "tcp://127.0.0.1:1234",
			expectedNetwork: "tcp",
			expectedAddress: "127.0.0.1:1234",
		},
		"invalid tcp socket": {
			grpcAddress: "tcp://1:1234",
			assertError: func(t *testing.T, err error) {
				var eerr *net.OpError
				if assert.ErrorAs(t, err, &eerr) {
					assert.Equal(t, "listen", eerr.Op)
					assert.Equal(t, "tcp", eerr.Net)
				}
			},
		},
		"unsupported scheme": {
			grpcAddress: "udp://127.0.0.1:1234",
			assertError: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, errUnsupportedGRPCAddressScheme)
			},
		},
		"default address": {
			grpcAddress:     defaultWrapperGRPCListen,
			expectedNetwork: "tcp",
			expectedAddress: "127.0.0.1:7777",
		},
	}

	for tn, tc := range tests {
		t.Run(tn, func(t *testing.T) {
			if tc.skip != nil {
				tc.skip(t)
			}

			c := &RunnerWrapperCommand{
				GRPCListen: tc.grpcAddress,
			}

			l, err := c.createListener()
			if tc.assertError != nil {
				tc.assertError(t, err)
				return
			}

			defer func(l net.Listener) {
				if l != nil {
					l.Close()
				}
			}(l)

			assert.NoError(t, err)
			assert.Equal(t, tc.expectedNetwork, l.Addr().Network())
			assert.Equal(t, tc.expectedAddress, l.Addr().String())
		})
	}
}

================================================
FILE: common/allowed_images.go
================================================
package common

import (
	"errors"
	"fmt"

	"github.com/bmatcuk/doublestar/v4"

	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger"
)

// VerifyAllowedImageOptions bundles the inputs for VerifyAllowedImage.
type VerifyAllowedImageOptions struct {
	Image          string
	OptionName     string
	AllowedImages  []string
	InternalImages []string
}

var ErrDisallowedImage = errors.New("disallowed image")

// VerifyAllowedImage checks the requested image against the configured
// allow-list (matched as doublestar glob patterns) and against the runner's
// internal images (matched exactly). An empty allow-list permits any image.
// On rejection, the allow-list is printed to the build log and
// ErrDisallowedImage is returned.
func VerifyAllowedImage(options VerifyAllowedImageOptions, logger buildlogger.Logger) error {
	for _, allowedImage := range options.AllowedImages {
		// match errors are deliberately ignored; a malformed pattern simply never matches
		ok, _ := doublestar.Match(allowedImage, options.Image)
		if ok {
			return nil
		}
	}

	for _, internalImage := range options.InternalImages {
		if internalImage == options.Image {
			return nil
		}
	}

	if len(options.AllowedImages) != 0 {
		logger.Println()
		logger.Errorln(
			fmt.Sprintf("The %q image is not present on list of allowed %s:", options.Image, options.OptionName),
		)
		for _, allowedImage := range options.AllowedImages {
			logger.Println("-", allowedImage)
		}
		logger.Println()
	} else {
		// by default allow to override the image name
		return nil
	}

	logger.Println(
		`Please check runner's allowed_images configuration: ` +
			`https://docs.gitlab.com/runner/configuration/advanced-configuration/`,
	)

	return ErrDisallowedImage
}

================================================
FILE: common/allowed_images_test.go
================================================
//go:build !integration

package common

import (
	"testing"

	"github.com/sirupsen/logrus"
	"github.com/stretchr/testify/assert"

	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger"
)

// allowedImageTestCase describes one image-vs-pattern expectation.
type allowedImageTestCase struct {
	image           string
	allowedImages   []string
	internalImages  []string
	expectedAllowed bool
}

var allowedImageTestCases = []allowedImageTestCase{
	{image: "alpine", allowedImages: []string{"alpine"}, internalImages: []string{}, expectedAllowed: true},
	{image: "alpine", allowedImages: []string{"ubuntu"}, internalImages: []string{}, expectedAllowed: false},
	{image: "library/ruby", allowedImages: []string{"*"}, internalImages: []string{}, expectedAllowed: false},
	{image: "library/ruby", allowedImages: []string{"**/*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "library/ruby", allowedImages: []string{"**/*:*"}, internalImages: []string{}, expectedAllowed: false},
	{image: "library/ruby", allowedImages: []string{"*/*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "library/ruby", allowedImages: []string{"*/*:*"}, internalImages: []string{}, expectedAllowed: false},
	{image: "library/ruby:2.1", allowedImages: []string{"*"}, internalImages: []string{}, expectedAllowed: false},
	{image: "library/ruby:2.1", allowedImages: []string{"**/*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "library/ruby:2.1", allowedImages: []string{"**/*:*"}, internalImages: []string{}, expectedAllowed: true},
	{image:
"library/ruby:2.1", allowedImages: []string{"*/*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "library/ruby:2.1", allowedImages: []string{"*/*:*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "my.registry.tld/group/subgroup/ruby", allowedImages: []string{"*"}, internalImages: []string{}, expectedAllowed: false},
	{image: "my.registry.tld/group/subgroup/ruby", allowedImages: []string{"my.registry.tld/**/*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "my.registry.tld/group/subgroup/ruby", allowedImages: []string{"my.registry.tld/*/*"}, internalImages: []string{}, expectedAllowed: false},
	{image: "my.registry.tld/group/subgroup/ruby", allowedImages: []string{"my.registry.tld/*/*/*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "my.registry.tld/group/subgroup/ruby:2.1", allowedImages: []string{"*"}, internalImages: []string{}, expectedAllowed: false},
	{image: "my.registry.tld/group/subgroup/ruby:2.1", allowedImages: []string{"my.registry.tld/**/*:*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "my.registry.tld/group/subgroup/ruby:2.1", allowedImages: []string{"my.registry.tld/*/*/*:*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "my.registry.tld/group/subgroup/ruby:2.1", allowedImages: []string{"my.registry.tld/*/*:*"}, internalImages: []string{}, expectedAllowed: false},
	{image: "my.registry.tld/library/ruby", allowedImages: []string{"*"}, internalImages: []string{}, expectedAllowed: false},
	{image: "my.registry.tld/library/ruby", allowedImages: []string{"my.registry.tld/**/*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "my.registry.tld/library/ruby", allowedImages: []string{"my.registry.tld/*/*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "my.registry.tld/library/ruby:2.1", allowedImages: []string{"*"}, internalImages: []string{}, expectedAllowed: false},
	{image: "my.registry.tld/library/ruby:2.1", allowedImages: []string{"my.registry.tld/**/*:*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "my.registry.tld/library/ruby:2.1", allowedImages: []string{"my.registry.tld/*/*:*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "my.registry.tld/ruby", allowedImages: []string{"*"}, internalImages: []string{}, expectedAllowed: false},
	{image: "my.registry.tld/ruby:2.1", allowedImages: []string{"*"}, internalImages: []string{}, expectedAllowed: false},
	{image: "ruby", allowedImages: []string{"*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "ruby", allowedImages: []string{"**/*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "ruby:2.1", allowedImages: []string{"*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "ruby:2.1", allowedImages: []string{"**/*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "ruby:latest", allowedImages: []string{"*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "ruby:latest", allowedImages: []string{"**/*"}, internalImages: []string{}, expectedAllowed: true},
	{image: "gitlab/gitlab-runner-helper", allowedImages: []string{"alpine"}, internalImages: []string{"gitlab/gitlab-runner-helper"}, expectedAllowed: true},
	{image: "alpine", allowedImages: []string{}, internalImages: []string{}, expectedAllowed: true},
}

// TestVerifyAllowedImage runs the shared table against VerifyAllowedImage.
func TestVerifyAllowedImage(t *testing.T) {
	logger := buildlogger.New(nil, logrus.WithFields(logrus.Fields{}), buildlogger.Options{})

	for _, test := range allowedImageTestCases {
		t.Run(test.image, func(t *testing.T) {
			options := VerifyAllowedImageOptions{
				Image:          test.image,
				OptionName:     "",
				AllowedImages:  test.allowedImages,
				InternalImages: test.internalImages,
			}

			err := VerifyAllowedImage(options, logger)

			if test.expectedAllowed {
				assert.NoError(t, err, "%q must be allowed by %q", test.image, test.allowedImages)
			} else {
				assert.Error(t, err, "%q must not be allowed by %q", test.image, test.allowedImages)
			}
		})
	}
}
================================================
FILE: common/build.go
================================================
package common

import (
	"context"
	"crypto/sha256"
	"encoding/hex"
	"errors"
	"fmt"
	"io"
	"net/url"
	"os"
	"path"
	"path/filepath"
	"runtime/debug"
	"strconv"
	"strings"
	"sync"
	"time"
	"unicode"

	"github.com/jpillora/backoff"
	"github.com/sirupsen/logrus"

	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger"
	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/gitlab-runner/helpers"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/dns"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/tls"
	url_helpers "gitlab.com/gitlab-org/gitlab-runner/helpers/url"
	"gitlab.com/gitlab-org/gitlab-runner/referees"
	"gitlab.com/gitlab-org/gitlab-runner/session"
	"gitlab.com/gitlab-org/gitlab-runner/session/proxy"
	"gitlab.com/gitlab-org/gitlab-runner/session/terminal"
	"gitlab.com/gitlab-org/gitlab-runner/steps"
	"gitlab.com/gitlab-org/step-runner/pkg/api/client"
	"gitlab.com/gitlab-org/step-runner/schema/v1"
)

// BuildRuntimeState represents the lifecycle state of a running build.
type BuildRuntimeState string

func (s BuildRuntimeState) String() string {
	return string(s)
}

const (
	BuildRunStatePending      BuildRuntimeState = "pending"
	BuildRunRuntimeRunning    BuildRuntimeState = "running"
	BuildRunRuntimeSuccess    BuildRuntimeState = "success"
	BuildRunRuntimeFailed     BuildRuntimeState = "failed"
	BuildRunRuntimeCanceled   BuildRuntimeState = "canceled"
	BuildRunRuntimeTerminated BuildRuntimeState = "terminated"
	BuildRunRuntimeTimedout   BuildRuntimeState = "timedout"
)

type (
	BuildStage       string
	JobExecutionMode string
)

// WithContext is an interface that some Executor's ExecutorData will implement as a
// mechanism for extending the build context and canceling if the executor cannot
// complete the job. For example, the Autoscaler Executor will cancel the returned
// context if the instance backing the job disappears.
type WithContext interface {
	WithContext(context.Context) (context.Context, context.CancelFunc)
}

const (
	BuildStageResolveSecrets           BuildStage = "resolve_secrets"
	BuildStagePrepareExecutor          BuildStage = "prepare_executor"
	BuildStagePrepare                  BuildStage = "prepare_script"
	BuildStageGetSources               BuildStage = "get_sources"
	BuildStageClearWorktree            BuildStage = "clear_worktree"
	BuildStageRestoreCache             BuildStage = "restore_cache"
	BuildStageDownloadArtifacts        BuildStage = "download_artifacts"
	BuildStageAfterScript              BuildStage = "after_script"
	BuildStageArchiveOnSuccessCache    BuildStage = "archive_cache"
	BuildStageArchiveOnFailureCache    BuildStage = "archive_cache_on_failure"
	BuildStageUploadOnSuccessArtifacts BuildStage = "upload_artifacts_on_success"
	BuildStageUploadOnFailureArtifacts BuildStage = "upload_artifacts_on_failure"
	// We only renamed the variable name here as a first step to renaming the stage.
	// a separate issue will address changing the variable value, since it affects the
	// contract with the custom executor: https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28152.
	BuildStageCleanup BuildStage = "cleanup_file_variables"
)

// OnBuildStageFn is a nil-safe callback invoked for build-stage events.
type OnBuildStageFn func(stage BuildStage)

// Call invokes the callback if it is non-nil.
func (fn OnBuildStageFn) Call(stage BuildStage) {
	if fn != nil {
		fn(stage)
	}
}

const (
	JobExecutionModeSteps       JobExecutionMode = "steps"
	JobExecutionModeTraditional JobExecutionMode = "traditional"
	JobExecutionModeUnknown     JobExecutionMode = "unknown"
)

// OrUnknown maps the empty mode to JobExecutionModeUnknown.
func (m JobExecutionMode) OrUnknown() JobExecutionMode {
	if m == "" {
		return JobExecutionModeUnknown
	}
	return m
}

// OnJobExecutionModeDispatchedFn is a nil-safe callback reporting which
// execution mode (steps vs traditional) a job was dispatched with.
type OnJobExecutionModeDispatchedFn func(mode JobExecutionMode, executor string)

// Call invokes the callback if it is non-nil.
func (fn OnJobExecutionModeDispatchedFn) Call(mode JobExecutionMode, executor string) {
	if fn != nil {
		fn(mode, executor)
	}
}

// staticBuildStages is a list of BuildStages which are executed on every build
// and are not dynamically generated from steps.
var staticBuildStages = []BuildStage{
	BuildStagePrepare,
	BuildStageGetSources,
	BuildStageRestoreCache,
	BuildStageDownloadArtifacts,
	BuildStageAfterScript,
	BuildStageArchiveOnSuccessCache,
	BuildStageArchiveOnFailureCache,
	BuildStageUploadOnSuccessArtifacts,
	BuildStageUploadOnFailureArtifacts,
	BuildStageCleanup,
}

var (
	ErrJobCanceled      = errors.New("canceled")
	ErrJobScriptTimeout = errors.New("script timeout")
)

const (
	ExecutorJobSectionAttempts = "EXECUTOR_JOB_SECTION_ATTEMPTS"
)

// ErrSkipBuildStage is returned when there's nothing to be executed for the
// build stage.
var ErrSkipBuildStage = errors.New("skip build stage")

// Build carries all state for a single job execution on this runner.
type Build struct {
	spec.Job `yaml:",inline" inputs:"expand"`

	SystemInterrupt  chan os.Signal   `json:"-" yaml:"-"`
	RootDir          string           `json:"-" yaml:"-"`
	BuildDir         string           `json:"-" yaml:"-"`
	CacheDir         string           `json:"-" yaml:"-"`
	Hostname         string           `json:"-" yaml:"-"`
	Runner           *RunnerConfig    `json:"runner"`
	ExecutorData     ExecutorData
	ExecutorFeatures FeaturesInfo     `json:"-" yaml:"-"`
	ExecutorProvider ExecutorProvider `json:"-" yaml:"-"`

	SafeDirectoryCheckout bool `json:"-" yaml:"-"`

	// Unique ID for all running builds on this runner
	RunnerID int `json:"runner_id"`

	// Unique ID for all running builds on this runner and this project
	ProjectRunnerID int `json:"project_runner_id"`

	// CurrentStage(), CurrentState() and CurrentExecutorStage() are called
	// from the metrics go routine whilst a build is in-flight, so access
	// to these variables requires a lock.
	statusLock             sync.Mutex
	currentStage           BuildStage
	currentState           BuildRuntimeState
	executorStageResolver  func() ExecutorStage
	stepDispatchedInScript bool
	failureReason          spec.JobFailureReason

	secretsResolver func(l logger, registry SecretResolverRegistry, featureFlagOn func(string) bool) (SecretsResolver, error)

	Session *session.Session

	logger buildlogger.Logger

	allVariables     spec.Variables
	secretsVariables spec.Variables

	buildSettings *BuildSettings

	startedAt  time.Time
	finishedAt time.Time

	Referees         []referees.Referee
	ArtifactUploader func(config JobCredentials, bodyProvider ContentProvider, options ArtifactsOptions) (UploadState, string)

	urlHelper *url_helpers.GitAuthHelper

	OnBuildStageStartFn OnBuildStageFn
	OnBuildStageEndFn   OnBuildStageFn

	OnJobExecutionModeDispatchedFn OnJobExecutionModeDispatchedFn
}

// setCurrentStage records the stage under statusLock (read by the metrics goroutine).
func (b *Build) setCurrentStage(stage BuildStage) {
	b.statusLock.Lock()
	defer b.statusLock.Unlock()

	b.currentStage = stage
}

// CurrentStage returns the stage currently being executed.
func (b *Build) CurrentStage() BuildStage {
	b.statusLock.Lock()
	defer b.statusLock.Unlock()

	return b.currentStage
}

func (b *Build) setCurrentState(state BuildRuntimeState) {
	b.statusLock.Lock()
	defer b.statusLock.Unlock()

	b.currentState = state
}

// setCurrentStateIf transitions to newState only when the current state
// still equals existingState (compare-and-set under the lock).
func (b *Build) setCurrentStateIf(existingState BuildRuntimeState, newState BuildRuntimeState) {
	b.statusLock.Lock()
	defer b.statusLock.Unlock()

	if b.currentState != existingState {
		return
	}

	b.currentState = newState
}

func (b *Build) markStepDispatchedInScript() {
	b.statusLock.Lock()
	defer b.statusLock.Unlock()

	b.stepDispatchedInScript = true
}

// DispatchedJobExecutionMode reports whether the job ran via step-runner
// ("steps") or the legacy shell-script path ("traditional").
func (b *Build) DispatchedJobExecutionMode() JobExecutionMode {
	b.statusLock.Lock()
	defer b.statusLock.Unlock()

	if b.stepDispatchedInScript {
		return JobExecutionModeSteps
	}

	return JobExecutionModeTraditional
}

func (b *Build) recordDispatchedExecutionMode() {
	b.OnJobExecutionModeDispatchedFn.Call(b.DispatchedJobExecutionMode(), b.Runner.Executor)
}

// CurrentState returns the build's runtime state.
func (b *Build) CurrentState() BuildRuntimeState {
	b.statusLock.Lock()
	defer b.statusLock.Unlock()

	return b.currentState
}

// FailureReason returns the recorded job failure reason.
// NOTE(review): read without statusLock, unlike the sibling accessors — confirm intended.
func (b *Build) FailureReason() spec.JobFailureReason {
	return b.failureReason
}

// Log returns a logrus entry annotated with job/project/runner identity fields.
func (b *Build) Log() *logrus.Entry {
	l := b.Runner.Log().
		WithFields(logrus.Fields{
			"job":               b.ID,
			"pipeline_id":       b.JobInfo.PipelineID,
			"project":           b.JobInfo.ProjectID,
			"project_full_path": b.JobInfo.ProjectFullPath,
			"namespace_id":      b.JobInfo.NamespaceID,
			"root_namespace_id": b.JobInfo.RootNamespaceID,
			"organization_id":   b.JobInfo.OrganizationID,
			"gitlab_user_id":    b.JobInfo.UserID,
		})
	if b.JobInfo.ScopedUserID != nil {
		l = l.WithField("gitlab_scoped_user_id", *b.JobInfo.ScopedUserID)
	}

	// this is only set after the prepare stage has run
	if b.Hostname != "" {
		l = l.WithField("name", b.Hostname)
	}

	// executor-specific log fields
	for k, v := range GetExecutorLogFields(b.ExecutorData) {
		l = l.WithField(k, v)
	}

	return l
}

// ProjectUniqueShortName returns a unique name for the current build.
// It is similar to ProjectUniqueName but removes unnecessary string
// and adds the current BuildID as an additional composition to the unique string
func (b *Build) ProjectUniqueShortName() string {
	projectUniqueName := fmt.Sprintf(
		"runner-%s-%d-%d-%d",
		b.Runner.ShortDescription(),
		b.JobInfo.ProjectID,
		b.ProjectRunnerID,
		b.ID,
	)

	return dns.MakeRFC1123Compatible(projectUniqueName)
}

// ProjectUniqueName returns a unique name for a runner && project. It uses the runner's short description, thus uses a
// truncated token in it's human readable form.
func (b *Build) ProjectUniqueName() string {
	projectUniqueName := fmt.Sprintf(
		"runner-%s-project-%d-concurrent-%d",
		b.Runner.ShortDescription(),
		b.JobInfo.ProjectID,
		b.ProjectRunnerID,
	)

	return dns.MakeRFC1123Compatible(projectUniqueName)
}

// ProjectRealUniqueName is similar to its sister methods, and returns a unique name for the runner && project.
// It uses the following parts to generate a truncated¹ sha256 sum:
// - the runner's full token
// - the runner's system ID
// - the project ID
// - the project runner ID
//
// With that the name is not susceptible to name clashes, when tokens are similar enough and therefore are the same
// after getting the runner's short description (i.e. after the token has been truncated)
//
// ¹ we truncate the resulting sum from original 32 bytes to 16 bytes, to give us and users a shorter name, thus shorter
// volume names when used in the docker volume manager. Truncating to 16 bytes (32 chars when hex encoded, the same
// length as an hex encoded md5sum) is cryptographically sound, it's still strong against collisions.
func (b *Build) ProjectRealUniqueName() string {
	const byteLen = 16
	data := fmt.Sprintf("%s-%s-%d-%d",
		b.Runner.GetToken(),
		b.Runner.GetSystemID(),
		b.JobInfo.ProjectID,
		b.ProjectRunnerID,
	)
	sum := sha256.Sum256([]byte(data))
	return "runner-" + hex.EncodeToString(sum[:byteLen])
}

// GetNetworkName returns the per-build network name (same as the short unique name).
func (b *Build) GetNetworkName() string {
	return b.ProjectUniqueShortName()
}

// ProjectSlug derives a filesystem-safe "<group>/<project>" slug from the
// repository URL. It rejects bare URIs, "." and any path containing "..".
func (b *Build) ProjectSlug() (string, error) {
	url, err := url.Parse(b.GitInfo.RepoURL)
	if err != nil {
		return "", err
	}
	if url.Host == "" {
		return "", errors.New("only URI reference supported")
	}

	slug := url.Path
	slug = strings.TrimSuffix(slug, ".git")
	slug = path.Clean(slug)
	if slug == "." {
		return "", errors.New("invalid path")
	}
	if strings.Contains(slug, "..") {
		return "", errors.New("it doesn't look like a valid path")
	}
	return slug, nil
}

// ProjectUniqueDir returns the per-project checkout directory, falling back
// to "project-<id>" when the slug cannot be derived from the repo URL.
func (b *Build) ProjectUniqueDir(sharedDir bool) string {
	dir, err := b.ProjectSlug()
	if err != nil {
		dir = fmt.Sprintf("project-%d", b.JobInfo.ProjectID)
	}

	// for shared dirs path is constructed like this:
	// /runner-short-id/concurrent-project-id/group-name/project-name/
	// ex./01234567/0/group/repo/
	if sharedDir {
		dir = path.Join(
			b.Runner.ShortDescription(),
			fmt.Sprintf("%d", b.ProjectRunnerID),
			dir,
		)
	}

	if b.GetGitStrategy() == GitEmpty {
		dir += "-empty"
	}

	return dir
}

func (b *Build) FullProjectDir() string {
	return helpers.ToSlash(b.BuildDir)
}

func (b *Build) TmpProjectDir() string {
	return helpers.ToSlash(b.BuildDir) + ".tmp"
}

// BuildStages returns a list of all BuildStages which will be executed.
// Not in the order of execution.
func (b *Build) BuildStages() []BuildStage {
	stages := make([]BuildStage, len(staticBuildStages))
	copy(stages, staticBuildStages)
	for _, s := range b.Steps {
		if s.Name == spec.StepNameAfterScript {
			continue
		}
		stages = append(stages, StepToBuildStage(s))
	}
	return stages
}

// getCustomBuildDir validates a GIT_CLONE_PATH override: it must be enabled
// via the custom_build_dir feature and resolve inside rootDir.
func (b *Build) getCustomBuildDir(rootDir, dir string, customBuildDirEnabled, sharedDir bool) (string, error) {
	if dir == "" {
		return path.Join(rootDir, b.ProjectUniqueDir(sharedDir)), nil
	}

	if !customBuildDirEnabled {
		return "", MakeBuildError("setting GIT_CLONE_PATH is not allowed, enable `custom_build_dir` feature")
	}

	// See: https://gitlab.com/gitlab-org/gitlab-runner/-/issues/25913
	relDir, err := filepath.Rel(helpers.ToSlash(rootDir), helpers.ToSlash(dir))
	if err != nil {
		return "", &BuildError{Inner: err}
	}
	if strings.HasPrefix(relDir, "..") {
		return "", MakeBuildError("the GIT_CLONE_PATH=%q has to be within %q", dir, rootDir)
	}

	return path.Clean(dir), nil
}

// StartBuild initializes the build's directories and variables before execution.
func (b *Build) StartBuild(
	rootDir, cacheDir string,
	customBuildDirEnabled, sharedDir, safeDirectoryCheckout bool,
) error {
	if rootDir == "" {
return MakeBuildError("the builds_dir is not configured")
	}

	if cacheDir == "" {
		return MakeBuildError("the cache_dir is not configured")
	}

	b.SafeDirectoryCheckout = safeDirectoryCheckout

	// We set RootDir and invalidate variables
	// to be able to use CI_BUILDS_DIR
	b.RootDir = rootDir
	b.CacheDir = path.Join(cacheDir, b.ProjectUniqueDir(false))
	b.RefreshAllVariables()

	var err error
	b.BuildDir, err = b.getCustomBuildDir(b.RootDir, b.Settings().GitClonePath, customBuildDirEnabled, sharedDir)
	if err != nil {
		return err
	}

	// modify copied config for any feature flag
	if b.Runner.Cache != nil {
		switch {
		case b.Runner.Cache.Type == "gcs" && !b.IsFeatureFlagOn(featureflags.UseLegacyGCSCacheAdapter):
			b.Runner.Cache.Type = "gcsv2"
		case b.Runner.Cache.Type == "s3" && !b.IsFeatureFlagOn(featureflags.UseLegacyS3CacheAdapter):
			b.Runner.Cache.Type = "s3v2"
		}
	}

	// We invalidate variables to be able to use
	// CI_CACHE_DIR and CI_PROJECT_DIR
	b.RefreshAllVariables()

	return nil
}

// executeStepStage runs the given steps through step-runner for one build
// stage, wrapping its trace output into the build log.
//nolint:gocognit
func (b *Build) executeStepStage(ctx context.Context, connector steps.Connector, buildStage BuildStage, req []schema.Step, registerCancel func(context.CancelFunc)) error {
	if ctx.Err() != nil {
		return ctx.Err()
	}

	b.OnBuildStageStartFn.Call(buildStage)
	defer b.OnBuildStageEndFn.Call(buildStage)

	b.setCurrentStage(buildStage)
	b.Log().WithField("build_stage", buildStage).Debug("Executing build stage")

	section := helpers.BuildSection{
		Name:        string(buildStage),
		SkipMetrics: !b.Job.Features.TraceSections,
		Run: func() error {
			msg := fmt.Sprintf(
				"%s%s%s",
				helpers.ANSI_BOLD_CYAN,
				GetStageDescription(buildStage),
				helpers.ANSI_RESET,
			)
			b.logger.Println(msg)

			// todo: step-runner should eventually:
			// - format its own logs to the Runner log spec
			// - provides its own timestamps and mask its own secrets
			// for now though, we wrap its logs providing this, and treat everything as stdout
			stdout := b.logger.Stream(buildlogger.StreamWorkLevel, buildlogger.Stdout)
			defer stdout.Close()

			return wrapStepStageErr(steps.Execute(ctx, steps.Options{
				Connector: connector,
				JobInfo: steps.JobInfo{
					ID:         b.ID,
					Timeout:    b.GetBuildTimeout(),
					ProjectDir: b.FullProjectDir(),
					Variables:  b.GetAllVariables(),
				},
				Steps:          req,
				Trace:          stdout,
				RegisterCancel: registerCancel,
				Log:            b.Log(),
			}))
		},
	}

	return section.Execute(&b.logger)
}

// wrapStepStageErr translates step-runner errors into BuildErrors with the
// appropriate failure reason and, where possible, an exit code.
func wrapStepStageErr(err error) error {
	if err == nil {
		return nil
	}
	if errors.Is(err, steps.ErrNoStepRunnerButOkay) {
		return nil
	}

	berr := &BuildError{Inner: err}

	// Classify step-runner internal failures (gRPC handler panics and
	// ErrorInternal job statuses) as ScriptFailure rather than
	// RunnerSystemFailure: a malicious job could deliberately trigger either
	// path to forge a RunnerSystemFailure and evade job-failure accounting.
	var cierr *steps.ClientInternalError
	if errors.As(err, &cierr) {
		berr.FailureReason = ScriptFailure
	}

	var cserr *steps.ClientStatusError
	if errors.As(err, &cserr) {
		switch cserr.Status.ErrorKind {
		case client.ErrorInternal, client.ErrorStepFailure:
			berr.FailureReason = ScriptFailure
		case client.ErrorCancelled:
			berr.FailureReason = JobCanceled
			berr.Inner = ErrJobCanceled
		case client.ErrorTimeout:
			berr.FailureReason = JobExecutionTimeout
		case client.ErrorUnknown:
			berr.FailureReason = UnknownFailure
		}
	}

	// hack: for now, we parse the exit code from the error response
	// later we might want to introduce a proper exit code from the step-runner
	// https://gitlab.com/gitlab-org/step-runner/-/work_items/349
	if before, code, ok := strings.Cut(err.Error(), "exit status"); ok {
		if exitCode, err := strconv.Atoi(strings.TrimSpace(code)); err == nil {
			berr.ExitCode = NormalizeExitCode(exitCode)
			// Normalize "exit status N" (Go's exec.ExitError format) to "exit code N"
			// to match the legacy Docker executor format (wait.go uses
			// fmt.Errorf("exit code %d", statusCode)). The prefix (e.g. "step release: ")
			// is preserved so the trace message retains the failing step name.
			berr.Inner = fmt.Errorf("%sexit code %d", strings.TrimRightFunc(before, unicode.IsSpace), exitCode)
		}
	}

	// If no exit code was found via "exit status" parsing, propagate the exit
	// code from an inner BuildError if one exists. This handles the case where
	// Docker's Connect() returns BuildError{ExitCode: N} (container exits before
	// step-runner is ready) — that path already uses "exit code N" format so the
	// string-cut above does not match.
	if berr.ExitCode == 0 {
		var innerBuildErr *BuildError
		if errors.As(err, &innerBuildErr) && innerBuildErr.ExitCode != 0 {
			berr.ExitCode = innerBuildErr.ExitCode
		}
	}

	return berr
}

// executeStage runs one build stage, dispatching to step-runner when native
// steps handle it, otherwise generating and running a shell script.
//nolint:gocognit
func (b *Build) executeStage(ctx context.Context, buildStage BuildStage, executor Executor) error {
	if connector, ok := executor.(steps.Connector); b.UseNativeSteps() && ok {
		if handled, steps := stepDispatch(b, executor, buildStage); handled {
			b.markStepDispatchedInScript()

			err := b.executeStepStage(ctx, connector, buildStage, steps, nil)

			// The defer below is never reached for the step-dispatch path,
			// so we replicate its timeout warning here. We check ctx.Err()
			// rather than the returned error because gRPC wraps deadline
			// exceeded as a status error that does not unwrap to
			// context.DeadlineExceeded.
			if err != nil && errors.Is(ctx.Err(), context.DeadlineExceeded) {
				b.logger.Warningln(
					string(buildStage) + " could not run to completion because the timeout was exceeded. " +
						"For more control over job and script timeouts see: " +
						"https://docs.gitlab.com/ci/runners/configure_runners/#set-script-and-after_script-timeouts")
			}

			return err
		}
	}

	if ctx.Err() != nil {
		return ctx.Err()
	}

	b.OnBuildStageStartFn.Call(buildStage)
	defer b.OnBuildStageEndFn.Call(buildStage)

	b.setCurrentStage(buildStage)
	b.Log().WithField("build_stage", buildStage).Debug("Executing build stage")

	defer func() {
		if errors.Is(ctx.Err(), context.DeadlineExceeded) {
			b.logger.Warningln(
				string(buildStage) + " could not run to completion because the timeout was exceeded. " +
					"For more control over job and script timeouts see: " +
					"https://docs.gitlab.com/ci/runners/configure_runners/#set-script-and-after_script-timeouts")
		}
	}()

	shell := executor.Shell()
	if shell == nil {
		return errors.New("no shell defined")
	}

	script, err := GenerateShellScript(ctx, buildStage, *shell)
	if errors.Is(err, ErrSkipBuildStage) {
		if b.IsFeatureFlagOn(featureflags.SkipNoOpBuildStages) {
			b.Log().WithField("build_stage", buildStage).Debug("Skipping stage (nothing to do)")
			return nil
		}

		err = nil
	}
	if err != nil {
		return err
	}

	// Nothing to execute
	if script == "" {
		return nil
	}

	cmd := ExecutorCommand{
		Context:    ctx,
		Script:     script,
		Stage:      buildStage,
		Predefined: getPredefinedEnv(buildStage),
	}

	section := helpers.BuildSection{
		Name:        string(buildStage),
		SkipMetrics: !b.Job.Features.TraceSections,
		Run: func() error {
			msg := fmt.Sprintf(
				"%s%s%s",
				helpers.ANSI_BOLD_CYAN,
				GetStageDescription(buildStage),
				helpers.ANSI_RESET,
			)
			b.logger.Println(msg)
			return executor.Run(cmd)
		},
	}

	return section.Execute(&b.logger)
}

// getPredefinedEnv returns whether a stage should be executed on
// the predefined environment that GitLab Runner provided.
func getPredefinedEnv(buildStage BuildStage) bool {
	env := map[BuildStage]bool{
		BuildStagePrepare:                  true,
		BuildStageGetSources:               true,
		BuildStageClearWorktree:            true,
		BuildStageRestoreCache:             true,
		BuildStageDownloadArtifacts:        true,
		BuildStageAfterScript:              false,
		BuildStageArchiveOnSuccessCache:    true,
		BuildStageArchiveOnFailureCache:    true,
		BuildStageUploadOnFailureArtifacts: true,
		BuildStageUploadOnSuccessArtifacts: true,
		BuildStageCleanup:                  true,
	}

	// unknown (dynamically generated) stages run on the user image, not the predefined one
	predefined, ok := env[buildStage]
	if !ok {
		return false
	}

	return predefined
}

// GetStageDescription returns the human-readable build-log heading for a stage.
func GetStageDescription(stage BuildStage) string {
	descriptions := map[BuildStage]string{
		BuildStagePrepare:                  "Preparing environment",
		BuildStageGetSources:               "Getting source from Git repository",
		BuildStageClearWorktree:            "Deleting all tracked and untracked files due to source fetch failure",
		BuildStageRestoreCache:             "Restoring cache",
		BuildStageDownloadArtifacts:        "Downloading artifacts",
		BuildStageAfterScript:              "Running after_script",
		BuildStageArchiveOnSuccessCache:    "Saving cache for successful job",
		BuildStageArchiveOnFailureCache:    "Saving cache for failed job",
		BuildStageUploadOnFailureArtifacts: "Uploading artifacts for failed job",
		BuildStageUploadOnSuccessArtifacts: "Uploading artifacts for successful job",
		BuildStageCleanup:                  "Cleaning up project directory and file based variables",
	}

	description, ok := descriptions[stage]
	if !ok {
		return fmt.Sprintf("Executing %q stage of the job script", stage)
	}

	return description
}

// executeUploadArtifacts picks the success or failure artifact-upload stage
// based on the job's error state so far.
func (b *Build) executeUploadArtifacts(ctx context.Context, state error, executor Executor) (err error) {
	if state == nil {
		return b.executeStage(ctx, BuildStageUploadOnSuccessArtifacts, executor)
	}

	return b.executeStage(ctx, BuildStageUploadOnFailureArtifacts, executor)
}

// executeArchiveCache picks the success or failure cache-archive stage
// based on the job's error state so far.
func (b *Build) executeArchiveCache(ctx context.Context, state error, executor Executor) (err error) {
	if state == nil {
		return b.executeStage(ctx, BuildStageArchiveOnSuccessCache, executor)
	}

	return b.executeStage(ctx, BuildStageArchiveOnFailureCache, executor)
}

// executeScript drives the whole job body: prepare, user scripts (native
// steps, concrete dispatch, or traditional shell), cache/artifact upload
// and referee tracking.
func (b *Build) executeScript(ctx context.Context, trace JobTrace, executor Executor) error {
	// track job start and create referees
	startTime := time.Now()
	b.createReferees(executor)

	_, hasStepRunnerConnector := executor.(steps.Connector)

	if b.IsFeatureFlagOn(featureflags.UseConcrete) && len(b.Job.Run) == 0 && hasStepRunnerConnector {
		concreteSteps, err := stagesToConcreteStep(ctx, executor)
		if err != nil {
			return err
		}

		// Concrete dispatches the whole job through step-runner; record it
		// as the "steps" execution mode so jobs flowing through this path
		// show up in the gitlab_runner_job_execution_mode_total counter
		// and in trace.Fail data sent to GitLab.
		b.markStepDispatchedInScript()
		defer b.recordDispatchedExecutionMode()

		// Route user cancellation through step-runner's Cancel API so the
		// concrete step's post-cancel phases (e.g. cache/artifact upload)
		// can run. This intentionally replaces the build-ctx cancel
		// configureTrace installed: we want step-runner to drive the
		// graceful shutdown, and the resulting cancelled status maps to
		// JobCanceled via wrapStepStageErr.
		//nolint:errcheck
		err = b.executeStepStage(ctx, executor.(steps.Connector), "concrete", concreteSteps, trace.SetCancelFunc)

		b.executeUploadReferees(ctx, startTime, time.Now())

		return err
	}

	err, cont := b.executePrepareScripts(ctx, executor)
	if !cont {
		return err
	}

	// execute user provided scripts
	//nolint:nestif
	if err == nil {
		defer b.recordDispatchedExecutionMode()

		if b.UseNativeSteps() && len(b.Job.Run) > 0 {
			if !hasStepRunnerConnector {
				return ExecutorStepRunnerConnectNotSupported
			}
			err = b.executeStage(ctx, stepRunBuildStage, executor)
		} else {
			err = b.executeUserScripts(ctx, trace, executor)
		}
	}

	// upload cache, upload artifacts, pick priority error
	err = b.pickPriorityError(
		err,
		b.executeArchiveCache(ctx, err, executor),
		b.executeUploadArtifacts(ctx, err, executor),
	)

	// track job end and execute referees
	b.executeUploadReferees(ctx, startTime, time.Now())

	b.removeFileBasedVariables(ctx, executor)

	return err
}

// executePrepareScripts runs prepare, get-sources (with worktree-clearing
// retries), cache restore and artifact download. The bool result reports
// whether the caller should continue with the rest of the job.
func (b *Build) executePrepareScripts(ctx context.Context, executor Executor) (error, bool) {
	// Prepare stage
	err := b.executeStage(ctx, BuildStagePrepare, executor)
	if err != nil {
		return fmt.Errorf(
			"prepare environment: %w. "+
				"Check https://docs.gitlab.com/runner/shells/#shell-profile-loading for more information",
			err,
		), false
	}

	err = b.attemptExecuteStage(ctx, BuildStageGetSources, executor, b.GetGetSourcesAttempts(), func(attempt int) error {
		if attempt == 1 {
			// If GetSources fails we delete all tracked and untracked files. This is
			// because Git's submodule support has various bugs that cause fetches to
			// fail if submodules have changed.
			return b.executeStage(ctx, BuildStageClearWorktree, executor)
		}
		return nil
	})

	if err == nil {
		err = b.attemptExecuteStage(ctx, BuildStageRestoreCache, executor, b.GetRestoreCacheAttempts(), nil)
	}
	if err == nil {
		err = b.attemptExecuteStage(ctx, BuildStageDownloadArtifacts, executor, b.GetDownloadArtifactsAttempts(), nil)
	}

	return err, true
}

// executeUserScripts runs the job's script steps under the script timeout,
// leaving after_script to its own stage and timeout.
func (b *Build) executeUserScripts(ctx context.Context, trace JobTrace, executor Executor) error {
	var err error

	timeouts := b.getStageTimeoutContexts(ctx,
		stageTimeout{"RUNNER_SCRIPT_TIMEOUT", 0},
		stageTimeout{"RUNNER_AFTER_SCRIPT_TIMEOUT", AfterScriptTimeout})

	scriptCtx, cancel := timeouts["RUNNER_SCRIPT_TIMEOUT"]()
	defer cancel()

	// update trace's cancel function so that the main script can be cancelled,
	// with after_script and later stages to still complete.
	trace.SetCancelFunc(cancel)

	b.printPolicyOptions()

	for _, s := range b.Steps {
		// after_script has a separate BuildStage. See common.BuildStageAfterScript
		if s.Name == spec.StepNameAfterScript {
			continue
		}
		err = b.executeStage(scriptCtx, StepToBuildStage(s), executor)
		if err != nil {
			break
		}
	}

	switch {
	// if parent context is fine but script context was cancelled we ensure the build error
	// failure reason is "canceled".
	case ctx.Err() == nil && errors.Is(scriptCtx.Err(), context.Canceled):
		err = &BuildError{
			Inner:         ErrJobCanceled,
			FailureReason: JobCanceled,
		}
		b.logger.Warningln("script canceled externally (UI, API)")
	// If the parent context reached deadline, don't do anything different than usual.
	// If the script context reached deadline, return the deadline error.
case !errors.Is(ctx.Err(), context.DeadlineExceeded) && errors.Is(scriptCtx.Err(), context.DeadlineExceeded): err = &BuildError{ Inner: fmt.Errorf("%w: %w", ErrJobScriptTimeout, scriptCtx.Err()), FailureReason: JobExecutionTimeout, } } afterScriptCtx, cancel := timeouts["RUNNER_AFTER_SCRIPT_TIMEOUT"]() defer cancel() if afterScriptErr := b.executeAfterScript(afterScriptCtx, err, executor); afterScriptErr != nil { // the parent deadline being exceeded is reported at a later stage, so we // only focus on errors specific to after_script here. if !errors.Is(ctx.Err(), context.DeadlineExceeded) { // By default after-script ignores errors, but this can // be disabled via the AFTER_SCRIPT_IGNORE_ERRORS variable. if b.Settings().AfterScriptIgnoreErrors { b.logger.Warningln("after_script failed, but job will continue unaffected:", afterScriptErr) } else if err == nil { // If there's an existing error don't overwrite it with // the after-script error. err = afterScriptErr } } } return err } func (b *Build) pickPriorityError(jobErr error, archiveCacheErr error, artifactUploadErr error) error { // Use job's errors which came before upload errors as most important to surface if jobErr != nil { return jobErr } // Otherwise, use uploading errors if archiveCacheErr != nil { return archiveCacheErr } return artifactUploadErr } func (b *Build) executeAfterScript(ctx context.Context, err error, executor Executor) error { state, _ := b.runtimeStateAndError(err) b.GetAllVariables().OverwriteKey("CI_JOB_STATUS", spec.Variable{ Key: "CI_JOB_STATUS", Value: string(state), }) return b.executeStage(ctx, BuildStageAfterScript, executor) } // StepToBuildStage returns the BuildStage corresponding to a step. 
func StepToBuildStage(s spec.Step) BuildStage {
	return BuildStage(fmt.Sprintf("step_%s", strings.ToLower(string(s.Name))))
}

// createReferees instantiates the referees configured for this runner so
// they can be executed and uploaded after the job finishes.
func (b *Build) createReferees(executor Executor) {
	b.Referees = referees.CreateReferees(executor, b.Runner.Referees, b.Log())
}

// removeFileBasedVariables runs the cleanup stage that deletes file-based
// variables; failures are logged as warnings and not propagated.
func (b *Build) removeFileBasedVariables(ctx context.Context, executor Executor) {
	err := b.executeStage(ctx, BuildStageCleanup, executor)
	if err != nil {
		b.Log().WithError(err).Warning("Error while executing file based variables removal script")
	}
}

// executeUploadReferees executes every configured referee over the job's
// [startTime, endTime] window and uploads each result as a job artifact.
// Individual referee failures are skipped (best-effort).
func (b *Build) executeUploadReferees(ctx context.Context, startTime, endTime time.Time) {
	if b.Referees == nil || b.ArtifactUploader == nil {
		b.Log().Debug("Skipping referees execution")
		return
	}

	jobCredentials := JobCredentials{
		ID:    b.Job.ID,
		Token: b.Job.Token,
		URL:   b.Runner.RunnerCredentials.URL,
	}

	// execute and upload the results of each referee
	for _, referee := range b.Referees {
		if referee == nil {
			continue
		}

		reader, err := referee.Execute(ctx, startTime, endTime)
		// keep moving even if a subset of the referees have failed
		if err != nil {
			continue
		}

		bodyProvider := StreamProvider{
			ReaderFactory: func() (io.ReadCloser, error) {
				return io.NopCloser(reader), nil
			},
		}

		// referee ran successfully, upload its results to GitLab as an artifact
		b.ArtifactUploader(jobCredentials, bodyProvider, ArtifactsOptions{
			BaseName: referee.ArtifactBaseName(),
			Type:     referee.ArtifactType(),
			Format:   spec.ArtifactFormat(referee.ArtifactFormat()),
		})
	}
}

// attemptExecuteStage executes a stage up to `attempts` times (valid range
// [1, 10]). Before each retry (attempt > 0 behavior is driven by the
// callback's own logic) the optional retryCallback runs; a callback error
// skips the stage execution for that attempt. With the exponential-backoff
// feature flag on, retries sleep between attempts.
func (b *Build) attemptExecuteStage(
	ctx context.Context,
	buildStage BuildStage,
	executor Executor,
	attempts int,
	retryCallback func(attempt int) error,
) error {
	if attempts < 1 || attempts > 10 {
		return fmt.Errorf("number of attempts out of the range [1, 10] for stage: %s", buildStage)
	}

	retry := backoff.Backoff{
		Min:    5 * time.Second,
		Max:    5 * time.Minute,
		Jitter: true,
		Factor: 1.5,
	}

	var err error
	for attempt := range attempts {
		if retryCallback != nil {
			if err = retryCallback(attempt); err != nil {
				continue
			}
		}

		if err = b.executeStage(ctx, buildStage, executor); err == nil {
			return nil
		}

		if attempt == attempts-1 {
			break
		}

		if b.IsFeatureFlagOn(featureflags.UseExponentialBackoffStageRetry) {
			duration := retry.Duration()
			b.logger.Infoln(fmt.Sprintf("Retrying in %v", duration))
			time.Sleep(duration)
		}
	}

	return err
}

// GetBuildTimeout returns the job timeout from the runner info, falling back
// to DefaultTimeout when unset or non-positive.
func (b *Build) GetBuildTimeout() time.Duration {
	buildTimeout := b.RunnerInfo.Timeout
	if buildTimeout <= 0 {
		buildTimeout = DefaultTimeout
	}
	return time.Duration(buildTimeout) * time.Second
}

// GetPrepareTimeout returns the timeout for the prepare stage.
// If prepare_timeout is not set or invalid, it defaults to the build timeout.
func (b *Build) GetPrepareTimeout() time.Duration {
	buildTimeout := b.GetBuildTimeout()

	if b.Runner == nil || b.Runner.PrepareTimeout == nil {
		return buildTimeout
	}

	prepareTimeout := *b.Runner.PrepareTimeout
	if prepareTimeout <= 0 {
		b.Log().Warningf("prepare_timeout (%s) must be greater than 0; using job timeout (%s)", prepareTimeout, buildTimeout)
		return buildTimeout
	}

	// prepare_timeout is clamped to the job timeout.
	if prepareTimeout > buildTimeout {
		b.Log().Warningf("prepare_timeout (%s) exceeds job timeout (%s); using job timeout", prepareTimeout, buildTimeout)
		return buildTimeout
	}

	return prepareTimeout
}

// handleError maps err to a runtime state, records it as the current build
// state, and returns the (possibly wrapped) error.
func (b *Build) handleError(err error) error {
	state, err := b.runtimeStateAndError(err)
	b.setCurrentState(state)

	return err
}

// runtimeStateAndError translates an error into the final build runtime
// state plus the BuildError to report: cancellation and timeout get
// dedicated states/failure reasons; nil means success; anything else is a
// plain failure passed through unchanged.
func (b *Build) runtimeStateAndError(err error) (BuildRuntimeState, error) {
	switch {
	case errors.Is(err, context.Canceled), errors.Is(err, ErrJobCanceled):
		return BuildRunRuntimeCanceled, &BuildError{
			Inner:         ErrJobCanceled,
			FailureReason: JobCanceled,
		}
	case errors.Is(err, context.DeadlineExceeded), errors.Is(err, ErrJobScriptTimeout):
		return BuildRunRuntimeTimedout, &BuildError{
			Inner:         fmt.Errorf("execution took longer than %v seconds", b.GetBuildTimeout()),
			FailureReason: JobExecutionTimeout,
		}
	case err == nil:
		return BuildRunRuntimeSuccess, nil
	default:
		return BuildRunRuntimeFailed, err
	}
}

// run executes the job script in a goroutine and waits for the first of:
// context cancellation/timeout, a system interrupt, script completion, or a
// panic in the script goroutine.
func (b *Build) run(ctx context.Context, trace JobTrace, executor
Executor) (err error) {
	b.setCurrentState(BuildRunRuntimeRunning)

	// Buffered (size 1) so the script goroutine can always deliver its
	// result even if this function has already returned via another case.
	buildFinish := make(chan error, 1)
	buildPanic := make(chan error, 1)

	runContext, runCancel := context.WithCancel(ctx)
	defer runCancel()

	if term, ok := executor.(terminal.InteractiveTerminal); b.Session != nil && ok {
		b.Session.SetInteractiveTerminal(term)
	}

	if proxyPooler, ok := executor.(proxy.Pooler); b.Session != nil && ok {
		b.Session.SetProxyPool(proxyPooler)
	}

	// Run build script
	go func() {
		defer func() {
			if r := recover(); r != nil {
				err := &BuildError{FailureReason: RunnerSystemFailure, Inner: fmt.Errorf("panic: %s", r)}
				b.Log().WithError(err).Error(string(debug.Stack()))
				buildPanic <- err
			}
		}()

		buildFinish <- b.executeScript(runContext, trace, executor)
	}()

	// Wait for signals: cancel, timeout, abort or finish
	b.Log().Debugln("Waiting for signals...")
	select {
	case <-ctx.Done():
		err = b.handleError(context.Cause(ctx))

	case signal := <-b.SystemInterrupt:
		err = &BuildError{
			Inner:         fmt.Errorf("aborted: %v", signal),
			FailureReason: RunnerSystemFailure,
		}
		b.setCurrentState(BuildRunRuntimeTerminated)

	case err = <-buildFinish:
		// It's possible that the parent context being cancelled will
		// terminate the build early, bringing us here, and although we handle
		// `ctx.Done()` above, select statements are not ordered.
		// We handle this the same as if we received ctx.Done(), but
		// return early because we're no longer waiting for the build
		// to finish.
		if ctx.Err() != nil {
			return b.handleError(context.Cause(ctx))
		}

		if err != nil {
			b.setCurrentState(BuildRunRuntimeFailed)
		} else {
			b.setCurrentState(BuildRunRuntimeSuccess)
		}
		return err

	case err = <-buildPanic:
		b.setCurrentState(BuildRunRuntimeTerminated)
		return err
	}

	b.Log().WithError(err).Debugln("Waiting for build to finish...")

	// Wait till we receive that build did finish
	runCancel()
	b.waitForBuildFinish(buildFinish, WaitForBuildFinishTimeout)

	return err
}

// waitForBuildFinish will wait for the build to finish or timeout, whichever
// comes first. This is to prevent issues where something in the build can't be
// killed or processed and results into the Job running until the GitLab Runner
// process exists.
func (b *Build) waitForBuildFinish(buildFinish <-chan error, timeout time.Duration) {
	select {
	case <-buildFinish:
		return
	case <-time.After(timeout):
		b.logger.Warningln("Timed out waiting for the build to finish")
		return
	}
}

// retryCreateExecutor creates and prepares an executor, retrying up to
// PreparationRetries times. BuildErrors and context cancellation abort the
// retry loop immediately; other preparation failures are retried after
// PreparationRetryInterval.
func (b *Build) retryCreateExecutor(
	options ExecutorPrepareOptions,
	provider ExecutorProvider,
	logger buildlogger.Logger,
) (Executor, error) {
	var err error

	for tries := 0; tries < PreparationRetries; tries++ {
		executor := provider.Create()
		if executor == nil {
			return nil, errors.New("failed to create executor")
		}

		b.setExecutorStageResolver(executor.GetCurrentStage)

		err = executor.Prepare(options)
		if err == nil {
			return executor, nil
		}
		// Each failed attempt cleans up its executor before retrying.
		executor.Cleanup()
		var buildErr *BuildError
		if errors.As(err, &buildErr) {
			return nil, err
		} else if options.Context.Err() != nil {
			return nil, b.handleError(context.Cause(options.Context))
		}

		logger.SoftErrorln("Preparation failed:", err)
		logger.Infoln("Will be retried in", PreparationRetryInterval, "...")

		// Wait for retry interval or context cancellation
		timer := time.NewTimer(PreparationRetryInterval)
		select {
		case <-timer.C:
		case <-options.Context.Done():
			timer.Stop()
			return nil, b.handleError(context.Cause(options.Context))
		}
	}

	return nil, err
}

// waitForTerminal blocks while an interactive terminal session is connected,
// until the session times out, disconnects, the context is cancelled, or a
// system interrupt arrives. Returns nil immediately when no session is
// connected.
func (b *Build) waitForTerminal(ctx context.Context, timeout time.Duration) error {
	if b.Session == nil || !b.Session.Connected() {
		return nil
	}

	timeout = b.getTerminalTimeout(ctx, timeout)

	b.logger.Infoln(
		fmt.Sprintf(
			"Terminal is connected, will time out in %s...",
			timeout.Round(time.Second),
		),
	)

	select {
	case <-ctx.Done():
		err := b.Session.Kill()
		if err != nil {
			b.Log().WithError(err).Warn("Failed to kill session")
		}
		return errors.New("build cancelled, killing session")
	case <-time.After(timeout):
		err := fmt.Errorf(
			"terminal session timed out (maximum time allowed - %s)",
			timeout.Round(time.Second),
		)
		b.logger.Infoln(err.Error())
		b.Session.TimeoutCh <- err
		return err
	case err := <-b.Session.DisconnectCh:
		b.logger.Infoln("Terminal disconnected")
		return fmt.Errorf("terminal disconnected: %w", err)
	case signal := <-b.SystemInterrupt:
		b.logger.Infoln("Terminal disconnected")
		err := b.Session.Kill()
		if err != nil {
			b.Log().WithError(err).Warn("Failed to kill session")
		}
		return fmt.Errorf("terminal disconnected by system signal: %v", signal)
	}
}

// getTerminalTimeout checks if the job timeout comes before the
// configured terminal timeout.
func (b *Build) getTerminalTimeout(ctx context.Context, timeout time.Duration) time.Duration {
	expiryTime, _ := ctx.Deadline()

	if expiryTime.Before(time.Now().Add(timeout)) {
		timeout = time.Until(expiryTime)
	}

	return timeout
}

// setTraceStatus sets the final status of a job. If the err
// is nil, the job is successful.
//
// What we send back to GitLab for a failure reason when the err
// is not nil depends:
//
// If the error can be unwrapped to `BuildError`, the BuildError's
// failure reason is given. If the failure reason is not supported
// by GitLab, it's converted to an `UnknownFailure`. If the failure
// reason is not specified, `ScriptFailure` is used.
//
// If an error cannot be unwrapped to `BuildError`, `SystemFailure`
// is used as the failure reason.
func (b *Build) setTraceStatus(trace JobTrace, err error) {
	logger := b.Log().WithFields(logrus.Fields{
		"duration_s": b.FinalDuration().Seconds(),
	})
	buildLogger := b.getNewLogger(trace, logger, true)
	defer buildLogger.Close()

	if err == nil {
		logger.WithFields(logrus.Fields{"job-status": "success"}).Infoln("Job succeeded")
		buildLogger.Infoln("Job succeeded")
		logTerminationError(buildLogger, "Success", trace.Success())

		return
	}

	// Only flip a still-pending state to failed; later states are preserved.
	b.setCurrentStateIf(BuildRunStatePending, BuildRunRuntimeFailed)

	var buildError *BuildError
	if errors.As(err, &buildError) {
		b.failureReason = buildError.FailureReason

		msg := fmt.Sprint("Job failed: ", err)
		if buildError.FailureReason == RunnerSystemFailure {
			msg = fmt.Sprint("Job failed (system failure): ", err)
		}

		logger.
			WithFields(logrus.Fields{
				"job-status":     "failed",
				"error":          err,
				"failure_reason": buildError.FailureReason,
				"exit_code":      buildError.ExitCode,
			}).
			Warningln(msg)
		buildLogger.SoftErrorln(msg)

		trace.SetSupportedFailureReasonMapper(newFailureReasonMapper(b.Features.FailureReasons))

		err = trace.Fail(err, JobFailureData{
			Reason:   buildError.FailureReason,
			ExitCode: buildError.ExitCode,
			Mode:     b.DispatchedJobExecutionMode(),
		})
		logTerminationError(buildLogger, "Fail", err)

		return
	}

	logger.
		WithFields(logrus.Fields{
			"job-status":     "failed",
			"error":          err,
			"failure_reason": RunnerSystemFailure,
		}).
		Errorln("Job failed (system failure):", err)
	buildLogger.Errorln("Job failed (system failure):", err)

	logTerminationError(buildLogger, "Fail", trace.Fail(err, JobFailureData{Reason: RunnerSystemFailure, Mode: b.DispatchedJobExecutionMode()}))
}

// logTerminationError logs a failure of the final trace Success/Fail call;
// name identifies which termination call failed.
func logTerminationError(logger buildlogger.Logger, name string, err error) {
	if err == nil {
		return
	}

	logger.WithFields(logrus.Fields{"error": err}).Errorln(fmt.Sprintf("Job trace termination %q failed", name))
}

// setExecutorStageResolver installs the function used to report the
// executor's current stage; guarded by statusLock.
func (b *Build) setExecutorStageResolver(resolver func() ExecutorStage) {
	b.statusLock.Lock()
	defer b.statusLock.Unlock()

	b.executorStageResolver = resolver
}

// CurrentExecutorStage returns the executor's current stage, or the empty
// stage when no resolver has been installed yet.
func (b *Build) CurrentExecutorStage() ExecutorStage {
	b.statusLock.Lock()
	defer b.statusLock.Unlock()

	if b.executorStageResolver == nil {
		return ExecutorStage("")
	}

	return b.executorStageResolver()
}

// Run is the top-level entry point for executing a job: it resolves inputs
// and secrets, prepares the executor, runs the job, waits for any attached
// terminal session, and reports the final status on the trace.
func (b *Build) Run(globalConfig *Config, trace JobTrace) (err error) {
	b.setCurrentState(BuildRunStatePending)

	// These defers are ordered because runBuild could panic and the recover needs to handle that panic.
	// setTraceStatus needs to be last since it needs a correct error value to report the job's status
	defer func() {
		b.ensureFinishedAt()
		b.setTraceStatus(trace, err)
	}()

	defer func() {
		if r := recover(); r != nil {
			err = &BuildError{FailureReason: RunnerSystemFailure, Inner: fmt.Errorf("panic: %s", r)}
			b.Log().WithError(err).Error(string(debug.Stack()))
		}
	}()

	err = b.expandInputs()
	if err != nil {
		return &BuildError{FailureReason: ConfigurationError, Inner: err}
	}

	b.logUsedImages()
	b.printRunningWithHeader(trace)

	err = b.resolveSecrets(trace)
	if err != nil {
		return err
	}

	b.expandContainerOptions()

	b.logger = b.getNewLogger(trace, b.Log(), false)
	defer b.logger.Close()

	ctx, cancel := context.WithTimeout(context.Background(), b.GetBuildTimeout())
	defer cancel()

	b.configureTrace(trace, cancel)

	b.printSettingErrors()

	options := b.createExecutorPrepareOptions(ctx, globalConfig)
	provider := b.ExecutorProvider
	if provider == nil {
		return errors.New("executor not found")
	}

	err = provider.GetFeatures(&b.ExecutorFeatures)
	if err != nil {
		return fmt.Errorf("retrieving executor features: %w", err)
	}

	executor, err := b.executeBuildSection(options, provider)
	if err != nil {
		return err
	}
	defer executor.Cleanup()

	// override context that can be canceled by the executor if supported
	if withContext, ok := b.ExecutorData.(WithContext); ok {
		ctx, cancel = withContext.WithContext(ctx)
		defer cancel()
	}

	err = b.run(ctx, trace, executor)
	if errWait := b.waitForTerminal(ctx, globalConfig.SessionServer.GetSessionTimeout()); errWait != nil {
		b.Log().WithError(errWait).Debug("Stopped waiting for terminal")
	}

	executor.Finish(err)

	return err
}

// expandInputs expands inputs in various build configuration settings.
//
// TODO: we want to expand inputs as early as possible to optimize the feedback loop.
// However, that may lead to problems where certain expansion context is only available later on.
// This might not be a problem for Inputs itself, but for functions (like `now()`) or
// when we allow other context in the expression, like access to environment variables,
// or other job-runtime dependent features.
// For a good middle ground we could parse the scripts as moa expressions and cache them
// and only later on evaluate given the necessary context.
func (b *Build) expandInputs() error {
	if !b.IsFeatureFlagOn(featureflags.EnableJobInputsInterpolation) {
		return nil
	}

	return spec.ExpandInputs(&b.Inputs, b)
}

// getNewLogger builds a trace-backed build logger with masking and
// timestamping configured from job features and feature flags.
func (b *Build) getNewLogger(trace JobTrace, log *logrus.Entry, teeOnly bool) buildlogger.Logger {
	return buildlogger.New(
		trace,
		log,
		buildlogger.Options{
			MaskPhrases:          b.GetAllVariables().Masked(),
			MaskTokenPrefixes:    b.Job.Features.TokenMaskPrefixes,
			Timestamping:         b.IsFeatureFlagOn(featureflags.UseTimestamps),
			MaskAllDefaultTokens: b.IsFeatureFlagOn(featureflags.MaskAllDefaultTokens),
			TeeOnly:              teeOnly,
		},
	)
}

// logUsedImages logs the job image and each service image (behind the
// LogImagesConfiguredForJob feature flag), including the Docker platform
// when configured.
func (b *Build) logUsedImages() {
	if !b.IsFeatureFlagOn(featureflags.LogImagesConfiguredForJob) {
		return
	}

	fields := func(i spec.Image) logrus.Fields {
		if i.Name == "" {
			return nil
		}

		fields := logrus.Fields{
			"image_name": i.Name,
		}
		if i.ExecutorOptions.Docker.Platform != "" {
			fields["image_platform"] = i.ExecutorOptions.Docker.Platform
		}
		return fields
	}

	imageFields := fields(b.Job.Image)
	if imageFields != nil {
		b.Log().WithFields(imageFields).Info("Image configured for job")
	}

	for _, service := range b.Job.Services {
		b.Log().WithFields(fields(service)).Info("Service image configured for job")
	}
}

// configureTrace wires both the trace's cancel and abort hooks to the build
// context's cancel function.
func (b *Build) configureTrace(trace JobTrace, cancel context.CancelFunc) {
	trace.SetCancelFunc(cancel)
	trace.SetAbortFunc(cancel)
}

// createExecutorPrepareOptions assembles the options passed to
// Executor.Prepare for this build.
func (b *Build) createExecutorPrepareOptions(ctx context.Context, globalConfig *Config) ExecutorPrepareOptions {
	return ExecutorPrepareOptions{
		Config:      b.Runner,
		Build:       b,
		BuildLogger: b.logger,
		User:        globalConfig.User,
		Context:     ctx,
	}
}

// resolveSecrets resolves the job's secrets (if any) inside a dedicated
// trace section and stores the resulting variables on the build.
func (b *Build) resolveSecrets(trace JobTrace) error {
	if b.Secrets == nil {
		return
		nil
	}

	b.Secrets.ExpandVariables(b.GetAllVariables())

	b.OnBuildStageStartFn.Call(BuildStageResolveSecrets)
	defer b.OnBuildStageEndFn.Call(BuildStageResolveSecrets)

	section := helpers.BuildSection{
		Name:        string(BuildStageResolveSecrets),
		SkipMetrics: !b.Job.Features.TraceSections,
		Run: func() error {
			logger := b.getNewLogger(trace, b.Log(), false)
			defer logger.Close()

			resolver, err := b.secretsResolver(&logger, GetSecretResolverRegistry(), b.IsFeatureFlagOn)
			if err != nil {
				return fmt.Errorf("creating secrets resolver: %w", err)
			}

			variables, err := resolver.Resolve(b.Secrets)
			if err != nil {
				return fmt.Errorf("resolving secrets: %w", err)
			}

			b.secretsVariables = variables
			// Invalidate cached variables so resolved secrets are picked up.
			b.RefreshAllVariables()

			return nil
		},
	}

	return section.Execute(&b.logger)
}

// executeBuildSection creates and prepares the executor inside the
// "prepare_executor" trace section, invoking the stage start/end hooks.
func (b *Build) executeBuildSection(options ExecutorPrepareOptions, provider ExecutorProvider) (Executor, error) {
	var executor Executor
	var err error

	b.OnBuildStageStartFn.Call(BuildStagePrepareExecutor)
	defer b.OnBuildStageEndFn.Call(BuildStagePrepareExecutor)

	section := helpers.BuildSection{
		Name:        string(BuildStagePrepareExecutor),
		SkipMetrics: !b.Job.Features.TraceSections,
		Run: func() error {
			msg := fmt.Sprintf(
				"%sPreparing the %q executor%s",
				helpers.ANSI_BOLD_CYAN,
				b.Runner.Executor,
				helpers.ANSI_RESET,
			)
			b.logger.Println(msg)
			executor, err = b.retryCreateExecutor(options, provider, b.logger)
			return err
		},
	}

	err = section.Execute(&b.logger)
	return executor, err
}

// String renders the build as YAML for debugging.
func (b *Build) String() string {
	return helpers.ToYAML(b)
}

// platformAppropriatePath converts slashes for Windows paths (detected via a
// non-empty volume name) and leaves other paths untouched.
func (b *Build) platformAppropriatePath(s string) string {
	// Check if we're dealing with a Windows path on a Windows platform
	// filepath.VolumeName will return empty otherwise
	if filepath.VolumeName(s) != "" {
		return filepath.FromSlash(s)
	}

	return s
}

// GetDefaultVariables returns the runner-provided predefined CI variables
// (build dirs, concurrency IDs, job status/timeout, ...).
func (b *Build) GetDefaultVariables() spec.Variables {
	return spec.Variables{
		{
			Key:      "CI_BUILDS_DIR",
			Value:    b.platformAppropriatePath(b.RootDir),
			Public:   true,
			Internal: true,
			File:     false,
		},
		{
			Key:      "CI_PROJECT_DIR",
			Value:    b.platformAppropriatePath(b.FullProjectDir()),
			Public:   true,
			Internal: true,
			File:     false,
		},
		{
			Key:      "CI_CONCURRENT_ID",
			Value:    strconv.Itoa(b.RunnerID),
			Public:   true,
			Internal: true,
			File:     false,
		},
		{
			Key:      "CI_CONCURRENT_PROJECT_ID",
			Value:    strconv.Itoa(b.ProjectRunnerID),
			Public:   true,
			Internal: true,
			File:     false,
		},
		{
			Key:      "CI_SERVER",
			Value:    "yes",
			Public:   true,
			Internal: true,
			File:     false,
		},
		{
			Key:      "CI_JOB_STATUS",
			Value:    string(BuildRunRuntimeRunning),
			Public:   true,
			Internal: true,
		},
		{
			Key:      "CI_JOB_TIMEOUT",
			Value:    strconv.FormatInt(int64(b.GetBuildTimeout().Seconds()), 10),
			Public:   true,
			Internal: true,
			File:     false,
		},
	}
}

// GetDefaultFeatureFlagsVariables returns one variable per known feature
// flag, carrying its default value.
func (b *Build) GetDefaultFeatureFlagsVariables() spec.Variables {
	variables := make(spec.Variables, 0)
	for _, featureFlag := range featureflags.GetAll() {
		variables = append(variables, spec.Variable{
			Key:      featureFlag.Name,
			Value:    strconv.FormatBool(featureFlag.DefaultValue),
			Public:   true,
			Internal: true,
			File:     false,
		})
	}

	return variables
}

// GetSharedEnvVariable returns CI_SHARED_ENVIRONMENT or
// CI_DISPOSABLE_ENVIRONMENT (value "true") depending on whether the executor
// environment is shared between builds.
func (b *Build) GetSharedEnvVariable() spec.Variable {
	env := spec.Variable{Value: "true", Public: true, Internal: true, File: false}
	if b.IsSharedEnv() {
		env.Key = "CI_SHARED_ENVIRONMENT"
	} else {
		env.Key = "CI_DISPOSABLE_ENVIRONMENT"
	}

	return env
}

// GetCITLSVariables returns file-type variables carrying the CA chain and
// the client auth cert/key when present in the build's TLS data.
func (b *Build) GetCITLSVariables() spec.Variables {
	variables := spec.Variables{}

	if b.TLSData.CAChain != "" {
		variables = append(variables, spec.Variable{
			Key:      tls.VariableCAFile,
			Value:    b.TLSData.CAChain,
			Public:   true,
			Internal: true,
			File:     true,
		})
	}

	if b.TLSData.AuthCert != "" && b.TLSData.AuthKey != "" {
		variables = append(
			variables,
			spec.Variable{
				Key:      tls.VariableCertFile,
				Value:    b.TLSData.AuthCert,
				Public:   true,
				Internal: true,
				File:     true,
			},
			// NOTE(review): unlike the cert, the key variable is not marked
			// Public — presumably deliberate so the key isn't exposed; confirm.
			spec.Variable{
				Key:      tls.VariableKeyFile,
				Value:    b.TLSData.AuthKey,
				Internal: true,
				File:     true,
			},
		)
	}

	return variables
}

// IsSharedEnv reports whether the executor environment is shared between
// builds (from executor features).
func (b *Build) IsSharedEnv() bool {
	return b.ExecutorFeatures.Shared
}

// RefreshAllVariables forces the next time all variables are retrieved to discard
// any cached results and
reconstruct/expand all job variables.
func (b *Build) RefreshAllVariables() {
	b.allVariables = nil
	b.buildSettings = nil
}

// getBaseVariablesBeforeJob returns the base variables that come before job variables.
func (b *Build) getBaseVariablesBeforeJob() spec.Variables {
	variables := make(spec.Variables, 0)
	if b.Image.Name != "" {
		variables = append(
			variables,
			spec.Variable{Key: "CI_JOB_IMAGE", Value: b.Image.Name, Public: true, Internal: true, File: false},
		)
	}
	if b.Runner != nil {
		variables = append(variables, b.Runner.GetVariables()...)
	}
	variables = append(variables, b.GetDefaultVariables()...)
	variables = append(variables, b.GetCITLSVariables()...)

	return variables
}

// getBaseVariablesAfterJob returns the base variables that come after job variables.
func (b *Build) getBaseVariablesAfterJob() spec.Variables {
	variables := make(spec.Variables, 0)

	variables = append(variables, b.GetSharedEnvVariable())
	variables = append(variables, AppVersion.Variables()...)
	variables = append(variables, b.secretsVariables...)
	variables = append(variables, spec.Variable{
		Key:      spec.TempProjectDirVariableKey,
		Value:    b.TmpProjectDir(),
		Public:   true,
		Internal: true,
	})

	if b.IsFeatureFlagOn(featureflags.NetworkPerBuild) {
		variables = append(
			variables,
			spec.Variable{Key: "CI_BUILD_NETWORK_NAME", Value: b.ProjectUniqueShortName(), Public: true, Internal: true, File: false},
		)
	}

	return variables
}

// getVariablesForFeatureFlagResolution returns an initial set of variables that will be used
// to resolve feature flag settings. This is used only during initSettings.
func (b *Build) getVariablesForFeatureFlagResolution() spec.Variables {
	variables := make(spec.Variables, 0)
	variables = append(variables, b.GetDefaultFeatureFlagsVariables()...)
	variables = append(variables, b.getBaseVariablesBeforeJob()...)
	variables = append(variables, b.Variables...)
	variables = append(variables, b.getBaseVariablesAfterJob()...)

	return variables.Expand()
}

// getResolvedFeatureFlags returns resolved feature flags with TOML precedence.
// This assumes build settings have been initialized. This is
// part of the two-phase feature flag resolution process that ensures
// TOML settings take precedence over job variables.
func (b *Build) getResolvedFeatureFlags() spec.Variables {
	variables := make(spec.Variables, 0)

	if b.buildSettings == nil {
		logrus.Warn("build settings are not initialized")
		return variables
	}

	for _, featureFlag := range featureflags.GetAll() {
		resolvedValue := b.buildSettings.FeatureFlags[featureFlag.Name]
		variables = append(variables, spec.Variable{
			Key:      featureFlag.Name,
			Value:    strconv.FormatBool(resolvedValue),
			Public:   true,
			Internal: true,
			File:     false,
		})
	}

	return variables
}

// getNonFeatureFlagJobVariables gets job variables, excluding feature flags to prevent double inclusion
// and to maintain the precedence of TOML-configured feature flags over job variables.
func (b *Build) getNonFeatureFlagJobVariables() spec.Variables {
	featureFlagNames := make(map[string]bool)
	for _, ff := range featureflags.GetAll() {
		featureFlagNames[ff.Name] = true
	}

	filtered := make(spec.Variables, 0, len(b.Variables))
	for _, variable := range b.Variables {
		if !featureFlagNames[variable.Key] {
			filtered = append(filtered, variable)
		}
	}

	return filtered
}

// GetAllVariables() returns final variables with a consistent precedence order:
// 1. Resolved feature flags (TOML takes precedence over job variables)
// 2. Base variables that come before job variables
// 3. Job variables (excluding feature flags to prevent overriding resolved values)
// 4. Base variables that come after job variables
func (b *Build) GetAllVariables() spec.Variables {
	// Cached after first computation; invalidated by RefreshAllVariables.
	if b.allVariables != nil {
		return b.allVariables
	}

	// Phase 1: Ensure feature flags have been resolved.
	if b.buildSettings == nil {
		b.Settings()
	}

	variables := make(spec.Variables, 0)

	// Phase 2: Add resolved feature flags first (maintains original precedence order)
	variables = append(variables, b.getResolvedFeatureFlags()...)
	variables = append(variables, b.getBaseVariablesBeforeJob()...)
	variables = append(variables, b.getNonFeatureFlagJobVariables()...)
	variables = append(variables, b.getBaseVariablesAfterJob()...)

	b.allVariables = variables.Expand()

	return b.allVariables
}

// IsProtected states if the git ref this build is for is protected.
// GitLab 18.3+ provides the `protected` property in GitInfo to check if a branch is protected.
// For older GitLab versions, we fall back to the CI_COMMIT_REF_PROTECTED predefined variable.
func (b *Build) IsProtected() bool {
	if p := b.GitInfo.Protected; p != nil {
		return *p
	}

	// we dedup the vars here, keeping the original, so that we don't consider an override by the user.
	return b.GetAllVariables().Dedup(true).Bool("CI_COMMIT_REF_PROTECTED")
}

// Users might specify image and service-image name and aliases as Variables, so we must expand them before they are
// used.
func (b *Build) expandContainerOptions() {
	allVars := b.GetAllVariables()
	b.Image.Name = allVars.ExpandValue(b.Image.Name)
	b.Image.Alias = allVars.ExpandValue(b.Image.Alias)
	for i := range b.Services {
		b.Services[i].Name = allVars.ExpandValue(b.Services[i].Name)
		b.Services[i].Alias = allVars.ExpandValue(b.Services[i].Alias)
	}
}

// withUrlHelper lazily sets up the correct url helper, stores it for the rest of the lifetime of the build, and returns
// the appropriate url helper.
func (b *Build) withUrlHelper() *url_helpers.GitAuthHelper {
	if b.urlHelper != nil {
		return b.urlHelper
	}

	vars := b.GetAllVariables()

	b.urlHelper = url_helpers.NewGitAuthHelper(url_helpers.GitAuthConfig{
		CloneURL:               b.Runner.CloneURL,
		CredentialsURL:         b.Runner.RunnerCredentials.URL,
		RepoURL:                b.GitInfo.RepoURL,
		GitSubmoduleForceHTTPS: b.Settings().GitSubmoduleForceHTTPS,
		Token:                  b.Token,
		ProjectPath:            vars.Value("CI_PROJECT_PATH"),
		Server: url_helpers.GitAuthServerConfig{
			Host:    vars.Value("CI_SERVER_HOST"),
			SSHHost: vars.Value("CI_SERVER_SHELL_SSH_HOST"),
			SSHPort: vars.Value("CI_SERVER_SHELL_SSH_PORT"),
		},
	}, !b.IsFeatureFlagOn(featureflags.GitURLsWithoutTokens))

	return b.urlHelper
}

// GetRemoteURL uses the urlHelper to get the remote URL used for fetching the repo.
func (b *Build) GetRemoteURL() (*url.URL, error) {
	return b.withUrlHelper().GetRemoteURL()
}

// GetInsteadOfs uses the urlHelper to generate insteadOf URLs to pass on to git.
func (b *Build) GetInsteadOfs() ([][2]string, error) {
	return b.withUrlHelper().GetInsteadOfs()
}

// stageTimeout couples a timeout-configuring variable name with the default
// duration used when the variable is unset.
type stageTimeout struct {
	configName     string
	defaultTimeout time.Duration
}

// getStageTimeoutContexts resolves each requested stage timeout from job
// variables (falling back to the default) and returns, per config name, a
// factory producing a context honoring that timeout. A resolved timeout of 0
// yields a plain cancellable context with no deadline of its own.
func (b *Build) getStageTimeoutContexts(parent context.Context, timeouts ...stageTimeout) map[string]func() (context.Context, func()) {
	stack := make([]time.Duration, len(timeouts))
	deadline, hasDeadline := parent.Deadline()
	jobTimeout := time.Until(deadline)
	for idx, timeout := range timeouts {
		stack[idx] = timeout.defaultTimeout

		rawTimeout := b.GetAllVariables().Value(timeout.configName)
		duration, parseErr := time.ParseDuration(rawTimeout)
		switch {
		case strings.TrimSpace(rawTimeout) == "":
			// no-op: keep the default timeout

		case parseErr != nil:
			b.logger.Warningln(fmt.Sprintf("Ignoring malformed %s timeout: %v", timeout.configName, rawTimeout))

		case duration < 0:
			// no relative durations for now...
			b.logger.Warningln(fmt.Sprintf("Ignoring relative %s timeout: %v", timeout.configName, rawTimeout))

		case hasDeadline && duration > jobTimeout:
			// clamping timeouts to the job timeout happens automatically in `context.WithParent()`, mention it here
			b.logger.Warningln(fmt.Sprintf("%s timeout: %v is longer than job timeout. Setting to job timeout", timeout.configName, rawTimeout))

		case duration != 0:
			stack[idx] = duration
		}
	}

	results := make(map[string]func() (context.Context, func()))
	for idx, timeout := range timeouts {
		switch {
		case stack[idx] == 0:
			results[timeout.configName] = func() (context.Context, func()) {
				// no timeout
				return context.WithCancel(parent)
			}

		case stack[idx] > 0:
			duration := stack[idx]
			results[timeout.configName] = func() (context.Context, func()) {
				// absolute timeout
				return context.WithTimeout(parent, duration)
			}
		}
	}

	return results
}

// GetGitStrategy returns the effective git strategy from build settings.
func (b *Build) GetGitStrategy() GitStrategy {
	return b.Settings().GitStrategy
}

// GetRepositoryObjectFormat returns the repo's object format (e.g. sha1),
// falling back to DefaultObjectFormat when unset.
func (b *Build) GetRepositoryObjectFormat() string {
	if b.GitInfo.RepoObjectFormat == "" {
		return DefaultObjectFormat
	}
	return b.GitInfo.RepoObjectFormat
}

// GetGitCheckout reports whether a checkout should happen; always false for
// the none/empty git strategies.
func (b *Build) GetGitCheckout() bool {
	if b.GetGitStrategy() == GitNone || b.GetGitStrategy() == GitEmpty {
		return false
	}

	return b.Settings().GitCheckout
}

// GetSubmoduleStrategy returns the submodule strategy; none/empty git
// strategies force SubmoduleNone.
func (b *Build) GetSubmoduleStrategy() SubmoduleStrategy {
	if b.GetGitStrategy() == GitNone || b.GetGitStrategy() == GitEmpty {
		return SubmoduleNone
	}
	return b.Settings().GitSubmoduleStrategy
}

// GetSubmodulePaths https://git-scm.com/docs/git-submodule#Documentation/git-submodule.txt-ltpathgt82308203
func (b *Build) GetSubmodulePaths() ([]string, error) {
	toks := b.Settings().GitSubmodulePaths
	for _, tok := range toks {
		// A bare exclude pathspec with no path is invalid.
		if tok == ":(exclude)" {
			return nil, fmt.Errorf("GIT_SUBMODULE_PATHS: invalid submodule pathspec %q", toks)
		}
	}

	return toks, nil
}

// GetSubmoduleDepth returns the configured submodule clone depth.
func (b *Build) GetSubmoduleDepth() int {
	return b.Settings().GitSubmoduleDepth
}

// GetGitCleanFlags returns the flags passed to `git clean`.
func (b *Build) GetGitCleanFlags() []string {
	return b.Settings().GitCleanFlags
}

func (b *Build)
GetGitCloneFlags() []string {
	return b.Settings().GitCloneExtraFlags
}

// GetGitFetchFlags returns extra flags passed to `git fetch`.
func (b *Build) GetGitFetchFlags() []string {
	return b.Settings().GitFetchExtraFlags
}

// GetGitSubmoduleUpdateFlags returns extra flags passed to `git submodule update`.
func (b *Build) GetGitSubmoduleUpdateFlags() []string {
	return b.Settings().GitSubmoduleUpdateFlags
}

// IsDebugTraceEnabled reports whether CI debug trace is enabled.
func (b *Build) IsDebugTraceEnabled() bool {
	return b.Settings().CIDebugTrace
}

// GetDockerAuthConfig returns the DOCKER_AUTH_CONFIG setting.
func (b *Build) GetDockerAuthConfig() string {
	return b.Settings().DockerAuthConfig
}

// GetGetSourcesAttempts returns how many times the get-sources stage may run.
func (b *Build) GetGetSourcesAttempts() int {
	return b.Settings().GetSourcesAttempts
}

// GetDownloadArtifactsAttempts returns how many times artifact download may run.
func (b *Build) GetDownloadArtifactsAttempts() int {
	return b.Settings().ArtifactDownloadAttempts
}

// GetRestoreCacheAttempts returns how many times cache restore may run.
func (b *Build) GetRestoreCacheAttempts() int {
	return b.Settings().RestoreCacheAttempts
}

// GetCacheRequestTimeout returns the cache request timeout setting.
func (b *Build) GetCacheRequestTimeout() int {
	return b.Settings().CacheRequestTimeout
}

// GetExecutorJobSectionAttempts returns how many times executor job sections may run.
func (b *Build) GetExecutorJobSectionAttempts() int {
	return b.Settings().ExecutorJobSectionAttempts
}

// StartedAt returns when the build started.
func (b *Build) StartedAt() time.Time {
	return b.startedAt
}

// FinishedAt returns when the build finished (zero until ensureFinishedAt runs).
func (b *Build) FinishedAt() time.Time {
	return b.finishedAt
}

// CurrentDuration presents the duration since when the job was started
// to the moment when CurrentDuration was called. To be used in cases,
// when we want to check the duration of the job while it's still being
// executed
func (b *Build) CurrentDuration() time.Duration {
	return time.Since(b.startedAt)
}

// FinalDuration presents the total duration of the job since when it was
// started to when it was finished. To be used when reporting the final
// duration through logs or metrics, for example for billing purposes.
func (b *Build) FinalDuration() time.Duration {
	// Zero until the build is marked finished.
	if b.finishedAt.IsZero() {
		return time.Duration(0)
	}

	return b.finishedAt.Sub(b.startedAt)
}

// ensureFinishedAt stamps the build's finish time with the current time.
func (b *Build) ensureFinishedAt() {
	b.finishedAt = time.Now()
}

// urlHelper abstracts the git auth URL helper used for remote URL and
// insteadOf generation.
type urlHelper interface {
	GetRemoteURL() (*url.URL, error)
	GetInsteadOfs() ([][2]string, error)
}

// NewBuild constructs a Build for the given job, deep-copying the runner
// config so per-build mutations don't leak into the shared config.
func NewBuild(
	jobData spec.Job,
	runnerConfig *RunnerConfig,
	systemInterrupt chan os.Signal,
	executorData ExecutorData,
	executorProvider ExecutorProvider,
) (*Build, error) {
	// Attempt to perform a deep copy of the RunnerConfig
	runnerConfigCopy, err := runnerConfig.DeepCopy()
	if err != nil {
		return nil, fmt.Errorf("deep copy of runner config failed: %w", err)
	}

	return &Build{
		Job:              jobData,
		Runner:           runnerConfigCopy,
		SystemInterrupt:  systemInterrupt,
		ExecutorData:     executorData,
		ExecutorProvider: executorProvider,
		startedAt:        time.Now(),
		secretsResolver:  newSecretsResolver,
	}, nil
}

// IsFeatureFlagOn reports whether the named feature flag resolved to true in
// the build settings.
func (b *Build) IsFeatureFlagOn(name string) bool {
	val, ok := b.Settings().FeatureFlags[name]
	return ok && val
}

// getFeatureFlagInfo returns the status of feature flags that differ
// from their default status.
func (b *Build) getFeatureFlagInfo() string {
	var statuses []string
	for _, ff := range featureflags.GetAll() {
		isOn := b.IsFeatureFlagOn(ff.Name)

		if isOn != ff.DefaultValue {
			statuses = append(statuses, fmt.Sprintf("%s:%t", ff.Name, isOn))
		}
	}

	return strings.Join(statuses, ", ")
}

// printRunningWithHeader prints the version/runner/feature-flag header at
// the top of the job trace.
func (b *Build) printRunningWithHeader(trace JobTrace) {
	logger := b.getNewLogger(trace, b.Log(), false)
	defer logger.Close()

	logger.Println("Running with", AppVersion.Line())
	if b.Runner != nil && b.Runner.ShortDescription() != "" {
		logger.Println(fmt.Sprintf(
			"  on %s %s, system ID: %s",
			b.Runner.Name,
			b.Runner.ShortDescription(),
			b.Runner.SystemID,
		))
	}
	if featureInfo := b.getFeatureFlagInfo(); featureInfo != "" {
		logger.Println("  feature flags:", featureInfo)
	}
}

// printSettingErrors warns about any errors collected while parsing build
// settings.
func (b *Build) printSettingErrors() {
	if len(b.Settings().Errors) > 0 {
		b.logger.Warningln(errors.Join(b.Settings().Errors...))
	}
}

// printPolicyOptions logs how policy-enforced variables interact with
// user-defined CI/CD variables for policy-triggered jobs.
func (b *Build) printPolicyOptions() {
	if !b.Job.PolicyOptions.PolicyJob {
		return
	}

	b.logger.Infoln(fmt.Sprintf(`Job triggered by policy "%s".`, b.Job.PolicyOptions.Name))

	// VariableOverrideAllowed is optional.
	// If not set, YAML variables defined in the policy are enforced with the highest precedence.
	if b.Job.PolicyOptions.VariableOverrideAllowed == nil {
		b.logger.Infoln("Variables defined in the policy take precedence over matching user-defined CI/CD variables for this job.")
		return
	}

	message := "User-defined CI/CD variables are "
	if *b.Job.PolicyOptions.VariableOverrideAllowed {
		message += "allowed in this job"
	} else {
		message += "ignored in this job"
	}

	// VariableOverrideExceptions acts as an allowlist when VariableOverrideExceptions is false
	// and a denylist when it's true.
	// NOTE(review): the condition above likely refers to VariableOverrideAllowed
	// (a []string cannot be "false") — confirm and fix the comment upstream.
	if b.Job.PolicyOptions.VariableOverrideExceptions != nil {
		message += fmt.Sprintf(" (except for %s)", strings.Join(b.Job.PolicyOptions.VariableOverrideExceptions, ", "))
	}
	message += " according to the policy."
b.logger.Infoln(message) } func (b *Build) IsLFSSmudgeDisabled() bool { return b.Settings().GitLFSSkipSmudge } func (b *Build) IsCIDebugServiceEnabled() bool { return b.Settings().CIDebugServices } func (b *Build) IsDebugModeEnabled() bool { return b.IsDebugTraceEnabled() || b.IsCIDebugServiceEnabled() } ================================================ FILE: common/build_settings.go ================================================ package common import ( "flag" "fmt" "os" "slices" "strconv" "strings" "unicode" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" ) type GitStrategy string const ( GitClone GitStrategy = "clone" GitFetch GitStrategy = "fetch" GitNone GitStrategy = "none" GitEmpty GitStrategy = "empty" ) type cmdFlags []string var ( gitCleanFlagsDefault = cmdFlags{"-ffdx"} gitFetchFlagsDefault = cmdFlags{"--prune", "--quiet"} ) type SubmoduleStrategy string const ( SubmoduleInvalid SubmoduleStrategy = "invalid" SubmoduleNone SubmoduleStrategy = "none" SubmoduleNormal SubmoduleStrategy = "normal" SubmoduleRecursive SubmoduleStrategy = "recursive" DefaultObjectFormat = "sha1" ) type BuildSettings struct { CIDebugServices bool CIDebugTrace bool GitClonePath string GitCheckout bool GitSubmoduleStrategy SubmoduleStrategy GitStrategy GitStrategy GitSubmodulePaths []string GitSubmoduleDepth int GitCleanFlags cmdFlags GitCloneExtraFlags cmdFlags GitFetchExtraFlags cmdFlags GitSubmoduleUpdateFlags cmdFlags GitLFSSkipSmudge bool GitSubmoduleForceHTTPS bool GetSourcesAttempts int ArtifactDownloadAttempts int RestoreCacheAttempts int ExecutorJobSectionAttempts int AfterScriptIgnoreErrors bool CacheRequestTimeout int DockerAuthConfig string FeatureFlags map[string]bool Errors []error } // Settings returns user provided build settings. 
func (b *Build) Settings() BuildSettings {
	// Resolution is lazy and memoized; see initSettings.
	b.initSettings()
	return *b.buildSettings
}

// initSettings resolves all build settings from job/runner variables exactly
// once and caches the result on b.buildSettings. Validation problems do not
// abort resolution: offending settings fall back to defaults and the errors
// are collected in BuildSettings.Errors.
func (b *Build) initSettings() {
	if b.buildSettings != nil {
		return // already resolved
	}
	b.buildSettings = &BuildSettings{}
	// PHASE 1: Use explicit method for feature flag resolution
	variablesForResolution := b.getVariablesForFeatureFlagResolution()
	// GIT_STRATEGY default depends on whether the runner allows fetching.
	defaultGitStrategy := GitClone
	if b.AllowGitFetch {
		defaultGitStrategy = GitFetch
	}
	errs := validateVariables(variablesForResolution, b, defaultGitStrategy)
	// A runner-level kill switch overrides any job-level debug settings;
	// flag the attempt as an error before forcing both off.
	if b.Runner != nil && b.Runner.DebugTraceDisabled {
		if b.buildSettings.CIDebugTrace {
			errs = append(errs, fmt.Errorf("CI_DEBUG_TRACE: usage is disabled on this Runner"))
		}
		if b.buildSettings.CIDebugServices {
			errs = append(errs, fmt.Errorf("CI_DEBUG_SERVICES: usage is disabled on this Runner"))
		}
		b.buildSettings.CIDebugTrace = false
		b.buildSettings.CIDebugServices = false
	}
	// Clamp section attempts to the supported [1, 10] range.
	if b.buildSettings.ExecutorJobSectionAttempts < 1 || b.buildSettings.ExecutorJobSectionAttempts > 10 {
		errs = append(errs, fmt.Errorf("EXECUTOR_JOB_SECTION_ATTEMPTS: number of attempts out of the range [1, 10], using default %v", DefaultExecutorStageAttempts))
		b.buildSettings.ExecutorJobSectionAttempts = DefaultExecutorStageAttempts
	}
	errs = append(errs, populateFeatureFlags(b, variablesForResolution)...)
	// Drop the nil entries produced by validations that succeeded.
	b.buildSettings.Errors = slices.DeleteFunc(errs, func(err error) bool { return err == nil })
}

// validateVariables resolves every supported settings variable into
// b.buildSettings, returning one (possibly nil) error per variable.
func validateVariables(variables spec.Variables, b *Build, defaultGitStategy GitStrategy) []error {
	return []error{
		validate(variables, "CI_DEBUG_SERVICES", &b.buildSettings.CIDebugServices, false),
		validate(variables, "CI_DEBUG_TRACE", &b.buildSettings.CIDebugTrace, false),
		validate(variables, "GIT_CLONE_PATH", &b.buildSettings.GitClonePath, ""),
		validate(variables, "GIT_STRATEGY", &b.buildSettings.GitStrategy, defaultGitStategy),
		validate(variables, "GIT_CHECKOUT", &b.buildSettings.GitCheckout, true),
		validate(variables, "GIT_SUBMODULE_STRATEGY", &b.buildSettings.GitSubmoduleStrategy, SubmoduleInvalid),
		validate(variables, "GIT_SUBMODULE_PATHS", &b.buildSettings.GitSubmodulePaths, nil),
		validate(variables, "GIT_SUBMODULE_DEPTH", &b.buildSettings.GitSubmoduleDepth, b.GitInfo.Depth),
		validate(variables, "GIT_CLEAN_FLAGS", &b.buildSettings.GitCleanFlags, gitCleanFlagsDefault),
		validate(variables, "GIT_CLONE_EXTRA_FLAGS", &b.buildSettings.GitCloneExtraFlags, cmdFlags{}),
		validate(variables, "GIT_FETCH_EXTRA_FLAGS", &b.buildSettings.GitFetchExtraFlags, gitFetchFlagsDefault),
		validate(variables, "GIT_SUBMODULE_UPDATE_FLAGS", &b.buildSettings.GitSubmoduleUpdateFlags, nil),
		validate(variables, "GIT_LFS_SKIP_SMUDGE", &b.buildSettings.GitLFSSkipSmudge, false),
		validate(variables, "GIT_SUBMODULE_FORCE_HTTPS", &b.buildSettings.GitSubmoduleForceHTTPS, false),
		validate(variables, "GET_SOURCES_ATTEMPTS", &b.buildSettings.GetSourcesAttempts, DefaultGetSourcesAttempts),
		validate(variables, "ARTIFACT_DOWNLOAD_ATTEMPTS", &b.buildSettings.ArtifactDownloadAttempts, DefaultArtifactDownloadAttempts),
		validate(variables, "RESTORE_CACHE_ATTEMPTS", &b.buildSettings.RestoreCacheAttempts, DefaultRestoreCacheAttempts),
		validate(variables, "EXECUTOR_JOB_SECTION_ATTEMPTS", &b.buildSettings.ExecutorJobSectionAttempts, DefaultExecutorStageAttempts),
		validate(variables, "AFTER_SCRIPT_IGNORE_ERRORS",
			&b.buildSettings.AfterScriptIgnoreErrors, DefaultAfterScriptIgnoreErrors),
		validate(variables, "CACHE_REQUEST_TIMEOUT", &b.buildSettings.CacheRequestTimeout, DefaultCacheRequestTimeout),
		validate(variables, "DOCKER_AUTH_CONFIG", &b.buildSettings.DockerAuthConfig, ""),
	}
}

// validate parses the variable `name` into *value according to T. On parse
// failure (or, for the strategy types, an unrecognized value) *value is set
// to def and a descriptive error is returned. An empty raw value generally
// means "use the default", except for SubmoduleStrategy where it maps to
// SubmoduleNone.
func validate[T any](variables spec.Variables, name string, value *T, def T) error {
	raw := variables.Value(name)
	var err error
	// Strategy types are handled first because their empty-value semantics
	// differ from the generic "empty means default" rule below.
	switch v := any(value).(type) {
	case *SubmoduleStrategy:
		switch strategy := SubmoduleStrategy(raw); strategy {
		case SubmoduleNormal, SubmoduleRecursive, SubmoduleNone:
			*v = strategy
		case "":
			*v = SubmoduleNone
		default:
			*value = def
			return fmt.Errorf("%s: expected either 'normal', 'recursive' or 'none' got %q", name, raw)
		}
		return nil
	case *GitStrategy:
		switch strategy := GitStrategy(raw); strategy {
		case GitClone, GitFetch, GitNone, GitEmpty:
			*v = strategy
		case "":
			*value = def
		default:
			*value = def
			return fmt.Errorf("%s: expected either 'clone', 'fetch', 'none' or 'empty' got %q, using default value '%v'", name, raw, def)
		}
		return nil
	}
	// all cases below use a default when the value is empty
	if raw == "" {
		*value = def
		return nil
	}
	switch v := any(value).(type) {
	case *bool:
		*v, err = strconv.ParseBool(raw)
		if err != nil {
			*value = def
			return fmt.Errorf("%s: expected bool got %q, using default value: %v", name, raw, def)
		}
	case *int:
		// NOTE: parsed as int64 then truncated to int; err is checked after
		// the assignment, and the default wins on failure.
		i, err := strconv.ParseInt(raw, 10, 64)
		*v = int(i)
		if err != nil {
			*value = def
			return fmt.Errorf("%s: expected int got %q, using default value: %v", name, raw, def)
		}
	case *string:
		*v = raw
	case *cmdFlags:
		// The literal value "none" clears the flags entirely; anything else
		// is whitespace-split into individual flags.
		switch raw {
		case "none":
			*v = cmdFlags{}
		default:
			*v = cmdFlags(strings.Fields(raw))
		}
	case *[]string:
		*v = strings.Fields(raw)
	}
	return nil
}

// populateFeatureFlags resolves the effective value of every known feature
// flag into b.buildSettings.FeatureFlags. Precedence: test-mode toggles >
// runner config > job variable > flag default. Unparseable values are
// reported as errors and leave the prior value in place.
//
//nolint:gocognit
func populateFeatureFlags(b *Build, variables spec.Variables) []error {
	var errs []error
	// test mode only: in tests, we provide a mechanism for providing
	// feature flags via RUNNER_TEST_FEATURE_FLAGS, if the flag is present,
	// we treat it as a toggle to the default flag value.
	var testFlags []string
	if flag.Lookup("test.v") != nil {
		testFlags = strings.FieldsFunc(os.Getenv("RUNNER_TEST_FEATURE_FLAGS"), func(r rune) bool {
			return r == ',' || unicode.IsSpace(r)
		})
	}
	b.buildSettings.FeatureFlags = make(map[string]bool)
	for _, ff := range featureflags.GetAll() {
		b.buildSettings.FeatureFlags[ff.Name] = ff.DefaultValue
		if len(testFlags) > 0 {
			if slices.Contains(testFlags, ff.Name) {
				b.buildSettings.FeatureFlags[ff.Name] = !ff.DefaultValue
				continue
			}
		}
		// runner setting takes precedence if defined
		if b.Runner != nil && b.Runner.FeatureFlags != nil {
			val, ok := b.Runner.FeatureFlags[ff.Name]
			if ok {
				b.buildSettings.FeatureFlags[ff.Name] = val
				continue
			}
		}
		// if job variable is valid it can override default
		// NOTE(review): assumes variables.Get returns a parseable value here
		// (an empty string would fail ParseBool and append an error for every
		// unset flag) — confirm Get's behavior for absent variables.
		raw := variables.Get(ff.Name)
		val, err := strconv.ParseBool(raw)
		if err != nil {
			errs = append(errs, fmt.Errorf("%v: could not parse feature flag, expected bool, got %v", ff.Name, raw))
		} else {
			b.buildSettings.FeatureFlags[ff.Name] = val
		}
	}
	return errs
}



================================================
FILE: common/build_settings_test.go
================================================
//go:build !integration

package common

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
)

// For https://gitlab.com/gitlab-org/gitlab-runner/-/issues/37386
func TestBuildVariablesAsFileType(t *testing.T) {
	tests := map[string]bool{
		"file vars":    true,
		"regular vars": false,
	}
	for name, isFileType := range tests {
		t.Run(name, func(t *testing.T) {
			vars := []spec.Variable{
				{Key: "DOCKER_AUTH_CONFIG", Value: "foobarbaz", File: isFileType},
				{Key: "GIT_CLONE_PATH", Value: "/root/dir/foobarbaz", File: isFileType},
				{Key: "GIT_SUBMODULE_STRATEGY", Value: "recursive", File: isFileType},
			}
			build := runSuccessfulMockBuild(t, func(options ExecutorPrepareOptions) error {
				options.Build.Variables = append(options.Build.Variables, vars...)
				return options.Build.StartBuild("/root/dir", "/cache/dir", true, false, false)
			})
			assert.Equal(t, "foobarbaz", build.Settings().DockerAuthConfig)
			assert.Equal(t, "/root/dir/foobarbaz", build.Settings().GitClonePath)
			assert.Equal(t, SubmoduleRecursive, build.Settings().GitSubmoduleStrategy)
		})
	}
}



================================================
FILE: common/build_step_dispatch.go
================================================
package common

import (
	"context"
	"fmt"

	"gitlab.com/gitlab-org/gitlab-runner/cache"
	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/gitlab-runner/functions/concrete/builder"
	"gitlab.com/gitlab-org/gitlab-runner/functions/concrete/run/cacheprovider"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags"
	"gitlab.com/gitlab-org/moa"
	"gitlab.com/gitlab-org/step-runner/schema/v1"
)

const stepRunBuildStage = BuildStage("step_" + spec.StepNameRun)

// stepDispatch converts a build stage to a list of steps to run.
//
// Depending on the configuration, this can also include stages that we're
// in the process of migrating (from scripts) to a step.
// //nolint:gocognit func stepDispatch(build *Build, executor Executor, stage BuildStage) (bool, []schema.Step) { switch stage { case BuildStagePrepare, BuildStageGetSources, BuildStageClearWorktree, BuildStageRestoreCache, BuildStageDownloadArtifacts, BuildStageArchiveOnSuccessCache, BuildStageArchiveOnFailureCache, BuildStageUploadOnFailureArtifacts, BuildStageUploadOnSuccessArtifacts, BuildStageCleanup: // don't handle non-user script stages return false, nil case stepRunBuildStage: return true, build.Job.Run case BuildStageAfterScript: // don't handle after_script (yet) return false, nil default: // user script if !build.IsFeatureFlagOn(featureflags.UseScriptToStepMigration) { return false, nil } shell := executor.Shell() if shell == nil { return false, nil } var script []string if shell.PreBuildScript != "" { script = append(script, shell.PreBuildScript) } for _, step := range build.Steps { if StepToBuildStage(step) == stage { script = append(script, step.Script...) if step.Name == "release" { for i, s := range step.Script { script[i] = build.GetAllVariables().ExpandValue(s) } } break } } if shell.PostBuildScript != "" { script = append(script, shell.PostBuildScript) } // if no script, no-op if len(script) == 0 { return true, nil // handled, but nothing to do } return true, []schema.Step{ { Name: func(s string) *string { return &s }("user_script"), Step: "builtin://script_legacy", Inputs: schema.StepInputs{ "script": script, "debug_trace": build.IsDebugTraceEnabled(), "posix_escape": true, "check_for_errors": build.IsFeatureFlagOn(featureflags.EnableBashExitCodeCheck), "trace_sections": build.IsFeatureFlagOn(featureflags.ScriptSections), }, }, } } } //nolint:gocognit func stagesToConcreteStep(ctx context.Context, executor Executor) ([]schema.Step, error) { info := executor.Shell() if info == nil { return nil, fmt.Errorf("no shell defined for executor") } build := info.Build var opts []builder.Option opts = append(opts, 
builder.WithExecutorName(build.Runner.Executor), builder.WithRunnerName(build.Runner.Name), builder.WithStartedAt(build.startedAt), builder.WithDebug(build.IsDebugTraceEnabled()), builder.WithCloneURL(build.Runner.CloneURL), builder.WithShell(info.Shell), builder.WithLoginShell(info.Type == LoginShell), builder.WithCacheDir(build.CacheDir), builder.WithSafeDirectoryCheckout(build.SafeDirectoryCheckout), builder.WithArtifactTimeouts( build.Runner.Artifact.GetUploadTimeout(), build.Runner.Artifact.GetResponseHeaderTimeout(), ), builder.WithPreBuildScript([]string{info.PreBuildScript}), builder.WithPostBuildScript([]string{info.PostBuildScript}), builder.WithPreCloneScript(func() []string { var s []string if info.PreGetSourcesScript != "" { s = append(s, info.PreGetSourcesScript) } h := info.Build.Hooks.Get(spec.HookPreGetSourcesScript) if len(h.Script) > 0 { s = append(s, h.Script...) } return s }()), builder.WithPostCloneScript(func() []string { var s []string h := info.Build.Hooks.Get(spec.HookPostGetSourcesScript) if len(h.Script) > 0 { s = append(s, h.Script...) } if info.PostGetSourcesScript != "" { s = append(s, info.PostGetSourcesScript) } return s }()), builder.WithGitCleanConfig(func() bool { // It's by default disabled for the shell executor or when the git // strategy is "none", and enabled otherwise; explicit // configuration however always has precedence. 
if build.Runner.CleanGitConfig != nil { return *build.Runner.CleanGitConfig } switch build.Runner.Executor { case "shell", "shell-integration-test": return false default: return true } }()), builder.WithGitalyCorrelationID(build.JobRequestCorrelationID), builder.WithUserAgent(fmt.Sprintf("%s %s %s/%s", AppVersion.Name, AppVersion.Version, AppVersion.OS, AppVersion.Architecture)), ) //nolint:nestif if build.Runner.Cache != nil { opts = append(opts, builder.WithCacheMaxArchiveSize(build.Runner.Cache.MaxUploadedArchiveSize), builder.WithCacheDownloadDescriptor(func(cacheKey string) (cacheprovider.Descriptor, error) { adapter := cache.GetAdapter(build.Runner.Cache, build.GetBuildTimeout(), build.Runner.ShortDescription(), fmt.Sprintf("%d", build.JobInfo.ProjectID), cacheKey, build.IsFeatureFlagOn(featureflags.HashCacheKeys)) goCloudURL, err := adapter.GetGoCloudURL(ctx, false) if goCloudURL.URL != nil { return cacheprovider.Descriptor{ GoCloudURL: true, URL: goCloudURL.URL.String(), Env: goCloudURL.Environment, }, err } if url := adapter.GetDownloadURL(ctx); url.URL != nil { return cacheprovider.Descriptor{ URL: url.URL.String(), Headers: url.Headers, }, nil } return cacheprovider.Descriptor{}, nil }), builder.WithCacheUploadDescriptor(func(cacheKey string) (cacheprovider.Descriptor, error) { adapter := cache.GetAdapter(build.Runner.Cache, build.GetBuildTimeout(), build.Runner.ShortDescription(), fmt.Sprintf("%d", build.JobInfo.ProjectID), cacheKey, build.IsFeatureFlagOn(featureflags.HashCacheKeys)) goCloudURL, err := adapter.GetGoCloudURL(ctx, true) if err != nil { return cacheprovider.Descriptor{}, err } if goCloudURL.URL != nil { return cacheprovider.Descriptor{ GoCloudURL: true, URL: goCloudURL.URL.String(), Env: goCloudURL.Environment, }, err } url := adapter.GetUploadURL(ctx) if url.URL == nil { return cacheprovider.Descriptor{}, err } desc := cacheprovider.Descriptor{ URL: url.URL.String(), Headers: url.Headers, } if headURL := adapter.GetHeadURL(ctx); 
headURL.URL != nil { desc.HeadURL = headURL.URL.String() } return desc, nil }), ) } concrete, err := builder.Build(build.Job, build.GetAllVariables(), opts...) if err != nil { return nil, err } return []schema.Step{ { Name: func(s string) *string { return &s }("concrete"), Step: "builtin://concrete", Inputs: schema.StepInputs{ "config": moa.EscapeTemplate(string(concrete)), }, }, }, nil } ================================================ FILE: common/build_step_dispatch_test.go ================================================ //go:build !integration package common import ( "encoding/json" "fmt" "testing" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common/spec" ) func TestBuildConcreteKitchenSink(t *testing.T) { build := Build{ Runner: &RunnerConfig{ RunnerSettings: RunnerSettings{ Executor: "shell", PreGetSourcesScript: "echo 'pre get sources'", PostGetSourcesScript: "echo 'post get sources'", PreBuildScript: "echo 'pre build sources'", PostBuildScript: "echo 'post build sources'", CloneURL: "https://example.com/override.git", }, }, Job: spec.Job{ ID: 123456789, Token: "test-job-token", Hooks: spec.Hooks{ {Name: spec.HookPreGetSourcesScript, Script: []string{"echo 'job pre get sources script'"}}, {Name: spec.HookPostGetSourcesScript, Script: []string{"echo 'job post get sources script'"}}, }, Variables: spec.Variables{ spec.Variable{Key: "A_BASIC_VAR", Value: "BASIC"}, }, GitInfo: GetGitInfo(repoRemoteURL), Steps: spec.Steps{ spec.Step{ Name: spec.StepNameScript, Script: []string{"echo 'script'"}, When: spec.StepWhenAlways, AllowFailure: false, }, spec.Step{ Name: "release", Script: []string{"echo 'release'"}, When: spec.StepWhenOnSuccess, }, spec.Step{ Name: spec.StepNameAfterScript, Script: []string{"echo 'after_script'"}, When: spec.StepWhenAlways, }, }, Artifacts: spec.Artifacts{ { Name: "", Untracked: true, Paths: []string{"file1", "file2"}, }, { Name: "dotenv", Paths: []string{"dotenv"}, Format: spec.ArtifactFormatRaw, Type: 
"dotenv", ExpireIn: "7 days", When: spec.ArtifactWhenOnFailure, }, }, Cache: spec.Caches{ { Key: "foobar", Policy: spec.CachePolicyPullPush, Paths: []string{"cache_me_if_you_can"}, When: spec.CacheWhenAlways, }, }, RunnerInfo: spec.RunnerInfo{ Timeout: DefaultTimeout, }, }, } executor := NewMockExecutor(t) executor.EXPECT().Shell().RunAndReturn(func() *ShellScriptInfo { return &ShellScriptInfo{ Shell: "bash", Build: &build, Type: NormalShell, PreGetSourcesScript: build.Runner.PreGetSourcesScript, PostGetSourcesScript: build.Runner.PostGetSourcesScript, PreBuildScript: build.Runner.PreBuildScript, PostBuildScript: build.Runner.PostBuildScript, } }) expectedJSON := fmt.Sprintf(`{ "after_script_ignore_errors": true, "after_script_timeout": 300000000000, "artifacts_archive": [ { "compression_level": "default", "on_success": true, "paths": ["file1", "file2"], "response_header_timeout": 600000000000, "timeout": 3600000000000, "untracked": true }, { "artifact_name": "dotenv", "compression_level": "default", "expire_in": "7 days", "format": "raw", "on_failure": true, "paths": ["dotenv"], "response_header_timeout": 600000000000, "timeout": 3600000000000, "type": "dotenv" } ], "cache_archive": [ { "compression_level": "default", "descriptor": {}, "key": "foobar", "name": "foobar", "on_failure": true, "on_success": true, "paths": ["cache_me_if_you_can"], "timeout": 10 } ], "cache_extract": [ { "max_attempts": 1, "paths": ["cache_me_if_you_can"], "sources": [ { "descriptor": {}, "key": "foobar", "name": "foobar" } ], "timeout": 10 } ], "cleanup": { "git_clean_flags": ["-ffdx"], "git_strategy": "clone", "submodule_strategy": "none" }, "get_sources": { "checkout": true, "clear_worktree_on_retry": true, "git_clean_flags": ["-ffdx"], "git_fetch_flags": ["--prune", "--quiet"], "git_strategy": "clone", "instead_ofs": [ [ "https://gitlab-ci-token:test-job-token@example.com/override.git", "https://example.com/override.git" ], [ "https://gitlab-ci-token:test-job-token@gitlab.com", 
"https://gitlab.com" ] ], "max_attempts": 1, "object_format": "sha1", "post_clone_step": { "on_success": true, "script": [ "echo 'job post get sources script'", "echo 'post get sources'" ], "step": "post_clone_script" }, "pre_clone_step": { "on_success": true, "script": ["echo 'pre get sources'", "echo 'job pre get sources script'"], "step": "pre_clone_script" }, "ref": "main", "refspecs": [ "+refs/heads/*:refs/origin/heads/*", "+refs/tags/*:refs/tags/*" ], "remote_host": "https://example.com", "repo_url": "https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-pipeline-tests/gitlab-test.git", "sha": "69b18e5ed3610cf646119c3e38f462c64ec462b7", "submodule_strategy": "none", "use_bundled_uris": true, "user_agent": "%s %s %s/%s" }, "id": 123456789, "shell": "bash", "steps": [ { "on_failure": true, "on_success": true, "script": [ "echo 'pre build sources'", "echo 'script'", "echo 'post build sources'" ], "step": "script" }, { "on_success": true, "script": [ "echo 'pre build sources'", "echo 'release'", "echo 'post build sources'" ], "step": "release" }, { "allow_failure": true, "on_failure": true, "on_success": true, "script": ["echo 'after_script'"], "step": "after_script" } ], "timeout": 7200000000000, "token": "test-job-token" } `, AppVersion.Name, AppVersion.Version, AppVersion.OS, AppVersion.Architecture) schema, err := stagesToConcreteStep(t.Context(), executor) require.NoError(t, err) require.Equal(t, 1, len(schema)) var a, b any require.NoError(t, json.Unmarshal([]byte(expectedJSON), &a)) require.NoError(t, json.Unmarshal([]byte(schema[0].Inputs["config"].(string)), &b)) msg, _ := json.MarshalIndent(b, "", " ") require.Equal(t, a, b, string(msg)) } ================================================ FILE: common/build_test.go ================================================ //go:build !integration package common import ( "bytes" "context" "errors" "fmt" "net/http" "net/http/httptest" "net/url" "os" "path/filepath" "strconv" "strings" "testing" "time" 
"github.com/gorilla/websocket" "github.com/sirupsen/logrus" "github.com/sirupsen/logrus/hooks/test" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common/buildlogger" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers/docker" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" "gitlab.com/gitlab-org/gitlab-runner/session" "gitlab.com/gitlab-org/gitlab-runner/session/terminal" "gitlab.com/gitlab-org/gitlab-runner/steps" "gitlab.com/gitlab-org/gitlab-runner/steps/stepstest" "gitlab.com/gitlab-org/moa/value" "gitlab.com/gitlab-org/step-runner/pkg/api/client" "gitlab.com/gitlab-org/step-runner/schema/v1" ) func init() { s := MockShell{} s.On("GetName").Return("script-shell") s.On("IsDefault").Return(false).Maybe() s.On("GenerateScript", mock.Anything, mock.Anything, mock.Anything).Return("script", nil) RegisterShell(&s) } func TestBuildPredefinedVariables(t *testing.T) { for _, rootDir := range []string{"/root/dir1", "/root/dir2"} { t.Run(rootDir, func(t *testing.T) { build := runSuccessfulMockBuild(t, func(options ExecutorPrepareOptions) error { return options.Build.StartBuild(rootDir, "/cache/dir", false, false, false) }) projectDir := build.GetAllVariables().Value("CI_PROJECT_DIR") assert.NotEmpty(t, projectDir, "should have CI_PROJECT_DIR") }) } } func TestBuildTimeoutExposed(t *testing.T) { const testTimeout = 180 tests := map[string]struct { forceDefault bool customTimeout int expectedTimeout int }{ "no timeout specified": { forceDefault: true, expectedTimeout: DefaultTimeout, }, "timeout with arbitrary value": { customTimeout: testTimeout, expectedTimeout: testTimeout, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { build := runSuccessfulMockBuild(t, func(options ExecutorPrepareOptions) error { if !tt.forceDefault { options.Build.RunnerInfo.Timeout = tt.customTimeout } return 
options.Build.StartBuild("/root/dir", "/cache/dir", false, false, false) }) exposedTimeout, err := strconv.Atoi(build.GetAllVariables().Value("CI_JOB_TIMEOUT")) require.NoError(t, err) assert.Equal(t, exposedTimeout, tt.expectedTimeout) }) } } func TestGetPrepareTimeout(t *testing.T) { tests := map[string]struct { runnerConfig *RunnerConfig jobTimeout int // in seconds, matching RunnerInfo.Timeout expectedTimeout time.Duration }{ "nil runner config": { runnerConfig: nil, jobTimeout: 600, expectedTimeout: 600 * time.Second, }, "nil prepare_timeout": { runnerConfig: &RunnerConfig{}, jobTimeout: 600, expectedTimeout: 600 * time.Second, }, "prepare_timeout is valid": { runnerConfig: &RunnerConfig{ RunnerSettings: RunnerSettings{ PrepareTimeout: func() *time.Duration { d := 300 * time.Second; return &d }(), }, }, jobTimeout: 600, expectedTimeout: 300 * time.Second, }, "prepare_timeout equals job timeout": { runnerConfig: &RunnerConfig{ RunnerSettings: RunnerSettings{ PrepareTimeout: func() *time.Duration { d := 600 * time.Second; return &d }(), }, }, jobTimeout: 600, expectedTimeout: 600 * time.Second, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { build := &Build{ Runner: tt.runnerConfig, } build.RunnerInfo.Timeout = tt.jobTimeout assert.Equal(t, tt.expectedTimeout, build.GetPrepareTimeout()) }) } warningTests := map[string]struct { prepareTimeout time.Duration jobTimeout int // in seconds, matching RunnerInfo.Timeout expectedTimeout time.Duration expectedWarning string }{ "prepare_timeout is zero": { prepareTimeout: 0, jobTimeout: 600, expectedTimeout: 600 * time.Second, expectedWarning: "prepare_timeout (0s) must be greater than 0; using job timeout (10m0s)", }, "prepare_timeout is negative": { prepareTimeout: -1 * time.Second, jobTimeout: 600, expectedTimeout: 600 * time.Second, expectedWarning: "prepare_timeout (-1s) must be greater than 0; using job timeout (10m0s)", }, "prepare_timeout exceeds job timeout": { prepareTimeout: 601 * 
time.Second, jobTimeout: 600, expectedTimeout: 600 * time.Second, expectedWarning: "prepare_timeout (10m1s) exceeds job timeout (10m0s); using job timeout", }, } for name, tt := range warningTests { t.Run(name, func(t *testing.T) { logger, hook := test.NewNullLogger() build := &Build{ Runner: &RunnerConfig{ RunnerCredentials: RunnerCredentials{ Logger: logger, }, RunnerSettings: RunnerSettings{ PrepareTimeout: &tt.prepareTimeout, }, }, } build.RunnerInfo.Timeout = tt.jobTimeout assert.Equal(t, tt.expectedTimeout, build.GetPrepareTimeout()) require.NotNil(t, hook.LastEntry()) assert.Equal(t, tt.expectedWarning, hook.LastEntry().Message) }) } } func matchBuildStage(buildStage BuildStage) interface{} { return mock.MatchedBy(func(cmd ExecutorCommand) bool { return cmd.Stage == buildStage }) } func TestBuildRun(t *testing.T) { runSuccessfulMockBuild(t, func(options ExecutorPrepareOptions) error { return nil }) } func TestBuildPanic(t *testing.T) { panicFn := func(mock.Arguments) { panic("panic message") } tests := map[string]struct { setupMockExecutor func(*MockExecutor) }{ "prepare": { setupMockExecutor: func(executor *MockExecutor) { executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything). Run(panicFn).Once() }, }, "run": { setupMockExecutor: func(executor *MockExecutor) { executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything). Return(nil).Once() executor.On("Finish", mock.Anything).Once() executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}) executor.On("Run", mock.Anything).Run(panicFn).Once() executor.On("Cleanup").Once() }, }, "cleanup": { setupMockExecutor: func(executor *MockExecutor) { executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything). 
Return(nil).Once() executor.On("Finish", mock.Anything).Once() executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}) executor.On("Run", mock.Anything).Once() executor.On("Cleanup").Run(panicFn).Once() }, }, "shell": { setupMockExecutor: func(executor *MockExecutor) { executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything). Return(nil).Once() executor.On("Finish", mock.Anything).Once() executor.On("Shell").Run(panicFn) executor.On("Cleanup").Once() }, }, "run+cleanup": { setupMockExecutor: func(executor *MockExecutor) { executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything). Return(nil).Once() executor.On("Finish", mock.Anything).Once() executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}) executor.On("Run", mock.Anything).Run(panicFn).Once() executor.On("Cleanup").Run(panicFn).Once() }, }, "finish": { setupMockExecutor: func(executor *MockExecutor) { executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything). Return(nil).Once() executor.On("Finish", mock.Anything).Run(panicFn).Once() executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}) executor.On("Run", mock.Anything).Once() executor.On("Cleanup").Once() }, }, "finish+cleanup+shell": { setupMockExecutor: func(executor *MockExecutor) { executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything). Return(nil).Once() executor.On("Finish", mock.Anything).Run(panicFn).Once() executor.On("Shell").Run(panicFn).Return(&ShellScriptInfo{Shell: "script-shell"}) executor.On("Cleanup").Run(panicFn).Once() }, }, "run+finish+cleanup": { setupMockExecutor: func(executor *MockExecutor) { executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything). 
Return(nil).Once() executor.On("Finish", mock.Anything).Run(panicFn).Once() executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}) executor.On("Run", mock.Anything).Run(panicFn).Once() executor.On("Cleanup").Run(panicFn).Once() }, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { executor, provider := setupMockExecutorAndProvider(t) tt.setupMockExecutor(executor) res, err := GetSuccessfulBuild() require.NoError(t, err) cfg := &RunnerConfig{} cfg.Executor = t.Name() build, err := NewBuild(res, cfg, nil, nil, provider) require.NoError(t, err) var out bytes.Buffer err = build.Run(&Config{}, &Trace{Writer: &out}) assert.EqualError(t, err, "panic: panic message") assert.Contains(t, out.String(), "panic: panic message") }) } } func TestJobImageExposed(t *testing.T) { tests := map[string]struct { image string vars []spec.Variable expectVarExists bool expectImageName string }{ "normal image exposed": { image: "alpine:3.14", expectVarExists: true, expectImageName: "alpine:3.14", }, "image with variable expansion": { image: "${IMAGE}:3.14", vars: []spec.Variable{{Key: "IMAGE", Value: "alpine", Public: true}}, expectVarExists: true, expectImageName: "alpine:3.14", }, "no image specified": { image: "", expectVarExists: false, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { build := runSuccessfulMockBuild(t, func(options ExecutorPrepareOptions) error { options.Build.Image.Name = tt.image options.Build.Variables = append(options.Build.Variables, tt.vars...) 
				return options.Build.StartBuild("/root/dir", "/cache/dir", false, false, false)
			})

			actualVarExists := false
			for _, v := range build.GetAllVariables() {
				if v.Key == "CI_JOB_IMAGE" {
					actualVarExists = true
					break
				}
			}
			assert.Equal(t, tt.expectVarExists, actualVarExists, "CI_JOB_IMAGE exported?")

			if tt.expectVarExists {
				actualJobImage := build.GetAllVariables().Value("CI_JOB_IMAGE")
				assert.Equal(t, tt.expectImageName, actualJobImage)
			}
		})
	}
}

// TestBuildRunNoModifyConfig ensures that an executor mutating its copy of the
// runner configuration during Prepare does not leak back into the caller's
// original RunnerConfig.
func TestBuildRunNoModifyConfig(t *testing.T) {
	expectHostAddr := "10.0.0.1"
	p := setupSuccessfulMockExecutor(t, func(options ExecutorPrepareOptions) error {
		// Mutate the config the executor was handed; the original must keep 10.0.0.1.
		options.Config.Docker.Credentials.Host = "10.0.0.2"
		return nil
	})

	rc := &RunnerConfig{
		RunnerSettings: RunnerSettings{
			Docker: &DockerConfig{
				Credentials: docker.Credentials{
					Host: expectHostAddr,
				},
			},
		},
	}
	build := registerExecutorWithSuccessfulBuild(t, p, rc)
	err := build.Run(&Config{}, &Trace{Writer: os.Stdout})
	assert.NoError(t, err)
	assert.Equal(t, expectHostAddr, rc.Docker.Credentials.Host)
}

// TestRetryPrepare verifies that Prepare is retried and the build succeeds
// once a later attempt passes (two failures, then success).
func TestRetryPrepare(t *testing.T) {
	PreparationRetryInterval = 0

	e := NewMockExecutor(t)
	p := NewMockExecutorProvider(t)

	p.On("GetFeatures", mock.Anything).Return(nil).Once()
	p.On("Create").Return(e).Times(3)

	// Prepare plan
	e.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(errors.New("prepare failed")).Twice()
	e.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(nil).Once()
	e.On("Cleanup").Times(3)

	// Succeed a build script
	e.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"})
	e.On("Run", mock.Anything).Return(nil)
	e.On("Finish", nil).Once()

	build := registerExecutorWithSuccessfulBuild(t, p, new(RunnerConfig))
	err := build.Run(&Config{}, &Trace{Writer: os.Stdout})
	assert.NoError(t, err)
}

// TestPrepareFailure verifies that when every Prepare attempt fails, the build
// fails with the Prepare error after all retries are exhausted.
func TestPrepareFailure(t *testing.T) {
	PreparationRetryInterval = 0

	e := NewMockExecutor(t)
	p := NewMockExecutorProvider(t)

	p.On("GetFeatures", mock.Anything).Return(nil).Once()
	p.On("Create").Return(e).Times(3)

	// Prepare plan: all three attempts fail; Cleanup runs after each.
	e.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(errors.New("prepare failed")).Times(3)
	e.On("Cleanup").Times(3)

	build := registerExecutorWithSuccessfulBuild(t, p, new(RunnerConfig))
	err := build.Run(&Config{}, &Trace{Writer: os.Stdout})
	assert.EqualError(t, err, "prepare failed")
}

// TestPrepareFailureOnBuildError verifies that a BuildError returned from
// Prepare is not retried (single Prepare/Cleanup expectation) and propagates
// to the caller.
func TestPrepareFailureOnBuildError(t *testing.T) {
	executor, provider := setupMockExecutorAndProvider(t)
	executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(&BuildError{}).Once()
	executor.On("Cleanup").Once()

	build := registerExecutorWithSuccessfulBuild(t, provider, new(RunnerConfig))
	err := build.Run(&Config{}, &Trace{Writer: os.Stdout})

	expectedErr := new(BuildError)
	assert.ErrorIs(t, err, expectedErr)
}

// TestPrepareEnvironmentFailure verifies that an error from running the
// prepare build stage (BuildStagePrepare) is propagated as the build result.
func TestPrepareEnvironmentFailure(t *testing.T) {
	testErr := errors.New("test-err")

	e := NewMockExecutor(t)
	p := NewMockExecutorProvider(t)

	p.On("GetFeatures", mock.Anything).Return(nil).Once()
	p.On("Create").Return(e).Once()
	e.On("Prepare", mock.Anything, mock.Anything, mock.Anything).Return(nil).Once()
	e.On("Cleanup").Once()
	e.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"})
	e.On("Run", matchBuildStage(BuildStagePrepare)).Return(testErr).Once()
	e.On("Finish", mock.Anything).Once()

	successfulBuild, err := GetSuccessfulBuild()
	assert.NoError(t, err)
	build := &Build{
		Job: successfulBuild,
		Runner: &RunnerConfig{
			RunnerSettings: RunnerSettings{
				Executor: "build-run-prepare-environment-failure-on-build-error",
			},
		},
		ExecutorProvider: p,
	}

	err = build.Run(&Config{}, &Trace{Writer: os.Stdout})
	assert.ErrorIs(t, err, testErr)
}

// TestJobFailure verifies that a failing job script is retried, the failure is
// reported to the trace with its exit code, and the result is a BuildError.
func TestJobFailure(t *testing.T) {
	executor, provider := setupMockExecutorAndProvider(t)
	executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(nil).Once()
	executor.On("Cleanup").Once()

	// Set up a failing build script
	thrownErr := &BuildError{Inner: errors.New("test error"), ExitCode: 1}
	executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"})
	executor.On("Run", matchBuildStage(BuildStagePrepare)).Return(nil).Once()
	// Any non-prepare, non-cleanup stage fails three times (retry exhaustion).
	executor.On("Run", mock.Anything).Return(thrownErr).Times(3)
	executor.On("Run", matchBuildStage(BuildStageCleanup)).Return(nil).Once()
	executor.On("Finish", thrownErr).Once()

	failedBuild, err := GetFailedBuild()
	assert.NoError(t, err)
	build := &Build{
		Job: failedBuild,
		Runner: &RunnerConfig{
			RunnerSettings: RunnerSettings{
				Executor: "build-run-job-failure",
			},
		},
		ExecutorProvider: provider,
	}

	trace := NewMockLightJobTrace(t)
	trace.On("IsStdout").Return(true)
	trace.On("SetCancelFunc", mock.Anything).Once()
	trace.On("SetAbortFunc", mock.Anything).Once()
	trace.On("SetSupportedFailureReasonMapper", mock.Anything).Once()
	// The trace must be failed with the thrown error and its exit code.
	trace.On("Fail", thrownErr, JobFailureData{Reason: "", ExitCode: 1, Mode: JobExecutionModeTraditional}).Return(nil).Once()

	err = build.Run(&Config{}, trace)

	expectedErr := new(BuildError)
	assert.ErrorIs(t, err, expectedErr)
}

// TestJobFailureOnExecutionTimeout verifies that a job exceeding its timeout
// is failed with the JobExecutionTimeout failure reason.
func TestJobFailureOnExecutionTimeout(t *testing.T) {
	executor, provider := setupMockExecutorAndProvider(t)
	executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(nil).Once()
	executor.On("Cleanup").Once()

	// Succeed a build script
	executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"})
	// step_script sleeps past the 1-second job timeout configured below.
	executor.On("Run", matchBuildStage("step_script")).Run(func(mock.Arguments) {
		time.Sleep(2 * time.Second)
	}).Return(nil)
	executor.On("Run", mock.Anything).Return(nil)
	executor.On("Finish", mock.Anything).Once()

	build := registerExecutorWithSuccessfulBuild(t, provider, new(RunnerConfig))
	build.Job.RunnerInfo.Timeout = 1

	trace := NewMockLightJobTrace(t)
	trace.On("IsStdout").Return(true)
	trace.On("SetCancelFunc", mock.Anything).Twice()
	trace.On("SetAbortFunc", mock.Anything).Once()
	trace.On("SetSupportedFailureReasonMapper", mock.Anything).Once()
	// Fail must be called with a non-nil error and the timeout failure reason.
	trace.On("Fail", mock.Anything, JobFailureData{Reason: JobExecutionTimeout, Mode: JobExecutionModeTraditional}).Run(func(arguments mock.Arguments) {
		assert.Error(t, arguments.Get(0).(error))
	}).Return(nil).Once()

	err := build.Run(&Config{}, trace)

	expectedErr := &BuildError{FailureReason: JobExecutionTimeout}
	assert.ErrorIs(t, err, expectedErr)
}

// TestRunFailureRunsAfterScriptAndArtifactsOnFailure verifies that when
// step_script fails, after_script still runs, and the on-failure artifact
// upload and cache archive stages are executed.
func TestRunFailureRunsAfterScriptAndArtifactsOnFailure(t *testing.T) {
	executor, provider := setupMockExecutorAndProvider(t)
	executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(nil).Once()
	executor.On("Cleanup").Once()

	// Fail a build script
	executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"})
	executor.On("Run", matchBuildStage(BuildStagePrepare)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageGetSources)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageRestoreCache)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageDownloadArtifacts)).Return(nil).Once()
	executor.On("Run", matchBuildStage("step_script")).Return(errors.New("build fail")).Once()
	// Despite the script failure, after_script and the on-failure stages still run.
	executor.On("Run", matchBuildStage(BuildStageAfterScript)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageUploadOnFailureArtifacts)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageArchiveOnFailureCache)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageCleanup)).Return(nil).Once()
	executor.On("Finish", errors.New("build fail")).Once()

	failedBuild, err := GetFailedBuild()
	assert.NoError(t, err)
	build := &Build{
		Job: failedBuild,
		Runner: &RunnerConfig{
			RunnerSettings: RunnerSettings{
				Executor: "build-run-run-failure",
			},
		},
		ExecutorProvider: provider,
	}

	err = build.Run(&Config{}, &Trace{Writer: os.Stdout})
	assert.EqualError(t, err, "build fail")
}

// TestGetSourcesRunFailure verifies that get_sources failures are retried per
// GET_SOURCES_ATTEMPTS and the build ultimately fails with the stage error.
func TestGetSourcesRunFailure(t *testing.T) {
	executor, provider := setupMockExecutorAndProvider(t)
	executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(nil).Once()
	executor.On("Cleanup").Once()

	// Fail a build script
	executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"})
	executor.On("Run", matchBuildStage(BuildStagePrepare)).Return(nil).Once()
	// Register expectations for up to the maximum attempt count; clear_worktree
	// is only expected once, before the first get_sources attempt.
	for attempt := 0; attempt < 10; attempt++ {
		if attempt == 0 {
			executor.On("Run", matchBuildStage(BuildStageClearWorktree)).Return(nil)
		}
		executor.On("Run", matchBuildStage(BuildStageGetSources)).Return(errors.New("build fail"))
	}
	executor.On("Run", matchBuildStage(BuildStageArchiveOnFailureCache)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageUploadOnFailureArtifacts)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageCleanup)).Return(nil).Once()
	executor.On("Finish", errors.New("build fail")).Once()

	build := registerExecutorWithSuccessfulBuild(t, provider, new(RunnerConfig))
	build.Variables = append(build.Variables, spec.Variable{Key: "GET_SOURCES_ATTEMPTS", Value: "3"})
	err := build.Run(&Config{}, &Trace{Writer: os.Stdout})
	assert.EqualError(t, err, "build fail")
}

// TestArtifactDownloadRunFailure verifies that download_artifacts failures are
// retried per ARTIFACT_DOWNLOAD_ATTEMPTS and then fail the build.
func TestArtifactDownloadRunFailure(t *testing.T) {
	executor, provider := setupMockExecutorAndProvider(t)
	executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(nil).Once()
	executor.On("Cleanup").Once()

	// Fail a build script
	executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"})
	executor.On("Run", matchBuildStage(BuildStagePrepare)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageGetSources)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageRestoreCache)).Return(nil).Once()
	// download_artifacts fails on all three configured attempts.
	executor.On("Run", matchBuildStage(BuildStageDownloadArtifacts)).Return(errors.New("build fail")).Times(3)
	executor.On("Run", matchBuildStage(BuildStageArchiveOnFailureCache)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageUploadOnFailureArtifacts)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageCleanup)).Return(nil).Once()
	executor.On("Finish", errors.New("build fail")).Once()

	build := registerExecutorWithSuccessfulBuild(t, provider, new(RunnerConfig))
	build.Variables = append(build.Variables, spec.Variable{Key: "ARTIFACT_DOWNLOAD_ATTEMPTS", Value: "3"})
	err := build.Run(&Config{}, &Trace{Writer: os.Stdout})
	assert.EqualError(t, err, "build fail")
}

// TestArtifactUploadRunFailure verifies that a failure while uploading
// on-success artifacts fails an otherwise successful build.
func TestArtifactUploadRunFailure(t *testing.T) {
	executor, provider := setupMockExecutorAndProvider(t)
	executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(nil).Once()
	executor.On("Cleanup").Once()

	// Successful build script
	executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}).Times(9)
	executor.On("Run", matchBuildStage(BuildStagePrepare)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageGetSources)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageRestoreCache)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageDownloadArtifacts)).Return(nil).Once()
	executor.On("Run", matchBuildStage("step_script")).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageAfterScript)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageArchiveOnSuccessCache)).Return(nil).Once()
	// Only the artifact upload stage fails.
	executor.On("Run", matchBuildStage(BuildStageUploadOnSuccessArtifacts)).Return(errors.New("upload fail")).Once()
	executor.On("Run", matchBuildStage(BuildStageCleanup)).Return(nil).Once()
	executor.On("Finish", errors.New("upload fail")).Once()

	build := registerExecutorWithSuccessfulBuild(t, provider, new(RunnerConfig))
	successfulBuild := build.Job
	successfulBuild.Artifacts = make(spec.Artifacts, 1)
	successfulBuild.Artifacts[0] = spec.Artifact{
		Name:      "my-artifact",
		Untracked: false,
		Paths:     spec.ArtifactPaths{"cached/*"},
		When:      spec.ArtifactWhenAlways,
	}
	err := build.Run(&Config{}, &Trace{Writer: os.Stdout})
	assert.EqualError(t, err, "upload fail")
}

// TestArchiveCacheOnScriptFailure verifies that the on-failure cache archive
// and artifact upload stages run when step_script fails.
func TestArchiveCacheOnScriptFailure(t *testing.T) {
	executor, provider := setupMockExecutorAndProvider(t)
	executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(nil).Once()
	executor.On("Cleanup").Once()

	// Fail a build script
	executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}).Times(9)
	executor.On("Run", matchBuildStage(BuildStagePrepare)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageGetSources)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageRestoreCache)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageDownloadArtifacts)).Return(nil).Once()
	executor.On("Run", matchBuildStage("step_script")).Return(errors.New("script failure")).Once()
	// On script failure the on-failure cache/artifact stages still run.
	executor.On("Run", matchBuildStage(BuildStageAfterScript)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageArchiveOnFailureCache)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageUploadOnFailureArtifacts)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageCleanup)).Return(nil).Once()
	executor.On("Finish", errors.New("script failure")).Once()

	build := registerExecutorWithSuccessfulBuild(t, provider, new(RunnerConfig))
	err := build.Run(&Config{}, &Trace{Writer: os.Stdout})
	assert.EqualError(t, err, "script failure")
}

// TestUploadArtifactsOnArchiveCacheFailure verifies that on-success artifacts
// are still uploaded even when archiving the cache fails, and the build then
// fails with the cache error.
func TestUploadArtifactsOnArchiveCacheFailure(t *testing.T) {
	executor, provider := setupMockExecutorAndProvider(t)
	executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(nil).Once()
	executor.On("Cleanup").Once()

	// Successful build script
	executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}).Times(9)
	executor.On("Run", matchBuildStage(BuildStagePrepare)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageGetSources)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageRestoreCache)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageDownloadArtifacts)).Return(nil).Once()
	executor.On("Run", matchBuildStage("step_script")).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageAfterScript)).Return(nil).Once()
	// Cache archiving fails, but the artifact upload stage still runs.
	executor.On("Run", matchBuildStage(BuildStageArchiveOnSuccessCache)).Return(errors.New("cache failure")).Once()
	executor.On("Run", matchBuildStage(BuildStageUploadOnSuccessArtifacts)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageCleanup)).Return(nil).Once()
	executor.On("Finish", errors.New("cache failure")).Once()

	build := registerExecutorWithSuccessfulBuild(t, provider, new(RunnerConfig))
	err := build.Run(&Config{}, &Trace{Writer: os.Stdout})
	assert.EqualError(t, err, "cache failure")
}

// TestRestoreCacheRunFailure verifies that restore_cache failures are retried
// per RESTORE_CACHE_ATTEMPTS and then fail the build.
func TestRestoreCacheRunFailure(t *testing.T) {
	executor, provider := setupMockExecutorAndProvider(t)
	executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(nil).Once()
	executor.On("Cleanup").Once()

	// Fail a build script
	executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"})
	executor.On("Run", matchBuildStage(BuildStagePrepare)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageGetSources)).Return(nil).Once()
	// restore_cache fails on all three configured attempts.
	executor.On("Run", matchBuildStage(BuildStageRestoreCache)).Return(errors.New("build fail")).Times(3)
	executor.On("Run", matchBuildStage(BuildStageArchiveOnFailureCache)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageUploadOnFailureArtifacts)).Return(nil).Once()
	executor.On("Run", matchBuildStage(BuildStageCleanup)).Return(nil).Once()
	executor.On("Finish", errors.New("build fail")).Once()

	build := registerExecutorWithSuccessfulBuild(t, provider, new(RunnerConfig))
	build.Variables = append(build.Variables, spec.Variable{Key: "RESTORE_CACHE_ATTEMPTS", Value: "3"})
	err := build.Run(&Config{}, &Trace{Writer: os.Stdout})
	assert.EqualError(t, err, "build fail")
}

// TestRunWrongAttempts verifies that an out-of-range *_ATTEMPTS value (0 here,
// valid range is [1, 10]) fails the build with a descriptive error.
func TestRunWrongAttempts(t *testing.T) {
	executor, provider := setupMockExecutorAndProvider(t)
	executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything).
		Return(nil).Once()
	executor.On("Cleanup").Once()

	// Fail a build script
	executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"})
	executor.On("Run", mock.Anything).Return(nil).Once()
	executor.
		On("Run", mock.Anything).
		Return(errors.New("number of attempts out of the range [1, 10] for stage: get_sources"))
	executor.On(
		"Finish",
		errors.New("number of attempts out of the range [1, 10] for stage: get_sources"),
	)

	build := registerExecutorWithSuccessfulBuild(t, provider, new(RunnerConfig))
	build.Variables = append(build.Variables, spec.Variable{Key: "GET_SOURCES_ATTEMPTS", Value: "0"})
	err := build.Run(&Config{}, &Trace{Writer: os.Stdout})
	assert.EqualError(t, err, "number of attempts out of the range [1, 10] for stage: get_sources")
}

// TestRunSuccessOnSecondAttempt verifies that a get_sources failure on the
// first attempt is retried and the build succeeds on the second attempt.
func TestRunSuccessOnSecondAttempt(t *testing.T) {
	executor, provider := setupMockExecutorAndProvider(t)

	// We run everything once
	executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything).Return(nil).Once()
	executor.On("Finish", mock.Anything).Once()
	executor.On("Cleanup").Once()

	// Run script successfully
	executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"})

	var getSourcesRunAttempts int
	executor.On("Run", mock.Anything).Return(func(cmd ExecutorCommand) error {
		// Fail only the first get_sources attempt.
		if cmd.Stage == BuildStageGetSources {
			getSourcesRunAttempts++
			if getSourcesRunAttempts == 1 {
				return errors.New("build fail")
			}
		}
		return nil
	})

	build := registerExecutorWithSuccessfulBuild(t, provider, new(RunnerConfig))
	build.Variables = append(build.Variables, spec.Variable{Key: "GET_SOURCES_ATTEMPTS", Value: "3"})
	err := build.Run(&Config{}, &Trace{Writer: os.Stdout})
	assert.NoError(t, err)
	assert.Equal(t, 2, getSourcesRunAttempts)
}

// TestDebugTrace verifies how CI_DEBUG_TRACE enables debug tracing, including
// the runner-level DebugTraceDisabled override and the warning it records.
func TestDebugTrace(t *testing.T) {
	testCases := map[string]struct {
		debugTraceVariableValue   string
		expectedValue             bool
		debugTraceFeatureDisabled bool
		expectedLogOutput         string
	}{
		"variable not set": {
			expectedValue: false,
		},
		"variable set to false": {
			debugTraceVariableValue: "false",
			expectedValue:           false,
		},
		"variable set to true": {
			debugTraceVariableValue: "true",
			expectedValue:           true,
		},
		"variable set to a non-bool value": {
			debugTraceVariableValue: "xyz",
			expectedValue:           false,
		},
		"variable set to true and feature disabled from configuration": {
			debugTraceVariableValue:   "true",
			expectedValue:             false,
			debugTraceFeatureDisabled: true,
			expectedLogOutput:         "CI_DEBUG_TRACE: usage is disabled on this Runner",
		},
	}

	for testName, testCase := range testCases {
		t.Run(testName, func(t *testing.T) {
			build := &Build{
				Job: spec.Job{
					Variables: spec.Variables{},
				},
				Runner: &RunnerConfig{
					RunnerSettings: RunnerSettings{
						DebugTraceDisabled: testCase.debugTraceFeatureDisabled,
					},
				},
			}

			if testCase.debugTraceVariableValue != "" {
				build.Variables = append(
					build.Variables,
					spec.Variable{Key: "CI_DEBUG_TRACE", Value: testCase.debugTraceVariableValue, Public: true},
				)
			}

			isTraceEnabled := build.IsDebugTraceEnabled()
			assert.Equal(t, testCase.expectedValue, isTraceEnabled)

			if testCase.expectedLogOutput != "" {
				output := errors.Join(build.Settings().Errors...).Error()
				assert.Contains(t, output, testCase.expectedLogOutput)
			}
		})
	}
}

// TestDefaultEnvVariables verifies that CI_PROJECT_DIR is derived from the
// build directory (with Windows path styles normalized) and that CI_SERVER is
// always exported.
func TestDefaultEnvVariables(t *testing.T) {
	tests := map[string]struct {
		buildDir      string
		expectedValue string
	}{
		"UNIX-style BuildDir": {
			buildDir:      "/tmp/test-build/dir",
			expectedValue: "CI_PROJECT_DIR=/tmp/test-build/dir",
		},
		// The next four tests' expected value will depend on the platform running the tests
		"Windows UNC-style BuildDir (extended-length path support)": {
			buildDir:      `\\?\C:\tmp\test-build\dir`,
			expectedValue: "CI_PROJECT_DIR=" + filepath.FromSlash("//?/C:/tmp/test-build/dir"),
		},
		"Windows UNC-style BuildDir": {
			buildDir:      `\\host\share\tmp\test-build\dir`,
			expectedValue: "CI_PROJECT_DIR=" + filepath.FromSlash("//host/share/tmp/test-build/dir"),
		},
		"Windows-style BuildDir (PS)": {
			buildDir:      `C:\tmp\test-build\dir`,
			expectedValue: "CI_PROJECT_DIR=" + filepath.FromSlash("C:/tmp/test-build/dir"),
		},
		"Windows-style BuildDir with forward slashes and drive letter": {
			buildDir:      "C:/tmp/test-build/dir",
			expectedValue: "CI_PROJECT_DIR=" + filepath.FromSlash("C:/tmp/test-build/dir"),
		},
		"Windows-style BuildDir in MSYS bash executor and drive letter)": {
			buildDir:
			"/c/tmp/test-build/dir",
			expectedValue: "CI_PROJECT_DIR=/c/tmp/test-build/dir",
		},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			build := new(Build)
			build.BuildDir = test.buildDir

			vars := build.GetAllVariables().StringList()
			assert.Contains(t, vars, test.expectedValue)
			assert.Contains(t, vars, "CI_SERVER=yes")
		})
	}
}

// TestSharedEnvVariables verifies that exactly one of CI_SHARED_ENVIRONMENT /
// CI_DISPOSABLE_ENVIRONMENT is exposed (as "true") depending on whether the
// executor reports a shared environment.
func TestSharedEnvVariables(t *testing.T) {
	for _, shared := range [...]bool{true, false} {
		t.Run(fmt.Sprintf("Value:%v", shared), func(t *testing.T) {
			assert := assert.New(t)
			build := Build{
				ExecutorFeatures: FeaturesInfo{Shared: shared},
			}
			vars := build.GetAllVariables().StringList()
			assert.NotNil(vars)

			present := "CI_SHARED_ENVIRONMENT=true"
			absent := "CI_DISPOSABLE_ENVIRONMENT=true"
			if !shared {
				present, absent = absent, present
			}
			assert.Contains(vars, present)
			assert.NotContains(vars, absent)
			// we never expose false
			assert.NotContains(vars, "CI_SHARED_ENVIRONMENT=false")
			assert.NotContains(vars, "CI_DISPOSABLE_ENVIRONMENT=false")
		})
	}
}

// TestGetRemoteURL verifies remote URL construction: CloneURL takes precedence
// over RepoURL, and the GitURLsWithoutTokens feature flag controls whether job
// token credentials are embedded in the URL.
func TestGetRemoteURL(t *testing.T) {
	const (
		exampleJobToken    = "job-token"
		exampleRepoURL     = "http://gitlab-ci-token:job-token@test.remote/my/project.git"
		exampleProjectPath = "my/project"
	)

	tests := []struct {
		name        string
		cloneURL    string
		ffEnabled   bool
		expectedURL string
	}{
		{
			name:        "authenticated with CloneURL",
			cloneURL:    "https://test.local/",
			ffEnabled:   false,
			expectedURL: "https://gitlab-ci-token:job-token@test.local/my/project.git",
		},
		{
			name:        "unauthenticated with CloneURL",
			cloneURL:    "https://test.local/",
			ffEnabled:   true,
			expectedURL: "https://test.local/my/project.git",
		},
		{
			name:        "authenticated falls back to RepoURL preserving credentials",
			cloneURL:    "",
			ffEnabled:   false,
			expectedURL: exampleRepoURL,
		},
		{
			name:        "unauthenticated falls back to RepoURL stripping credentials",
			cloneURL:    "",
			ffEnabled:   true,
			expectedURL: "http://test.remote/my/project.git",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			build := &Build{
				Runner: &RunnerConfig{
					RunnerSettings:
					RunnerSettings{
						CloneURL: tt.cloneURL,
					},
				},
				Job: spec.Job{
					Token: exampleJobToken,
					GitInfo: spec.GitInfo{
						RepoURL: exampleRepoURL,
					},
					Variables: spec.Variables{
						{Key: "CI_PROJECT_PATH", Value: exampleProjectPath},
					},
				},
			}
			build.Runner.FeatureFlags = map[string]bool{
				featureflags.GitURLsWithoutTokens: tt.ffEnabled,
			}

			remoteURL, err := build.GetRemoteURL()
			require.NoError(t, err)
			assert.Equal(t, tt.expectedURL, remoteURL.String())
		})
	}
}

// TestGetInsteadOfs verifies the git insteadOf rewrite pairs produced for
// token-authenticated HTTPS, GIT_SUBMODULE_FORCE_HTTPS, the SSH host/port
// variables, and the GitURLsWithoutTokens feature flag.
func TestGetInsteadOfs(t *testing.T) {
	const (
		exampleJobToken   = "job-token"
		exampleServerHost = "test.local"
		exampleServerURL  = "https://test.local"
	)

	tests := []struct {
		name       string
		ffEnabled  bool
		forceHTTPS bool
		serverPort string
		expected   [][2]string
	}{
		{
			name:      "authenticated basic rewrite",
			ffEnabled: false,
			expected: [][2]string{
				{"https://gitlab-ci-token:job-token@test.local", "https://test.local"},
			},
		},
		{
			name:      "unauthenticated no rewrites without force HTTPS",
			ffEnabled: true,
			expected:  nil,
		},
		{
			name:       "authenticated with force HTTPS",
			ffEnabled:  false,
			forceHTTPS: true,
			expected: [][2]string{
				{"https://gitlab-ci-token:job-token@test.local", "https://test.local"},
				{"https://gitlab-ci-token:job-token@test.local/", "git@test.local:"},
				{"https://gitlab-ci-token:job-token@test.local", "ssh://git@test.local"},
			},
		},
		{
			name:       "unauthenticated with force HTTPS",
			ffEnabled:  true,
			forceHTTPS: true,
			expected: [][2]string{
				{"https://test.local/", "git@test.local:"},
				{"https://test.local", "ssh://git@test.local"},
			},
		},
		{
			name:       "feature flag controls server port wiring",
			ffEnabled:  false,
			forceHTTPS: true,
			serverPort: "8022",
			expected: [][2]string{
				{"https://gitlab-ci-token:job-token@test.local", "https://test.local"},
				{"https://gitlab-ci-token:job-token@test.local", "ssh://git@test.local:8022"},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			build := &Build{
				Runner: &RunnerConfig{
					RunnerCredentials: RunnerCredentials{
						URL: exampleServerURL,
					},
				},
				Job: spec.Job{
					Token: exampleJobToken,
				},
			}
			build.Runner.FeatureFlags = map[string]bool{
				featureflags.GitURLsWithoutTokens: tt.ffEnabled,
			}
			build.Variables.Set(spec.Variable{Key: "CI_SERVER_SHELL_SSH_HOST", Value: exampleServerHost})
			if tt.forceHTTPS {
				build.Variables.Set(spec.Variable{Key: "GIT_SUBMODULE_FORCE_HTTPS", Value: "true"})
			}
			if tt.serverPort != "" {
				build.Variables.Set(spec.Variable{Key: "CI_SERVER_SHELL_SSH_PORT", Value: tt.serverPort})
			}

			insteadOfs, err := build.GetInsteadOfs()
			require.NoError(t, err)
			assert.ElementsMatch(t, tt.expected, insteadOfs)
		})
	}
}

// TestIsFeatureFlagOn verifies feature flag value parsing from job variables
// ("true"/"1" enable, anything else disables) and that config.toml settings
// take precedence over job variables.
func TestIsFeatureFlagOn(t *testing.T) {
	const testFF = "FF_TEST_FEATURE"

	tests := map[string]struct {
		featureFlagCfg map[string]bool
		value          string
		expectedStatus bool
	}{
		"no value": {
			value:          "",
			expectedStatus: false,
		},
		"true": {
			value:          "true",
			expectedStatus: true,
		},
		"1": {
			value:          "1",
			expectedStatus: true,
		},
		"false": {
			value:          "false",
			expectedStatus: false,
		},
		"0": {
			value:          "0",
			expectedStatus: false,
		},
		"invalid value": {
			value:          "test",
			expectedStatus: false,
		},
		"feature flag set inside config.toml take precedence": {
			featureFlagCfg: map[string]bool{
				testFF: true,
			},
			value:          "false",
			expectedStatus: true,
		},
	}

	for name, testCase := range tests {
		t.Run(name, func(t *testing.T) {
			build := new(Build)
			build.Runner = &RunnerConfig{
				RunnerSettings: RunnerSettings{
					FeatureFlags: testCase.featureFlagCfg,
				},
			}
			build.Variables = spec.Variables{
				{Key: testFF, Value: testCase.value},
			}

			status := build.IsFeatureFlagOn(testFF)
			assert.Equal(t, testCase.expectedStatus, status)
		})
	}
}

// TestIsFeatureFlagOn_SetWithRunnerVariables verifies that feature flags can
// be toggled through the runner's `environment` configuration entries.
func TestIsFeatureFlagOn_SetWithRunnerVariables(t *testing.T) {
	tests := map[string]struct {
		variable      string
		expectedValue bool
	}{
		"it has default value of FF": {
			variable:      "",
			expectedValue: false,
		},
		"it enables FF": {
			variable:      "FF_NETWORK_PER_BUILD=true",
			expectedValue: true,
		},
		"it disable FF": {
			variable:      "FF_NETWORK_PER_BUILD=false",
			expectedValue: false,
		},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			build := new(Build)
			build.Runner =
				&RunnerConfig{
					RunnerSettings: RunnerSettings{
						Environment: []string{test.variable},
					},
				}

			result := build.IsFeatureFlagOn("FF_NETWORK_PER_BUILD")
			assert.Equal(t, test.expectedValue, result)
		})
	}
}

// TestIsFeatureFlagOn_Precedence verifies the resolution order of feature flag
// sources: config.toml FeatureFlags > job variables > runner environment.
func TestIsFeatureFlagOn_Precedence(t *testing.T) {
	const testFF = "FF_TEST_FEATURE"

	t.Run("config takes precedence over job variable", func(t *testing.T) {
		b := &Build{
			Runner: &RunnerConfig{
				RunnerSettings: RunnerSettings{
					FeatureFlags: map[string]bool{
						testFF: true,
					},
				},
			},
			Job: spec.Job{
				Variables: spec.Variables{
					{Key: testFF, Value: "false"},
				},
			},
		}

		assert.True(t, b.IsFeatureFlagOn(testFF))
	})

	t.Run("config takes precedence over configured environments", func(t *testing.T) {
		b := &Build{
			Runner: &RunnerConfig{
				RunnerSettings: RunnerSettings{
					FeatureFlags: map[string]bool{
						testFF: true,
					},
					Environment: []string{testFF + "=false"},
				},
			},
		}

		assert.True(t, b.IsFeatureFlagOn(testFF))
	})

	t.Run("variable defined at job take precedence over configured environments", func(t *testing.T) {
		b := &Build{
			Runner: &RunnerConfig{
				RunnerSettings: RunnerSettings{
					Environment: []string{testFF + "=false"},
				},
			},
			Job: spec.Job{
				Variables: spec.Variables{
					{Key: testFF, Value: "true"},
				},
			},
		}

		assert.True(t, b.IsFeatureFlagOn(testFF))
	})
}

// TestGetAllVariables_FeatureFlagResolution verifies that resolved feature
// flag values appear in GetAllVariables and agree with IsFeatureFlagOn, with
// TOML settings overriding job variables.
func TestGetAllVariables_FeatureFlagResolution(t *testing.T) {
	testFF := featureflags.UseFastzip

	tests := map[string]struct {
		runnerFeatureFlags map[string]bool
		jobVariables       spec.Variables
		expectedFFValue    string
		description        string
	}{
		"TOML feature flag appears in GetAllVariables": {
			runnerFeatureFlags: map[string]bool{
				testFF: true,
			},
			expectedFFValue: "true",
			description:     "TOML-configured feature flag should appear in GetAllVariables",
		},
		"TOML overrides job variable in GetAllVariables": {
			runnerFeatureFlags: map[string]bool{
				testFF: true,
			},
			jobVariables: spec.Variables{
				{Key: testFF, Value: "false"},
			},
			expectedFFValue: "true",
			description:     "TOML setting should override job variable in GetAllVariables",
		},
		"job variable appears when no TOML setting": {
			jobVariables: spec.Variables{
				{Key: testFF, Value: "true"},
			},
			expectedFFValue: "true",
			description:     "Job variable should appear when no TOML setting exists",
		},
	}

	for name, tc := range tests {
		t.Run(name, func(t *testing.T) {
			build := &Build{
				Runner: &RunnerConfig{
					RunnerSettings: RunnerSettings{
						FeatureFlags: tc.runnerFeatureFlags,
					},
				},
				Job: spec.Job{
					Variables: tc.jobVariables,
				},
			}

			// GetAllVariables should now contain the resolved feature flag values
			allVars := build.GetAllVariables()
			actualValue := allVars.Value(testFF)
			assert.Equal(t, tc.expectedFFValue, actualValue, tc.description)

			// Verify IsFeatureFlagOn matches GetAllVariables
			expectedBool := tc.expectedFFValue == "true"
			assert.Equal(t, expectedBool, build.IsFeatureFlagOn(testFF),
				"IsFeatureFlagOn should match the value in GetAllVariables")

			// Explicitly verify that TOML settings take precedence in both methods
			if tc.runnerFeatureFlags != nil && tc.jobVariables != nil {
				assert.Equal(t, tc.runnerFeatureFlags[testFF], build.IsFeatureFlagOn(testFF),
					"TOML settings should take precedence over job variables")
			}
		})
	}
}

// TestStartBuild verifies build/cache directory resolution, including
// GIT_CLONE_PATH validation (in/out of bounds, CI_BUILDS_DIR expansion,
// custom build dir enablement) and the safe-directory-checkout flag.
func TestStartBuild(t *testing.T) {
	type startBuildArgs struct {
		rootDir               string
		cacheDir              string
		customBuildDirEnabled bool
		sharedDir             bool
		safeDirectoryCheckout bool
	}

	tests := map[string]struct {
		args                          startBuildArgs
		jobVariables                  spec.Variables
		expectedBuildDir              string
		expectedCacheDir              string
		expectedSafeDirectoryCheckout bool
		expectedError                 bool
	}{
		"no job specific build dir with no shared dir": {
			args: startBuildArgs{
				rootDir:               "/build",
				cacheDir:              "/cache",
				customBuildDirEnabled: true,
				sharedDir:             false,
				safeDirectoryCheckout: false,
			},
			jobVariables:                  spec.Variables{},
			expectedBuildDir:              "/build/test-namespace/test-repo",
			expectedCacheDir:              "/cache/test-namespace/test-repo",
			expectedSafeDirectoryCheckout: false,
			expectedError:                 false,
		},
		"no job specified build dir with shared dir": {
			args: startBuildArgs{
				rootDir:               "/builds",
				cacheDir:              "/cache",
				customBuildDirEnabled: true,
				sharedDir:
				true,
				safeDirectoryCheckout: false,
			},
			jobVariables:                  spec.Variables{},
			expectedBuildDir:              "/builds/1234/0/test-namespace/test-repo",
			expectedCacheDir:              "/cache/test-namespace/test-repo",
			expectedSafeDirectoryCheckout: false,
			expectedError:                 false,
		},
		"valid GIT_CLONE_PATH was specified": {
			args: startBuildArgs{
				rootDir:               "/builds",
				cacheDir:              "/cache",
				customBuildDirEnabled: true,
				sharedDir:             false,
				safeDirectoryCheckout: false,
			},
			jobVariables: spec.Variables{
				{Key: "GIT_CLONE_PATH", Value: "/builds/go/src/gitlab.com/test-namespace/test-repo", Public: true},
			},
			expectedBuildDir:              "/builds/go/src/gitlab.com/test-namespace/test-repo",
			expectedCacheDir:              "/cache/test-namespace/test-repo",
			expectedSafeDirectoryCheckout: false,
			expectedError:                 false,
		},
		"valid GIT_CLONE_PATH using CI_BUILDS_DIR was specified": {
			args: startBuildArgs{
				rootDir:               "/builds",
				cacheDir:              "/cache",
				customBuildDirEnabled: true,
				sharedDir:             false,
				safeDirectoryCheckout: false,
			},
			jobVariables: spec.Variables{
				{
					Key:    "GIT_CLONE_PATH",
					Value:  "$CI_BUILDS_DIR/go/src/gitlab.com/test-namespace/test-repo",
					Public: true,
				},
			},
			expectedBuildDir:              "/builds/go/src/gitlab.com/test-namespace/test-repo",
			expectedCacheDir:              "/cache/test-namespace/test-repo",
			expectedSafeDirectoryCheckout: false,
			expectedError:                 false,
		},
		"out-of-bounds GIT_CLONE_PATH was specified": {
			args: startBuildArgs{
				rootDir:               "/builds",
				cacheDir:              "/cache",
				customBuildDirEnabled: true,
				sharedDir:             false,
				safeDirectoryCheckout: false,
			},
			jobVariables: spec.Variables{
				{
					Key:    "GIT_CLONE_PATH",
					Value:  "/builds/../outside",
					Public: true,
				},
			},
			expectedError: true,
		},
		"custom build disabled": {
			args: startBuildArgs{
				rootDir:               "/builds",
				cacheDir:              "/cache",
				customBuildDirEnabled: false,
				sharedDir:             false,
				safeDirectoryCheckout: false,
			},
			jobVariables: spec.Variables{
				{Key: "GIT_CLONE_PATH", Value: "/builds/go/src/gitlab.com/test-namespace/test-repo", Public: true},
			},
			expectedBuildDir: "/builds/test-namespace/test-repo",
			expectedCacheDir:
			"/cache/test-namespace/test-repo",
			expectedSafeDirectoryCheckout: false,
			expectedError:                 true,
		},
		"invalid GIT_CLONE_PATH was specified": {
			args: startBuildArgs{
				rootDir:               "/builds",
				cacheDir:              "/cache",
				customBuildDirEnabled: true,
				sharedDir:             false,
				safeDirectoryCheckout: false,
			},
			jobVariables: spec.Variables{
				{Key: "GIT_CLONE_PATH", Value: "/go/src/gitlab.com/test-namespace/test-repo", Public: true},
			},
			expectedError: true,
		},
		"safeDirectoryCheckout enabled": {
			args: startBuildArgs{
				rootDir:               "/builds",
				cacheDir:              "/cache",
				customBuildDirEnabled: false,
				sharedDir:             false,
				safeDirectoryCheckout: true,
			},
			jobVariables:                  nil,
			expectedBuildDir:              "/builds/test-namespace/test-repo",
			expectedCacheDir:              "/cache/test-namespace/test-repo",
			expectedSafeDirectoryCheckout: true,
			expectedError:                 false,
		},
	}

	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			build := Build{
				Job: spec.Job{
					GitInfo: spec.GitInfo{
						RepoURL: "https://gitlab.com/test-namespace/test-repo.git",
					},
					Variables: test.jobVariables,
				},
				Runner: &RunnerConfig{
					RunnerCredentials: RunnerCredentials{
						Token: "1234",
					},
				},
			}

			err := build.StartBuild(
				test.args.rootDir,
				test.args.cacheDir,
				test.args.customBuildDirEnabled,
				test.args.sharedDir,
				test.args.safeDirectoryCheckout,
			)
			if test.expectedError {
				assert.Error(t, err)
				return
			}

			assert.NoError(t, err)
			assert.Equal(t, test.expectedBuildDir, build.BuildDir)
			assert.Equal(t, test.args.rootDir, build.RootDir)
			assert.Equal(t, test.expectedCacheDir, build.CacheDir)
			assert.Equal(t, test.expectedSafeDirectoryCheckout, build.SafeDirectoryCheckout)
		})
	}
}

// TestTmpProjectDir verifies the temporary project directory derived from the
// build directory, including trailing-slash normalization of GIT_CLONE_PATH.
func TestTmpProjectDir(t *testing.T) {
	createTestBuild := func(variables spec.Variables) Build {
		return Build{
			Job: spec.Job{
				GitInfo: spec.GitInfo{
					RepoURL: "https://gitlab.com/test-namespace/test-repo.git",
				},
				Variables: variables,
			},
			Runner: &RunnerConfig{
				RunnerCredentials: RunnerCredentials{
					Token: "1234",
				},
			},
		}
	}

	type startBuildArgs struct {
		rootDir               string
		cacheDir              string
		customBuildDirEnabled bool
		sharedDir             bool
} testStartBuildArgs := startBuildArgs{ rootDir: "/builds", cacheDir: "/cache", customBuildDirEnabled: true, sharedDir: false, } tests := map[string]struct { args startBuildArgs jobVariables spec.Variables expectedTmpProjectDir string expectedError bool }{ "test default build dir": { args: testStartBuildArgs, jobVariables: nil, expectedError: false, expectedTmpProjectDir: "/builds/test-namespace/test-repo.tmp", }, "test custom build dir with double trailing slashes": { args: testStartBuildArgs, jobVariables: spec.Variables{ {Key: "GIT_CLONE_PATH", Value: "/builds/go/src/gitlab.com/test-namespace/test-repo//", Public: true}, }, expectedError: false, expectedTmpProjectDir: "/builds/go/src/gitlab.com/test-namespace/test-repo.tmp", }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { build := createTestBuild(tt.jobVariables) err := build.StartBuild( tt.args.rootDir, tt.args.cacheDir, tt.args.customBuildDirEnabled, tt.args.sharedDir, false, ) if tt.expectedError { assert.Error(t, err) return } assert.NoError(t, err) dir := build.TmpProjectDir() assert.Equal(t, tt.expectedTmpProjectDir, dir) }) } } func TestSkipBuildStageFeatureFlag(t *testing.T) { featureFlagValues := []string{ "true", "false", } s := NewMockShell(t) s.On("GetName").Return("skip-build-stage-shell") s.On("IsDefault").Return(false).Maybe() RegisterShell(s) for _, value := range featureFlagValues { t.Run(value, func(t *testing.T) { build := &Build{ Runner: &RunnerConfig{}, Job: spec.Job{ Variables: spec.Variables{ { Key: featureflags.SkipNoOpBuildStages, Value: "false", }, }, }, } e := NewMockExecutor(t) s.On("GenerateScript", mock.Anything, mock.Anything, mock.Anything).Return("script", ErrSkipBuildStage) e.On("Shell").Return(&ShellScriptInfo{Shell: "skip-build-stage-shell"}) if !build.IsFeatureFlagOn(featureflags.SkipNoOpBuildStages) { e.On("Run", matchBuildStage(BuildStageAfterScript)).Return(nil).Once() } err := build.executeStage(t.Context(), BuildStageAfterScript, e) assert.NoError(t, err) 
}) } } func TestWaitForTerminal(t *testing.T) { cases := []struct { name string cancelFn func(ctxCancel context.CancelFunc, build *Build) jobTimeout int waitForTerminalTimeout time.Duration expectedErr string }{ { name: "Cancel build", cancelFn: func(ctxCancel context.CancelFunc, build *Build) { ctxCancel() }, jobTimeout: 3600, waitForTerminalTimeout: time.Hour, expectedErr: "build cancelled, killing session", }, { name: "Terminal Timeout", cancelFn: func(ctxCancel context.CancelFunc, build *Build) { // noop }, jobTimeout: 3600, waitForTerminalTimeout: time.Second, expectedErr: "terminal session timed out (maximum time allowed - 1s)", }, { name: "System Interrupt", cancelFn: func(ctxCancel context.CancelFunc, build *Build) { build.SystemInterrupt <- os.Interrupt }, jobTimeout: 3600, waitForTerminalTimeout: time.Hour, expectedErr: "terminal disconnected by system signal: interrupt", }, { name: "Terminal Disconnect", cancelFn: func(ctxCancel context.CancelFunc, build *Build) { build.Session.DisconnectCh <- errors.New("user disconnect") }, jobTimeout: 3600, waitForTerminalTimeout: time.Hour, expectedErr: "terminal disconnected: user disconnect", }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { build := Build{ Runner: &RunnerConfig{ RunnerSettings: RunnerSettings{ Executor: "shell", }, }, Job: spec.Job{ RunnerInfo: spec.RunnerInfo{ Timeout: c.jobTimeout, }, }, SystemInterrupt: make(chan os.Signal), } trace := Trace{Writer: os.Stdout} build.logger = buildlogger.New(&trace, build.Log(), buildlogger.Options{}) sess, err := session.NewSession(nil) require.NoError(t, err) build.Session = sess srv := httptest.NewServer(build.Session.Handler()) defer srv.Close() mockConn := terminal.NewMockConn(t) mockConn.On("Close").Maybe().Return(nil) // On Start upgrade the web socket connection and wait for the // timeoutCh to exit, to mock real work made on the websocket. mockConn. On("Start", mock.Anything, mock.Anything, mock.Anything, mock.Anything). 
Run(func(args mock.Arguments) { upgrader := &websocket.Upgrader{} r := args[1].(*http.Request) w := args[0].(http.ResponseWriter) _, _ = upgrader.Upgrade(w, r, nil) timeoutCh := args[2].(chan error) <-timeoutCh }).Once() mockTerminal := terminal.NewMockInteractiveTerminal(t) mockTerminal.On("TerminalConnect").Return(mockConn, nil) sess.SetInteractiveTerminal(mockTerminal) u := url.URL{ Scheme: "ws", Host: srv.Listener.Addr().String(), Path: build.Session.Endpoint + "/exec", } headers := http.Header{ "Authorization": []string{build.Session.Token}, } conn, resp, err := websocket.DefaultDialer.Dial(u.String(), headers) require.NotNil(t, conn) require.NoError(t, err) defer func() { resp.Body.Close() conn.Close() }() ctx, cancel := context.WithTimeout(t.Context(), build.GetBuildTimeout()) errCh := make(chan error) go func() { errCh <- build.waitForTerminal(ctx, c.waitForTerminalTimeout) }() c.cancelFn(cancel, &build) assert.EqualError(t, <-errCh, c.expectedErr) }) } } func TestBuild_IsLFSSmudgeDisabled(t *testing.T) { testCases := map[string]struct { isVariableUnset bool variableValue string expectedResult bool }{ "variable not set": { isVariableUnset: true, expectedResult: false, }, "variable empty": { variableValue: "", expectedResult: false, }, "variable set to true": { variableValue: "true", expectedResult: true, }, "variable set to false": { variableValue: "false", expectedResult: false, }, "variable set to 1": { variableValue: "1", expectedResult: true, }, "variable set to 0": { variableValue: "0", expectedResult: false, }, } for testName, testCase := range testCases { t.Run(testName, func(t *testing.T) { b := &Build{ Job: spec.Job{ Variables: spec.Variables{}, }, } if !testCase.isVariableUnset { b.Variables = append( b.Variables, spec.Variable{Key: "GIT_LFS_SKIP_SMUDGE", Value: testCase.variableValue, Public: true}, ) } assert.Equal(t, testCase.expectedResult, b.IsLFSSmudgeDisabled()) }) } } func TestGitSubmodulePaths(t *testing.T) { tests := map[string]struct { 
isVariableSet bool value string expectedResult []string expectedError bool }{ "not defined": { isVariableSet: false, value: "", expectedResult: nil, expectedError: false, }, "empty": { isVariableSet: true, value: "", expectedResult: nil, expectedError: false, }, "select submodule 1": { isVariableSet: true, value: "submodule1", expectedResult: []string{"submodule1"}, expectedError: false, }, "select submodule 1 and 2": { isVariableSet: true, value: "submodule1 submodule2", expectedResult: []string{"submodule1", "submodule2"}, expectedError: false, }, "select submodule 1 and exclude 2": { isVariableSet: true, value: "submodule1 :(exclude)submodule2", expectedResult: []string{"submodule1", ":(exclude)submodule2"}, expectedError: false, }, "exclude submodule 1": { isVariableSet: true, value: " :(exclude)submodule1", expectedResult: []string{":(exclude)submodule1"}, expectedError: false, }, "exclude submodule 1 and 2": { isVariableSet: true, value: " :(exclude)submodule1 :(exclude)submodule2 ", expectedResult: []string{":(exclude)submodule1", ":(exclude)submodule2"}, expectedError: false, }, "exclude submodule with single space": { isVariableSet: true, value: ":(exclude) gitlab-grack", expectedResult: nil, expectedError: true, }, "exclude submodule with multiple spaces": { isVariableSet: true, value: ":(exclude) gitlab-grack", expectedResult: nil, expectedError: true, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { build := &Build{ Runner: &RunnerConfig{}, Job: spec.Job{ Variables: spec.Variables{}, }, } if test.isVariableSet { build.Variables = append( build.Variables, spec.Variable{Key: "GIT_SUBMODULE_PATHS", Value: test.value, Public: true}, ) } result, err := build.GetSubmodulePaths() if test.expectedError { assert.Error(t, err) assert.Contains(t, err.Error(), "invalid submodule pathspec") } else { assert.Equal(t, test.expectedResult, result) assert.NoError(t, err) } }) } } func TestGitCleanFlags(t *testing.T) { tests := map[string]struct { 
value string expectedResult []string }{ "empty clean flags": { value: "", expectedResult: []string{"-ffdx"}, }, "use custom flags": { value: "custom-flags", expectedResult: []string{"custom-flags"}, }, "use custom flags with multiple arguments": { value: "-ffdx -e cache/", expectedResult: []string{"-ffdx", "-e", "cache/"}, }, "disabled": { value: "none", expectedResult: []string{}, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { build := &Build{ Runner: &RunnerConfig{}, Job: spec.Job{ Variables: spec.Variables{ {Key: "GIT_CLEAN_FLAGS", Value: test.value}, }, }, } result := build.GetGitCleanFlags() assert.Equal(t, test.expectedResult, result) }) } } func TestGitCloneFlags(t *testing.T) { tests := map[string]struct { value string expectedResult []string }{ "empty clone flags": { value: "", expectedResult: []string{}, }, "use single custom flag": { value: "--bare", expectedResult: []string{"--bare"}, }, "use custom flags with multiple arguments": { value: "--no-tags --filter=blob:none", expectedResult: []string{"--no-tags", "--filter=blob:none"}, }, "use another custom flag": { value: "--reference-if-available /tmp/test --no-tags", expectedResult: []string{"--reference-if-available", "/tmp/test", "--no-tags"}, }, "disabled": { value: "none", expectedResult: []string{}, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { build := &Build{ Runner: &RunnerConfig{}, Job: spec.Job{ Variables: spec.Variables{ {Key: "GIT_CLONE_EXTRA_FLAGS", Value: test.value}, }, }, } result := build.GetGitCloneFlags() assert.Equal(t, test.expectedResult, result) }) } } func TestGitFetchFlags(t *testing.T) { tests := map[string]struct { value string expectedResult []string }{ "empty fetch flags": { value: "", expectedResult: []string{"--prune", "--quiet"}, }, "use custom flags": { value: "custom-flags", expectedResult: []string{"custom-flags"}, }, "use custom flags with multiple arguments": { value: "--prune --tags --quiet", expectedResult: 
[]string{"--prune", "--tags", "--quiet"}, }, "disabled": { value: "none", expectedResult: []string{}, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { build := &Build{ Runner: &RunnerConfig{}, Job: spec.Job{ Variables: spec.Variables{ {Key: "GIT_FETCH_EXTRA_FLAGS", Value: test.value}, }, }, } result := build.GetGitFetchFlags() assert.Equal(t, test.expectedResult, result) }) } } func TestGetRepositoryObjectFormat(t *testing.T) { tests := map[string]struct { value string expectedResult string }{ "empty value": { value: "", expectedResult: "sha1", }, "sha1": { value: "sha1", expectedResult: "sha1", }, "sha256": { value: "sha256", expectedResult: "sha256", }, } for name, test := range tests { t.Run(name, func(t *testing.T) { build := &Build{ Runner: &RunnerConfig{}, Job: spec.Job{ GitInfo: spec.GitInfo{ RepoObjectFormat: test.value, }, }, } result := build.GetRepositoryObjectFormat() assert.Equal(t, test.expectedResult, result) }) } } func TestGitSubmoduleUpdateFlags(t *testing.T) { tests := map[string]struct { value string expectedResult []string }{ "empty update flags": { value: "", expectedResult: nil, }, "use custom update flags": { value: "custom-flags", expectedResult: []string{"custom-flags"}, }, "use custom update flags with multiple arguments": { value: "--remote --jobs 4", expectedResult: []string{"--remote", "--jobs", "4"}, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { build := &Build{ Runner: &RunnerConfig{}, Job: spec.Job{ Variables: spec.Variables{ {Key: "GIT_SUBMODULE_UPDATE_FLAGS", Value: test.value}, }, }, } result := build.GetGitSubmoduleUpdateFlags() assert.Equal(t, test.expectedResult, result) }) } } func TestDefaultVariables(t *testing.T) { tests := map[string]struct { jobVariables spec.Variables rootDir string key string expectedValue string }{ "get default CI_SERVER value": { jobVariables: spec.Variables{}, rootDir: "/builds", key: "CI_SERVER", expectedValue: "yes", }, "get default CI_PROJECT_DIR 
value": { jobVariables: spec.Variables{}, rootDir: "/builds", key: "CI_PROJECT_DIR", expectedValue: "/builds/test-namespace/test-repo", }, "get overwritten CI_PROJECT_DIR value": { jobVariables: spec.Variables{ {Key: "GIT_CLONE_PATH", Value: "/builds/go/src/gitlab.com/gitlab-org/gitlab-runner", Public: true}, }, rootDir: "/builds", key: "CI_PROJECT_DIR", expectedValue: "/builds/go/src/gitlab.com/gitlab-org/gitlab-runner", }, "CI_BUILD_NETWORK_NAME added when FF_NETWORK_PER_BUILD is enabled": { jobVariables: spec.Variables{ {Key: featureflags.NetworkPerBuild, Value: "true"}, }, rootDir: "/builds", key: "CI_BUILD_NETWORK_NAME", expectedValue: "runner-1234-0-0-0", }, "CI_BUILD_NETWORK_NAME not added when FF_NETWORK_PER_BUILD is disabled": { jobVariables: spec.Variables{ {Key: featureflags.NetworkPerBuild, Value: "false"}, }, rootDir: "/builds", key: "CI_BUILD_NETWORK_NAME", expectedValue: "", }, "CI_BUILD_NETWORK_NAME cannot be overridden by job variables": { jobVariables: spec.Variables{ {Key: featureflags.NetworkPerBuild, Value: "true"}, {Key: "CI_BUILD_NETWORK_NAME", Value: "user-override"}, }, rootDir: "/builds", key: "CI_BUILD_NETWORK_NAME", expectedValue: "runner-1234-0-0-0", }, } for name, test := range tests { t.Run(name, func(t *testing.T) { build := Build{ Job: spec.Job{ GitInfo: spec.GitInfo{ RepoURL: "https://gitlab.com/test-namespace/test-repo.git", }, Variables: test.jobVariables, }, Runner: &RunnerConfig{ RunnerCredentials: RunnerCredentials{ Token: "1234", }, }, } err := build.StartBuild(test.rootDir, "/cache", true, false, false) assert.NoError(t, err) variable := build.GetAllVariables().Get(test.key) assert.Equal(t, test.expectedValue, variable) }) } } func TestBuildFinishTimeout(t *testing.T) { tests := map[string]bool{ "channel returns first": true, "timeout returns first": false, } for name, chanFirst := range tests { t.Run(name, func(t *testing.T) { logger, hooks := test.NewNullLogger() build := Build{ logger: buildlogger.New(nil, 
logrus.NewEntry(logger), buildlogger.Options{}), } buildFinish := make(chan error, 1) timeout := 10 * time.Millisecond if chanFirst { buildFinish <- errors.New("job finish error") } build.waitForBuildFinish(buildFinish, timeout) entry := hooks.LastEntry() if chanFirst { assert.Nil(t, entry) return } assert.NotNil(t, entry) }) } } func TestProjectUniqueName(t *testing.T) { tests := map[string]struct { build *Build expectedName string }{ "project non rfc1132 unique name": { build: &Build{ Runner: &RunnerConfig{ RunnerCredentials: RunnerCredentials{ Token: "Ze_n8E6en622WxxSg4r8", }, }, Job: spec.Job{ JobInfo: spec.JobInfo{ ProjectID: 1234567890, }, }, ProjectRunnerID: 0, }, expectedName: "runner-zen8e6en-project-1234567890-concurrent-0", }, "project normal unique name": { build: &Build{ Runner: &RunnerConfig{ RunnerCredentials: RunnerCredentials{ Token: "xYzWabc-Ij3xlKjmoPO9", }, }, Job: spec.Job{ JobInfo: spec.JobInfo{ ProjectID: 1234567890, }, }, ProjectRunnerID: 0, }, expectedName: "runner-xyzwabc-i-project-1234567890-concurrent-0", }, } for name, test := range tests { t.Run(name, func(t *testing.T) { assert.Equal(t, test.expectedName, test.build.ProjectUniqueName()) }) } } func TestProjectUniqueShortName(t *testing.T) { tests := map[string]struct { build *Build expectedName string }{ "project non rfc1132 unique name": { build: &Build{ Runner: &RunnerConfig{ RunnerCredentials: RunnerCredentials{ Token: "Ze_n8E6en622WxxSg4r8", }, }, Job: spec.Job{ JobInfo: spec.JobInfo{ ProjectID: 1234567890, }, }, ProjectRunnerID: 0, }, expectedName: "runner-zen8e6en-1234567890-0-0", }, "project normal unique name without build id": { build: &Build{ Runner: &RunnerConfig{ RunnerCredentials: RunnerCredentials{ Token: "xYzWabc-Ij3xlKjmoPO9", }, }, Job: spec.Job{ JobInfo: spec.JobInfo{ ProjectID: 1234567890, }, }, ProjectRunnerID: 0, }, expectedName: "runner-xyzwabc-i-1234567890-0-0", }, "project normal unique name with build id": { build: &Build{ Runner: &RunnerConfig{ 
RunnerCredentials: RunnerCredentials{ Token: "xYzWabc-Ij3xlKjmoPO9", }, }, Job: spec.Job{ ID: 12345, JobInfo: spec.JobInfo{ ProjectID: 1234567890, }, }, ProjectRunnerID: 222222, }, expectedName: "runner-xyzwabc-i-1234567890-222222-12345", }, } for name, test := range tests { t.Run(name, func(t *testing.T) { assert.Equal(t, test.expectedName, test.build.ProjectUniqueShortName()) }) } } func TestProjectRealUniqueName(t *testing.T) { t.Parallel() tests := map[string]struct { name string token string projectID int64 projectRunnerID int systemID string expectedUniqueName string }{ "zero values": { expectedUniqueName: "runner-f1969ebde09ffbae93df68a9aec385a8", }, "with token": { token: "some-random-token-and-we-sure-don't-run-it-through-the-shortener", expectedUniqueName: "runner-f563c42913906cc3c0c50d55b005ce86", }, "with token & system ID": { token: "some-random-token-and-we-sure-don't-run-it-through-the-shortener", systemID: "some-system-ID", expectedUniqueName: "runner-576923f59d7b85f6258fe7e56d254ce0", }, "with token & system ID & project ID": { token: "some-random-token-and-we-sure-don't-run-it-through-the-shortener", systemID: "some-system-ID", projectID: 42, expectedUniqueName: "runner-896339b5ef9bebb3cbb72960ea8e89bb", }, "with token & system ID & project ID & project runner ID": { token: "some-random-token-and-we-sure-don't-run-it-through-the-shortener", systemID: "some-system-ID", projectID: 42, projectRunnerID: 4242, expectedUniqueName: "runner-9d75c021c38f7957cb372857766d74b4", }, } for name, test := range tests { t.Run(name, func(t *testing.T) { t.Parallel() build := &Build{Runner: &RunnerConfig{}} build.Runner.RunnerCredentials.Token = test.token build.Runner.SystemID = test.systemID build.Job.JobInfo.ProjectID = test.projectID build.ProjectRunnerID = test.projectRunnerID assert.Equal(t, test.expectedUniqueName, build.ProjectRealUniqueName()) }) } } func TestBuildStages(t *testing.T) { scriptOnlyBuild, err := GetRemoteSuccessfulBuild() require.NoError(t, 
err) multistepBuild, err := GetRemoteSuccessfulMultistepBuild() require.NoError(t, err) tests := map[string]struct { jobResponse spec.Job expectedStages []BuildStage }{ "script only build": { jobResponse: scriptOnlyBuild, expectedStages: append(staticBuildStages, "step_script"), }, "multistep build": { jobResponse: multistepBuild, expectedStages: append(staticBuildStages, "step_script", "step_release"), }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { build := &Build{ Job: tt.jobResponse, } assert.ElementsMatch(t, tt.expectedStages, build.BuildStages()) }) } } func TestBuild_GetExecutorJobSectionAttempts(t *testing.T) { tests := []struct { attempts string expectedAttempts int expectedErr bool }{ { attempts: "", expectedAttempts: 1, }, { attempts: "3", expectedAttempts: 3, }, { attempts: "0", expectedAttempts: DefaultExecutorStageAttempts, expectedErr: true, }, { attempts: "99", expectedAttempts: DefaultExecutorStageAttempts, expectedErr: true, }, } for _, tt := range tests { t.Run(tt.attempts, func(t *testing.T) { build := Build{ Job: spec.Job{ Variables: spec.Variables{ spec.Variable{ Key: ExecutorJobSectionAttempts, Value: tt.attempts, }, }, }, } attempts := build.GetExecutorJobSectionAttempts() if tt.expectedErr { assert.NotEmpty(t, build.Settings().Errors) } assert.Equal(t, tt.expectedAttempts, attempts) }) } } func TestBuild_getFeatureFlagInfo(t *testing.T) { const changedFeatureFlags = "FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY:true" tests := []struct { value string expectedStatus string }{ { value: "true", expectedStatus: changedFeatureFlags, }, { value: "1", expectedStatus: changedFeatureFlags, }, { value: "invalid", expectedStatus: "", }, } for _, tt := range tests { t.Run(tt.value, func(t *testing.T) { b := Build{ Job: spec.Job{ Variables: spec.Variables{ spec.Variable{ Key: featureflags.UseLegacyKubernetesExecutionStrategy, Value: tt.value, Public: true, }, }, }, Runner: &RunnerConfig{}, } assert.Equal(t, tt.expectedStatus, 
b.getFeatureFlagInfo()) }) } } func setupSuccessfulMockExecutor( t *testing.T, prepareFn func(options ExecutorPrepareOptions) error, ) *MockExecutorProvider { executor, provider := setupMockExecutorAndProvider(t) // We run everything once executor.On("Prepare", mock.Anything).Return(prepareFn).Once() executor.On("Finish", nil).Once() executor.On("Cleanup").Once() // Run script successfully executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}) executor.On("Run", matchBuildStage(BuildStagePrepare)).Return(nil).Once() executor.On("Run", matchBuildStage(BuildStageGetSources)).Return(nil).Once() executor.On("Run", matchBuildStage(BuildStageRestoreCache)).Return(nil).Once() executor.On("Run", matchBuildStage(BuildStageDownloadArtifacts)).Return(nil).Once() executor.On("Run", matchBuildStage("step_script")).Return(nil).Once() executor.On("Run", matchBuildStage(BuildStageAfterScript)).Return(nil).Once() executor.On("Run", matchBuildStage(BuildStageArchiveOnSuccessCache)).Return(nil).Once() executor.On("Run", matchBuildStage(BuildStageUploadOnSuccessArtifacts)). Return(nil). Once() executor.On("Run", matchBuildStage(BuildStageCleanup)). Return(nil). 
Once() return provider } func setupMockExecutorAndProvider(t *testing.T) (*MockExecutor, *MockExecutorProvider) { e := NewMockExecutor(t) p := NewMockExecutorProvider(t) p.On("GetFeatures", mock.Anything).Return(nil).Once() p.On("Create").Return(e).Once() return e, p } func registerExecutorWithSuccessfulBuild(t *testing.T, p *MockExecutorProvider, rc *RunnerConfig) *Build { require.NotNil(t, rc) successfulBuild, err := GetSuccessfulBuild() require.NoError(t, err) if rc.RunnerSettings.Executor == "" { // Ensure we set the executor name if not already defined rc.RunnerSettings.Executor = t.Name() } build, err := NewBuild(successfulBuild, rc, nil, nil, p) assert.NoError(t, err) return build } func runSuccessfulMockBuild(t *testing.T, prepareFn func(options ExecutorPrepareOptions) error) *Build { p := setupSuccessfulMockExecutor(t, prepareFn) build := registerExecutorWithSuccessfulBuild(t, p, new(RunnerConfig)) err := build.Run(&Config{}, &Trace{Writer: os.Stdout}) assert.NoError(t, err) return build } func TestSecretsResolving(t *testing.T) { exampleVariables := spec.Variables{ {Key: "key", Value: "value"}, } setupFailureExecutorMocks := func(t *testing.T) *MockExecutorProvider { return NewMockExecutorProvider(t) } secrets := spec.Secrets{ "TEST_SECRET": spec.Secret{ Vault: &spec.VaultSecret{}, }, } tests := map[string]struct { secrets spec.Secrets resolverCreationError error prepareExecutorProvider func(t *testing.T) *MockExecutorProvider returnVariables spec.Variables resolvingError error expectedVariables spec.Variables expectedError error }{ "secrets not present": { prepareExecutorProvider: func(t *testing.T) *MockExecutorProvider { return setupSuccessfulMockExecutor(t, func(options ExecutorPrepareOptions) error { return nil }) }, expectedError: nil, }, "error on creating resolver": { secrets: secrets, resolverCreationError: assert.AnError, prepareExecutorProvider: setupFailureExecutorMocks, expectedError: assert.AnError, }, "error on secrets resolving": { 
secrets: secrets, prepareExecutorProvider: setupFailureExecutorMocks, returnVariables: exampleVariables, resolvingError: assert.AnError, expectedVariables: nil, expectedError: assert.AnError, }, "secrets resolved": { secrets: secrets, prepareExecutorProvider: func(t *testing.T) *MockExecutorProvider { return setupSuccessfulMockExecutor(t, func(options ExecutorPrepareOptions) error { return nil }) }, returnVariables: exampleVariables, resolvingError: nil, expectedVariables: exampleVariables, expectedError: nil, }, "secret not found - FF_SECRET_RESOLVING_FAILS_IF_MISSING enabled": { secrets: secrets, prepareExecutorProvider: setupFailureExecutorMocks, returnVariables: nil, resolvingError: fmt.Errorf("%w: %s", ErrSecretNotFound, "secret_key"), expectedVariables: nil, expectedError: ErrSecretNotFound, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { secretsResolverMock := NewMockSecretsResolver(t) p := tt.prepareExecutorProvider(t) successfulBuild, err := GetSuccessfulBuild() require.NoError(t, err) successfulBuild.Secrets = tt.secrets if tt.resolverCreationError == nil && tt.secrets != nil { secretsResolverMock.On("Resolve", tt.secrets). Return(tt.returnVariables, tt.resolvingError). 
Once() } rc := new(RunnerConfig) rc.RunnerSettings.Executor = t.Name() build, err := NewBuild(successfulBuild, rc, nil, nil, p) assert.NoError(t, err) build.secretsResolver = func(_ logger, _ SecretResolverRegistry, _ func(string) bool) (SecretsResolver, error) { return secretsResolverMock, tt.resolverCreationError } err = build.Run(&Config{}, &Trace{Writer: os.Stdout}) assert.Equal(t, tt.expectedVariables, build.secretsVariables) if tt.expectedError != nil { assert.ErrorIs(t, err, tt.expectedError) return } assert.NoError(t, err) }) } } func TestSetTraceStatus(t *testing.T) { tests := map[string]struct { err error assert func(*testing.T, *mockLightJobTrace, error) }{ "nil error is successful": { err: nil, assert: func(t *testing.T, mt *mockLightJobTrace, err error) { mt.On("Success").Return(nil).Once() }, }, "build error, script failure": { err: &BuildError{FailureReason: ScriptFailure}, assert: func(t *testing.T, mt *mockLightJobTrace, err error) { mt.On("Fail", err, JobFailureData{Reason: ScriptFailure, Mode: JobExecutionModeTraditional}).Return(nil).Once() }, }, "build error, wrapped script failure": { err: fmt.Errorf("wrapped: %w", &BuildError{FailureReason: ScriptFailure}), assert: func(t *testing.T, mt *mockLightJobTrace, err error) { mt.On("Fail", err, JobFailureData{Reason: ScriptFailure, Mode: JobExecutionModeTraditional}).Return(nil).Once() }, }, "non-build error": { err: fmt.Errorf("some error"), assert: func(t *testing.T, mt *mockLightJobTrace, err error) { mt.On("Fail", err, JobFailureData{Reason: RunnerSystemFailure, Mode: JobExecutionModeTraditional}).Return(nil).Once() }, }, } for tn, tc := range tests { t.Run(tn, func(t *testing.T) { b := &Build{ Runner: &RunnerConfig{}, } trace := NewMockLightJobTrace(t) trace.On("IsStdout").Return(true) var be *BuildError if errors.As(tc.err, &be) { trace.On("SetSupportedFailureReasonMapper", mock.Anything).Once() } tc.assert(t, trace, tc.err) b.setTraceStatus(trace, tc.err) }) } } func 
Test_GetDebugServicePolicy(t *testing.T) { tests := map[string]struct { variable spec.Variable want bool wantLog string }{ "empty": {want: false}, "disabled": { variable: spec.Variable{Key: "CI_DEBUG_SERVICES", Value: "false", Public: true}, want: false, }, "bogus value": { variable: spec.Variable{Key: "CI_DEBUG_SERVICES", Value: "blammo", Public: true}, want: false, wantLog: "CI_DEBUG_SERVICES: expected bool got \"blammo\", using default value: false", }, "enabled": { variable: spec.Variable{Key: "CI_DEBUG_SERVICES", Value: "true", Public: true}, want: true, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { b := &Build{ Runner: &RunnerConfig{}, Job: spec.Job{Variables: []spec.Variable{tt.variable}}, } got := b.IsCIDebugServiceEnabled() assert.Equal(t, tt.want, got) if tt.wantLog == "" { assert.Empty(t, b.Settings().Errors) } else { assert.Contains(t, errors.Join(b.Settings().Errors...).Error(), tt.wantLog) } }) } } func Test_expandContainerOptions(t *testing.T) { testCases := map[string]struct { jobVars spec.Variables image spec.Image services spec.Services }{ "no expansion required": { image: spec.Image{Name: "alpine:latest", Alias: "jobctr"}, services: spec.Services{ {Name: "postgres:latest", Alias: "db, pg"}, {Name: "redis:latest", Alias: "cache"}, }, }, "expansion required": { jobVars: spec.Variables{ {Key: "JOB_IMAGE", Value: "alpine:latest"}, {Key: "JOB_ALIAS", Value: "jobctr"}, {Key: "DB_IMAGE", Value: "postgres:latest"}, {Key: "DB_IMAGE_ALIAS", Value: "db"}, {Key: "CACHE_IMAGE", Value: "redis:latest"}, {Key: "CACHE_IMAGE_ALIAS", Value: "cache"}, }, image: spec.Image{Name: "$JOB_IMAGE", Alias: "$JOB_ALIAS"}, services: spec.Services{ {Name: "$DB_IMAGE", Alias: "$DB_IMAGE_ALIAS, pg"}, {Name: "$CACHE_IMAGE", Alias: "$CACHE_IMAGE_ALIAS"}, }, }, } for name, tt := range testCases { t.Run(name, func(t *testing.T) { b := &Build{ Runner: &RunnerConfig{}, Job: spec.Job{ Variables: tt.jobVars, Image: tt.image, Services: tt.services, }, } 
b.GetAllVariables() b.expandContainerOptions() assert.Equal(t, "alpine:latest", b.Image.Name) assert.Equal(t, "jobctr", b.Image.Alias) assert.Len(t, b.Services, 2) assert.Equal(t, "postgres:latest", b.Services[0].Name) assert.Equal(t, []string{"db", "pg"}, b.Services[0].Aliases()) assert.Equal(t, "redis:latest", b.Services[1].Name) assert.Equal(t, []string{"cache"}, b.Services[1].Aliases()) }) } } func TestPrintPolicyOptions(t *testing.T) { falseValue := false trueValue := true testCases := []struct { desc string policyOptions spec.PolicyOptions contains []string }{ { desc: "without policy options", }, { desc: "not a policy job", policyOptions: spec.PolicyOptions{ PolicyJob: false, }, }, { desc: "policy job without override", policyOptions: spec.PolicyOptions{ PolicyJob: true, Name: "Test Policy", }, contains: []string{`Job triggered by policy \"Test Policy\".`, "Variables defined in the policy take precedence over matching user-defined CI/CD variables for this job."}, }, { desc: "policy job with override allowed", policyOptions: spec.PolicyOptions{ PolicyJob: true, Name: "Test Policy", VariableOverrideAllowed: &trueValue, }, contains: []string{`Job triggered by policy \"Test Policy\".`, "User-defined CI/CD variables are allowed in this job according to the policy."}, }, { desc: "policy job with override allowed with exceptions", policyOptions: spec.PolicyOptions{ PolicyJob: true, Name: "Test Policy", VariableOverrideAllowed: &trueValue, VariableOverrideExceptions: []string{"EXCEPTION_VAR1", "EXCEPTION_VAR2"}, }, contains: []string{`Job triggered by policy \"Test Policy\".`, "User-defined CI/CD variables are allowed in this job (except for EXCEPTION_VAR1, EXCEPTION_VAR2) according to the policy."}, }, { desc: "policy job with override denied", policyOptions: spec.PolicyOptions{ PolicyJob: true, Name: "Test Policy", VariableOverrideAllowed: &falseValue, }, contains: []string{`Job triggered by policy \"Test Policy\".`, "User-defined CI/CD variables are ignored in 
this job according to the policy."}, }, { desc: "policy job with override denied with exceptions", policyOptions: spec.PolicyOptions{ PolicyJob: true, Name: "Test Policy", VariableOverrideAllowed: &falseValue, VariableOverrideExceptions: []string{"EXCEPTION_VAR1", "EXCEPTION_VAR2"}, }, contains: []string{`Job triggered by policy \"Test Policy\".`, "User-defined CI/CD variables are ignored in this job (except for EXCEPTION_VAR1, EXCEPTION_VAR2) according to the policy."}, }, } for _, tc := range testCases { t.Run(tc.desc, func(t *testing.T) { logs := bytes.Buffer{} lentry := logrus.New() lentry.Out = &logs logger := buildlogger.New(nil, logrus.NewEntry(lentry), buildlogger.Options{}) b := &Build{ Runner: &RunnerConfig{}, Job: spec.Job{ PolicyOptions: tc.policyOptions, }, logger: logger, } b.printPolicyOptions() if len(tc.contains) == 0 { assert.Empty(t, logs.String()) } else { for i := range tc.contains { assert.Contains(t, logs.String(), tc.contains[i]) } } }) } } func TestGetStageTimeoutContexts(t *testing.T) { defaultTimeouts := []stageTimeout{ {configName: "RUNNER_SCRIPT_TIMEOUT", defaultTimeout: 0}, {configName: "RUNNER_AFTER_SCRIPT_TIMEOUT", defaultTimeout: 5 * time.Minute}, } tests := map[string]struct { variables map[string]string expected map[string]time.Duration contains []string jobTimeout time.Duration }{ "after_script must have a timeout, even if set to zero": { variables: map[string]string{ "RUNNER_AFTER_SCRIPT_TIMEOUT": "0s", }, expected: map[string]time.Duration{ "RUNNER_SCRIPT_TIMEOUT": time.Hour, "RUNNER_AFTER_SCRIPT_TIMEOUT": 5 * time.Minute, }, jobTimeout: time.Hour, }, "no timeout provided": { variables: map[string]string{}, expected: map[string]time.Duration{ "RUNNER_SCRIPT_TIMEOUT": time.Hour, "RUNNER_AFTER_SCRIPT_TIMEOUT": 5 * time.Minute, }, jobTimeout: time.Hour, }, "timeout absolute": { variables: map[string]string{ "RUNNER_SCRIPT_TIMEOUT": "5m", }, expected: map[string]time.Duration{ "RUNNER_SCRIPT_TIMEOUT": 5 * time.Minute, 
"RUNNER_AFTER_SCRIPT_TIMEOUT": 5 * time.Minute, }, jobTimeout: time.Hour, }, "timeout last relative": { variables: map[string]string{ "RUNNER_SCRIPT_TIMEOUT": "5m", "RUNNER_AFTER_SCRIPT_TIMEOUT": "-10m", }, expected: map[string]time.Duration{ "RUNNER_SCRIPT_TIMEOUT": 5 * time.Minute, "RUNNER_AFTER_SCRIPT_TIMEOUT": 5 * time.Minute, }, contains: []string{"Ignoring relative RUNNER_AFTER_SCRIPT_TIMEOUT timeout: -10m"}, jobTimeout: time.Hour, }, "timeout first relative": { variables: map[string]string{ "RUNNER_SCRIPT_TIMEOUT": "-5m", "RUNNER_AFTER_SCRIPT_TIMEOUT": "10m", }, expected: map[string]time.Duration{ "RUNNER_SCRIPT_TIMEOUT": time.Hour, "RUNNER_AFTER_SCRIPT_TIMEOUT": 10 * time.Minute, }, contains: []string{"Ignoring relative RUNNER_SCRIPT_TIMEOUT timeout: -5m"}, jobTimeout: time.Hour, }, "timeout both relative": { variables: map[string]string{ "RUNNER_SCRIPT_TIMEOUT": "-15m", "RUNNER_AFTER_SCRIPT_TIMEOUT": "-40m", }, expected: map[string]time.Duration{ "RUNNER_SCRIPT_TIMEOUT": 1 * time.Hour, "RUNNER_AFTER_SCRIPT_TIMEOUT": 5 * time.Minute, }, contains: []string{ "Ignoring relative RUNNER_SCRIPT_TIMEOUT timeout: -15", "Ignoring relative RUNNER_AFTER_SCRIPT_TIMEOUT timeout: -40m", }, jobTimeout: time.Hour, }, "timeout relative and exceeds timeout": { variables: map[string]string{ "RUNNER_SCRIPT_TIMEOUT": "-40m", "RUNNER_AFTER_SCRIPT_TIMEOUT": "-40m", }, expected: map[string]time.Duration{ "RUNNER_SCRIPT_TIMEOUT": time.Hour, "RUNNER_AFTER_SCRIPT_TIMEOUT": 5 * time.Minute, }, contains: []string{ "Ignoring relative RUNNER_SCRIPT_TIMEOUT timeout: -40m", "Ignoring relative RUNNER_AFTER_SCRIPT_TIMEOUT timeout: -40m", }, jobTimeout: time.Hour, }, "timeout relative and exceeds timeout and no time left": { variables: map[string]string{ "RUNNER_SCRIPT_TIMEOUT": "-40m", "RUNNER_AFTER_SCRIPT_TIMEOUT": "-40m", }, expected: map[string]time.Duration{ "RUNNER_SCRIPT_TIMEOUT": 1 * time.Millisecond, "RUNNER_AFTER_SCRIPT_TIMEOUT": 1 * time.Millisecond, }, contains: []string{ 
"Ignoring relative RUNNER_SCRIPT_TIMEOUT timeout: -40m", "Ignoring relative RUNNER_AFTER_SCRIPT_TIMEOUT timeout: -40m", }, jobTimeout: time.Millisecond, }, "timeout is invalid": { variables: map[string]string{ "RUNNER_SCRIPT_TIMEOUT": "foobar", }, expected: map[string]time.Duration{ "RUNNER_SCRIPT_TIMEOUT": 0, "RUNNER_AFTER_SCRIPT_TIMEOUT": time.Millisecond, }, contains: []string{"Ignoring malformed RUNNER_SCRIPT_TIMEOUT timeout: foobar"}, jobTimeout: time.Millisecond, }, "timeout when no parent timeout": { variables: map[string]string{ "RUNNER_SCRIPT_TIMEOUT": "-10m", }, expected: map[string]time.Duration{ "RUNNER_SCRIPT_TIMEOUT": 0, "RUNNER_AFTER_SCRIPT_TIMEOUT": 5 * time.Minute, }, contains: []string{"Ignoring relative RUNNER_SCRIPT_TIMEOUT timeout: -10m"}, }, "script timeout longer than job timeout": { variables: map[string]string{ "RUNNER_SCRIPT_TIMEOUT": "60m", }, expected: map[string]time.Duration{ "RUNNER_SCRIPT_TIMEOUT": 40 * time.Minute, "RUNNER_AFTER_SCRIPT_TIMEOUT": 5 * time.Minute, }, contains: []string{"RUNNER_SCRIPT_TIMEOUT timeout: 60m is longer than job timeout."}, jobTimeout: 40 * time.Minute, }, } for name, tc := range tests { t.Run(name, func(t *testing.T) { logs := bytes.Buffer{} lentry := logrus.New() lentry.Out = &logs logger := buildlogger.New(nil, logrus.NewEntry(lentry), buildlogger.Options{}) b := &Build{ Runner: &RunnerConfig{}, logger: logger, } for key, val := range tc.variables { b.Variables = append(b.Variables, spec.Variable{ Key: key, Value: val, }) } ctx := t.Context() if tc.jobTimeout > 0 { var cancel func() ctx, cancel = context.WithTimeout(ctx, tc.jobTimeout) defer cancel() } for key, ctxProvider := range b.getStageTimeoutContexts(ctx, defaultTimeouts...) 
{ ctx, cancel := ctxProvider() defer cancel() deadline, _ := ctx.Deadline() if !deadline.IsZero() { assert.WithinDuration(t, time.Now().Add(tc.expected[key]), deadline, time.Second, key) } } if len(tc.contains) == 0 { assert.Empty(t, logs.String()) } else { for i := range tc.contains { assert.Contains(t, logs.String(), tc.contains[i]) } } }) } } func Test_logUsedImages(t *testing.T) { const ( testImage1 = "test_image:latest" testImage2 = "service_image:v1.0" testImage3 = "registry.gitlab.example.com/my/project/image@sha256:123456" testPlatform = "platform" ) tests := map[string]struct { featureOn bool image spec.Image services spec.Services assertImages func(t *testing.T, images []string, platforms []string) }{ "FF disabled": { featureOn: false, image: spec.Image{Name: testImage1}, services: spec.Services{ {Name: testImage2}, {Name: testImage3}, }, assertImages: func(t *testing.T, images []string, _ []string) { assert.Empty(t, images) }, }, "no images defined": { featureOn: true, assertImages: func(t *testing.T, images []string, _ []string) { assert.Empty(t, images) }, }, "job image defined": { featureOn: true, image: spec.Image{ Name: testImage1, ExecutorOptions: spec.ImageExecutorOptions{ Docker: spec.ImageDockerOptions{ Platform: testPlatform, }, }, }, assertImages: func(t *testing.T, images []string, platforms []string) { assert.Len(t, images, 1) assert.Contains(t, images, testImage1) assert.Len(t, platforms, 1) assert.Contains(t, platforms, testPlatform) }, }, "service images defined": { featureOn: true, services: spec.Services{ {Name: testImage1}, { Name: testImage2, ExecutorOptions: spec.ImageExecutorOptions{ Docker: spec.ImageDockerOptions{ Platform: testPlatform, }, }, }, }, assertImages: func(t *testing.T, images []string, platforms []string) { assert.Len(t, images, 2) assert.Contains(t, images, testImage1) assert.Contains(t, images, testImage2) assert.Len(t, platforms, 1) assert.Contains(t, platforms, testPlatform) }, }, "all images defined": { 
featureOn: true, image: spec.Image{Name: testImage1}, services: spec.Services{ {Name: testImage2}, {Name: testImage3}, }, assertImages: func(t *testing.T, images []string, _ []string) { assert.Len(t, images, 3) assert.Contains(t, images, testImage1) assert.Contains(t, images, testImage2) assert.Contains(t, images, testImage3) }, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { logger, hook := test.NewNullLogger() b := &Build{ Runner: &RunnerConfig{ RunnerSettings: RunnerSettings{ FeatureFlags: map[string]bool{ featureflags.LogImagesConfiguredForJob: tt.featureOn, }, }, RunnerCredentials: RunnerCredentials{ Logger: logger, }, }, Job: spec.Job{ Image: tt.image, Services: tt.services, }, } b.logUsedImages() var images []string var platforms []string for _, entry := range hook.AllEntries() { image, ok := entry.Data["image_name"] if !ok { continue } images = append(images, image.(string)) platform, ok := entry.Data["image_platform"] if !ok { continue } platforms = append(platforms, platform.(string)) } tt.assertImages(t, images, platforms) }) } } func TestBuildStageMetrics(t *testing.T) { p := setupSuccessfulMockExecutor(t, func(options ExecutorPrepareOptions) error { return nil }) rc := &RunnerConfig{} build := registerExecutorWithSuccessfulBuild(t, p, rc) build.Runner.Environment = append(build.Runner.Environment, fmt.Sprintf("%s=true", featureflags.ExportHighCardinalityMetrics)) // each expected build stage should be called twice, for start and for end stagesMap := make(map[BuildStage]int) stageFn := func(stage BuildStage) { stagesMap[stage]++ } build.OnBuildStageStartFn = stageFn build.OnBuildStageEndFn = stageFn err := build.Run(&Config{}, &Trace{Writer: os.Stdout}) assert.NoError(t, err) expectedStages := []BuildStage{ BuildStagePrepare, BuildStagePrepareExecutor, BuildStageRestoreCache, BuildStageUploadOnSuccessArtifacts, BuildStageGetSources, BuildStageDownloadArtifacts, BuildStageCleanup, BuildStageAfterScript, BuildStageArchiveOnSuccessCache, 
BuildStage("step_script"), } for _, s := range expectedStages { assert.Equal(t, stagesMap[s], 2) delete(stagesMap, s) } assert.Len(t, stagesMap, 0) } func TestBuild_DispatchedJobExecutionMode(t *testing.T) { build := Build{ Runner: &RunnerConfig{}, Job: spec.Job{ Run: spec.Run{{}}, Variables: spec.Variables{ { Key: featureflags.UseScriptToStepMigration, Value: "true", }, }, }, ExecutorFeatures: FeaturesInfo{ NativeStepsIntegration: false, }, } assert.Equal(t, JobExecutionModeTraditional, build.DispatchedJobExecutionMode()) build.markStepDispatchedInScript() assert.Equal(t, JobExecutionModeSteps, build.DispatchedJobExecutionMode()) } func TestBuildStageMetricsFailBuild(t *testing.T) { executor, provider := setupMockExecutorAndProvider(t) executor.On("Prepare", mock.Anything, mock.Anything, mock.Anything). Return(nil).Once() executor.On("Cleanup").Once() // Set up a failing a build script thrownErr := &BuildError{Inner: errors.New("test error"), ExitCode: 1} executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}) executor.On("Run", matchBuildStage(BuildStagePrepare)).Return(nil).Once() executor.On("Run", mock.Anything).Return(thrownErr).Times(3) executor.On("Run", matchBuildStage(BuildStageCleanup)).Return(nil).Once() executor.On("Finish", thrownErr).Once() failedBuild, err := GetFailedBuild() assert.NoError(t, err) build := &Build{ Job: failedBuild, Runner: &RunnerConfig{ RunnerSettings: RunnerSettings{ Executor: t.Name(), }, }, ExecutorProvider: provider, } build.Runner.Environment = append(build.Runner.Environment, fmt.Sprintf("%s=true", featureflags.ExportHighCardinalityMetrics)) // each expected build stage should be called twice, for start and for end stagesMap := make(map[BuildStage]int) stageFn := func(stage BuildStage) { stagesMap[stage]++ } build.OnBuildStageStartFn = stageFn build.OnBuildStageEndFn = stageFn err = build.Run(&Config{}, &Trace{Writer: os.Stdout}) expectedErr := new(BuildError) assert.ErrorIs(t, err, expectedErr) expectedStages 
:= []BuildStage{ BuildStageArchiveOnFailureCache, BuildStageCleanup, BuildStageGetSources, BuildStagePrepare, BuildStagePrepareExecutor, BuildStageUploadOnFailureArtifacts, } for _, s := range expectedStages { assert.Equal(t, stagesMap[s], 2) delete(stagesMap, s) } assert.Len(t, stagesMap, 0) } func TestBuildDurationsAndBoundaryTimes(t *testing.T) { p := NewMockExecutorProvider(t) rc := new(RunnerConfig) rc.RunnerSettings.Executor = t.Name() build, err := NewBuild(spec.Job{}, rc, nil, nil, p) require.NoError(t, err) startedAt1 := build.StartedAt() assert.False(t, startedAt1.IsZero(), "StartedAt should not be a zero-time") assert.True(t, build.FinishedAt().IsZero(), "FinishedAt should be a zero-time") time.Sleep(10 * time.Millisecond) currentDuration1 := build.CurrentDuration() assert.True(t, currentDuration1 >= 10*time.Millisecond, "Current job duration should be greater tha 10ms") time.Sleep(10 * time.Millisecond) currentDuration2 := build.CurrentDuration() assert.True(t, currentDuration2 >= 20*time.Millisecond, "Current job duration should be greater tha 20ms") assert.NotEqual(t, currentDuration1, currentDuration2, "Subsequent CurrentDuration() values shouldn't be equal") time.Sleep(10 * time.Millisecond) assert.Equal(t, time.Duration(0), build.FinalDuration(), "If ensureFinishedAt() wasn't called, final duration should be equal to 0") // Mark the job as finished! 
build.ensureFinishedAt() finalDuration1 := build.FinalDuration() finishedAt1 := build.FinishedAt() assert.True(t, finalDuration1 >= 30*time.Millisecond, "Final duration should be greater than 30ms") assert.False(t, finishedAt1.IsZero(), "FinishedAt should not be a zero-time") time.Sleep(10 * time.Millisecond) startedAt2 := build.StartedAt() finishedAt2 := build.FinishedAt() finalDuration2 := build.FinalDuration() assert.Equal(t, finalDuration1, finalDuration2, "Subsequent FinalDuration() values should be equal") assert.Equal(t, finishedAt1, finishedAt2, "FinishedAt() should not change") assert.Equal(t, startedAt1, startedAt2, "StartedAt() should not change") } func TestBuild_RunCallsEnsureFinishedAt(t *testing.T) { tests := map[string]struct { executorRunError error assertError func(t *testing.T, err error) }{ "succeeded job": { executorRunError: nil, }, "failed job": { executorRunError: assert.AnError, assertError: func(t *testing.T, err error) { assert.ErrorIs(t, err, assert.AnError) }, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { executor := NewMockExecutor(t) executor.EXPECT().Prepare(mock.Anything).Return(nil) executor.EXPECT(). Run(mock.Anything). Run(func(cmd ExecutorCommand) { time.Sleep(1 * time.Millisecond) }). Return(tt.executorRunError) executor.EXPECT().Shell().Return(&ShellScriptInfo{Shell: "script-shell"}).Maybe() executor.EXPECT().Finish(mock.Anything) executor.EXPECT().Cleanup() ep := NewMockExecutorProvider(t) ep.EXPECT().GetFeatures(mock.Anything).Return(nil) ep.EXPECT().Create().Return(executor) rc := new(RunnerConfig) rc.RunnerSettings.Executor = t.Name() interrupt := make(chan os.Signal, 1) build, err := NewBuild(spec.Job{}, rc, interrupt, nil, ep) require.NoError(t, err) // Some of the job execution steps use the configurable number of attempts // before they report failure. That includes, for example, the predefined // get_sources step. 
// For these steps, the loop that handles subsequent attempts may use // the exponential backoff delay, when the FF is set to true, which is true. // That is done, unfortunately, even when there is only one attempt to be // executed. // As the tests here are returning error early (which includes also context // cancel caused by simulating job cancel or runner process interrupt), this // backoff causes an additional 5 seconds delay, that we don't need here. // By disabling the feature flag, we speed up the tests. build.initSettings() build.buildSettings.FeatureFlags[featureflags.UseExponentialBackoffStageRetry] = false require.Zero(t, build.finishedAt) trace := NewMockLightJobTrace(t) trace.EXPECT().SetAbortFunc(mock.Anything) trace.EXPECT().SetCancelFunc(mock.AnythingOfType("context.CancelFunc")).Maybe() trace.EXPECT().IsStdout().Return(false) trace.EXPECT().Fail(mock.Anything, mock.Anything).Return(nil).Maybe() trace.EXPECT().Success().Return(nil).Maybe() trace.EXPECT().SetSupportedFailureReasonMapper(mock.Anything).Maybe() l := logrus.New() lh := test.NewLocal(l) build.Runner.RunnerCredentials.Logger = l err = build.Run(&Config{}, trace) if tt.assertError != nil { tt.assertError(t, err) } else { assert.NoError(t, err) } for _, e := range lh.AllEntries() { if !strings.Contains(e.Message, "Job succeeded") && !strings.Contains(e.Message, "Job failed") { continue } if assert.Contains(t, e.Data, "duration_s") { assert.Greater(t, e.Data["duration_s"], float64(0)) } } assert.NotZero(t, build.finishedAt) }) } } func TestBuildIsProtected(t *testing.T) { const protectedVarName = "CI_COMMIT_REF_PROTECTED" someFalse, someTrue := false, true tests := []struct { name string gitInfoProtected *bool variables spec.Variables expected bool }{ { name: "no config", }, { name: "non-protected via GitInfo", gitInfoProtected: &someFalse, variables: spec.Variables{{Key: protectedVarName, Value: "true"}}, }, { name: "protected via GitInfo", gitInfoProtected: &someTrue, variables: 
spec.Variables{{Key: protectedVarName, Value: "false"}}, expected: true, }, { name: "non-protected via JobVariables", variables: spec.Variables{{Key: protectedVarName, Value: "false"}}, }, { name: "protected via JobVariables", variables: spec.Variables{{Key: protectedVarName, Value: "true"}}, expected: true, }, { name: "non-protected via JobVariables, multiple vars", variables: spec.Variables{ {Key: protectedVarName, Value: "false"}, {Key: protectedVarName, Value: "true"}, }, }, { name: "protected via JobVariables, multiple vars", variables: spec.Variables{ {Key: protectedVarName, Value: "true"}, {Key: protectedVarName, Value: "false"}, }, expected: true, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { build := &Build{ Job: spec.Job{ Variables: test.variables, GitInfo: spec.GitInfo{ Protected: test.gitInfoProtected, }, }, } actual := build.IsProtected() assert.Equal(t, test.expected, actual) }) } } func TestExpandingInputs(t *testing.T) { inputs, err := spec.NewJobInputs([]spec.JobInput{ { Key: "any_input", Value: spec.JobInputValue{ Type: spec.JobInputContentTypeNameString, Content: value.String("any-value"), Sensitive: false, }, }, }) require.NoError(t, err) setup := func(t *testing.T) ExecutorProvider { t.Helper() return setupSuccessfulMockExecutor(t, func(options ExecutorPrepareOptions) error { return nil }) } run := func(t *testing.T, job spec.Job, ffEnabled bool, p ExecutorProvider) *Build { build, err := NewBuild( job, &RunnerConfig{RunnerSettings: RunnerSettings{ Executor: t.Name(), FeatureFlags: map[string]bool{featureflags.EnableJobInputsInterpolation: ffEnabled}, }}, nil, nil, p, ) require.NoError(t, err) err = build.Run(&Config{}, &Trace{Writer: os.Stdout}) require.NoError(t, err) return build } t.Run("fail to expand inputs", func(t *testing.T) { p := NewMockExecutorProvider(t) job := spec.Job{ Inputs: inputs, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'Input is: ${{ job.inputs.any_input + 
}}'"}, When: spec.StepWhenAlways, }, }, } build, err := NewBuild( job, &RunnerConfig{RunnerSettings: RunnerSettings{ Executor: t.Name(), FeatureFlags: map[string]bool{featureflags.EnableJobInputsInterpolation: true}, }}, nil, nil, p, ) require.NoError(t, err) err = build.Run(&Config{}, &Trace{Writer: os.Stdout}) et := &BuildError{} require.ErrorAs(t, err, &et) assert.Equal(t, ConfigurationError, et.FailureReason) etInner := &spec.InputInterpolationError{} assert.ErrorAs(t, et.Inner, &etInner) }) t.Run("expand inputs in step script", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'Input is: ${{ job.inputs.any_input }}'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, "echo 'Input is: any-value'", build.Steps[0].Script[0]) }) t.Run("do not expand inputs in step script with FF disabled", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'Input is: ${{ job.inputs.any_input }}'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, false, p) assert.Equal(t, "echo 'Input is: ${{ job.inputs.any_input }}'", build.Steps[0].Script[0]) }) t.Run("expand inputs in step after_script", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{""}, When: spec.StepWhenAlways, }, { Name: spec.StepNameAfterScript, Script: spec.StepScript{"echo 'Input is: ${{ job.inputs.any_input }}'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, "echo 'Input is: any-value'", build.Steps[1].Script[0]) }) t.Run("expand inputs in image name", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Image: spec.Image{ Name: "${{ job.inputs.any_input }}-image:latest", }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 
'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, "any-value-image:latest", build.Image.Name) }) t.Run("expand inputs in image entrypoint", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Image: spec.Image{ Name: "alpine:latest", Entrypoint: []string{"/bin/sh", "-c", "echo ${{ job.inputs.any_input }}"}, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, []string{"/bin/sh", "-c", "echo any-value"}, build.Image.Entrypoint) }) t.Run("expand inputs in image command", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Image: spec.Image{ Name: "alpine:latest", Command: []string{ "/bin/sh", "-c", "echo ${{ job.inputs.any_input }}", "start-${{ job.inputs.any_input }}", }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) expected := []string{ "/bin/sh", "-c", "echo any-value", "start-any-value", } assert.Equal(t, expected, build.Image.Command) }) t.Run("expand inputs in docker platform", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Image: spec.Image{ Name: "alpine:latest", ExecutorOptions: spec.ImageExecutorOptions{ Docker: spec.ImageDockerOptions{ Platform: "linux/${{ job.inputs.any_input }}", }, }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, "linux/any-value", build.Image.ExecutorOptions.Docker.Platform) }) t.Run("expand inputs in docker user", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Image: spec.Image{ Name: "alpine:latest", ExecutorOptions: spec.ImageExecutorOptions{ Docker: spec.ImageDockerOptions{ User: spec.StringOrInt64("${{ job.inputs.any_input }}"), }, }, }, Steps: spec.Steps{ { Name: 
spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, spec.StringOrInt64("any-value"), build.Image.ExecutorOptions.Docker.User) }) t.Run("expand inputs in kubernetes user", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Image: spec.Image{ Name: "alpine:latest", ExecutorOptions: spec.ImageExecutorOptions{ Kubernetes: spec.ImageKubernetesOptions{ User: spec.StringOrInt64("${{ job.inputs.any_input }}"), }, }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, spec.StringOrInt64("any-value"), build.Image.ExecutorOptions.Kubernetes.User) }) t.Run("expand inputs in pull policies", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Image: spec.Image{ Name: "alpine:latest", PullPolicies: []spec.PullPolicy{"${{ job.inputs.any_input }}-if-not-present"}, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, []DockerPullPolicy{"any-value-if-not-present"}, build.Image.PullPolicies) }) t.Run("expand inputs in cache key", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Cache: spec.Caches{ { Key: "${{ job.inputs.any_input }}-cache-key", }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, "any-value-cache-key", build.Cache[0].Key) }) t.Run("expand inputs in cache fallback keys", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Cache: spec.Caches{ { Key: "main-cache-key", FallbackKeys: []string{ "${{ job.inputs.any_input }}-fallback-1", "fallback-${{ job.inputs.any_input }}-2", "${{ job.inputs.any_input }}", }, }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, 
Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) expected := spec.CacheFallbackKeys{ "any-value-fallback-1", "fallback-any-value-2", "any-value", } assert.Equal(t, expected, build.Cache[0].FallbackKeys) }) t.Run("expand inputs in cache paths", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Cache: spec.Caches{ { Key: "cache-key", Paths: []string{ "${{ job.inputs.any_input }}/cache", "build/${{ job.inputs.any_input }}", "${{ job.inputs.any_input }}", }, }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) expected := spec.ArtifactPaths{ "any-value/cache", "build/any-value", "any-value", } assert.Equal(t, expected, build.Cache[0].Paths) }) t.Run("expand inputs in cache when", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Cache: spec.Caches{ { Key: "cache-key", When: spec.CacheWhen("on_${{ job.inputs.any_input }}"), }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, spec.CacheWhen("on_any-value"), build.Cache[0].When) }) t.Run("expand inputs in cache policy", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Cache: spec.Caches{ { Key: "cache-key", Policy: spec.CachePolicy("${{ job.inputs.any_input }}-push"), }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, spec.CachePolicy("any-value-push"), build.Cache[0].Policy) }) t.Run("expand inputs in artifact name", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Artifacts: spec.Artifacts{ { Name: "${{ job.inputs.any_input }}-artifact", }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: 
spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, "any-value-artifact", build.Artifacts[0].Name) }) t.Run("expand inputs in artifact paths", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Artifacts: spec.Artifacts{ { Name: "test-artifact", Paths: spec.ArtifactPaths{ "${{ job.inputs.any_input }}/artifacts", "build/${{ job.inputs.any_input }}", "${{ job.inputs.any_input }}", }, }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) expected := spec.ArtifactPaths{ "any-value/artifacts", "build/any-value", "any-value", } assert.Equal(t, expected, build.Artifacts[0].Paths) }) t.Run("expand inputs in artifact exclude", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Artifacts: spec.Artifacts{ { Name: "test-artifact", Exclude: spec.ArtifactExclude{ "${{ job.inputs.any_input }}/exclude", "temp/${{ job.inputs.any_input }}", "${{ job.inputs.any_input }}", }, }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) expected := spec.ArtifactExclude{ "any-value/exclude", "temp/any-value", "any-value", } assert.Equal(t, expected, build.Artifacts[0].Exclude) }) t.Run("expand inputs in artifact expire_in", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Artifacts: spec.Artifacts{ { Name: "test-artifact", ExpireIn: "${{ job.inputs.any_input }} days", }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, "any-value days", build.Artifacts[0].ExpireIn) }) t.Run("expand inputs in artifact when", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Artifacts: spec.Artifacts{ { Name: "test-artifact", When: spec.ArtifactWhen("on_${{ job.inputs.any_input }}"), }, }, 
Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, spec.ArtifactWhen("on_any-value"), build.Artifacts[0].When) }) t.Run("expand inputs in service name", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Services: spec.Services{ { Name: "${{ job.inputs.any_input }}-service:latest", }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, "any-value-service:latest", build.Services[0].Name) }) t.Run("expand inputs in service entrypoint", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Services: spec.Services{ { Name: "postgres:latest", Entrypoint: []string{"/bin/sh", "-c", "echo ${{ job.inputs.any_input }}"}, }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, []string{"/bin/sh", "-c", "echo any-value"}, build.Services[0].Entrypoint) }) t.Run("expand inputs in service docker platform", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Services: spec.Services{ { Name: "postgres:latest", ExecutorOptions: spec.ImageExecutorOptions{ Docker: spec.ImageDockerOptions{ Platform: "linux/${{ job.inputs.any_input }}", }, }, }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, "linux/any-value", build.Services[0].ExecutorOptions.Docker.Platform) }) t.Run("expand inputs in service docker user", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Services: spec.Services{ { Name: "postgres:latest", ExecutorOptions: spec.ImageExecutorOptions{ Docker: spec.ImageDockerOptions{ User: spec.StringOrInt64("${{ job.inputs.any_input }}"), }, }, }, 
}, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, spec.StringOrInt64("any-value"), build.Services[0].ExecutorOptions.Docker.User) }) t.Run("expand inputs in service kubernetes user", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Services: spec.Services{ { Name: "postgres:latest", ExecutorOptions: spec.ImageExecutorOptions{ Kubernetes: spec.ImageKubernetesOptions{ User: spec.StringOrInt64("${{ job.inputs.any_input }}"), }, }, }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, spec.StringOrInt64("any-value"), build.Services[0].ExecutorOptions.Kubernetes.User) }) t.Run("expand inputs in service pull policies", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Services: spec.Services{ { Name: "postgres:latest", PullPolicies: []spec.PullPolicy{"${{ job.inputs.any_input }}-if-not-present"}, }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) assert.Equal(t, []spec.PullPolicy{"any-value-if-not-present"}, build.Services[0].PullPolicies) }) t.Run("expand inputs in service command", func(t *testing.T) { p := setup(t) job := spec.Job{ Inputs: inputs, Services: spec.Services{ { Name: "postgres:latest", Command: []string{ "/bin/sh", "-c", "echo ${{ job.inputs.any_input }}", "start-${{ job.inputs.any_input }}", }, }, }, Steps: spec.Steps{ { Name: spec.StepNameScript, Script: spec.StepScript{"echo 'test'"}, When: spec.StepWhenAlways, }, }, } build := run(t, job, true, p) expected := []string{ "/bin/sh", "-c", "echo any-value", "start-any-value", } assert.Equal(t, expected, build.Services[0].Command) }) } func TestBuild_attemptExecuteStage(t *testing.T) { tests := []struct { name string attempts 
int featureFlagEnabled bool shouldRetry bool expectedRetryMessage bool expectedRetryCount int executorFailurePattern []bool // true = fail, false = succeed }{ { name: "single attempt with failure - no retry message", attempts: 1, featureFlagEnabled: true, shouldRetry: false, expectedRetryMessage: false, expectedRetryCount: 0, executorFailurePattern: []bool{true}, }, { name: "two attempts with failure on first - shows retry message", attempts: 2, featureFlagEnabled: true, shouldRetry: true, expectedRetryMessage: true, expectedRetryCount: 1, executorFailurePattern: []bool{true, true}, }, { name: "three attempts with failures - shows retry message twice", attempts: 3, featureFlagEnabled: true, shouldRetry: true, expectedRetryMessage: true, expectedRetryCount: 2, executorFailurePattern: []bool{true, true, true}, }, { name: "two attempts success on second - shows retry message once", attempts: 2, featureFlagEnabled: true, shouldRetry: true, expectedRetryMessage: true, expectedRetryCount: 1, executorFailurePattern: []bool{true, false}, }, { name: "three attempts with feature flag disabled - no retry message", attempts: 3, featureFlagEnabled: false, shouldRetry: false, expectedRetryMessage: false, expectedRetryCount: 0, executorFailurePattern: []bool{true, true, true}, }, { name: "single attempt with success - no retry message", attempts: 1, featureFlagEnabled: true, shouldRetry: false, expectedRetryMessage: false, expectedRetryCount: 0, executorFailurePattern: []bool{false}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { // Set up logger with test hook to capture log messages logger := logrus.New() hook := test.NewLocal(logger) // Create a mock executor executor := NewMockExecutor(t) executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}).Maybe() // Set up the executor to fail or succeed based on the pattern for _, shouldFail := range tt.executorFailurePattern { if shouldFail { executor.On("Run", 
mock.Anything).Return(errors.New("simulated failure")).Once() } else { executor.On("Run", mock.Anything).Return(nil).Once() } } // Create a build with the specified configuration build := &Build{ Runner: &RunnerConfig{ RunnerCredentials: RunnerCredentials{ Logger: logger, }, }, Job: spec.Job{ Variables: spec.Variables{}, }, logger: buildlogger.New(nil, logrus.NewEntry(logger), buildlogger.Options{}), } // Initialize settings build.initSettings() // Set the feature flag if tt.featureFlagEnabled { build.buildSettings.FeatureFlags[featureflags.UseExponentialBackoffStageRetry] = true } else { build.buildSettings.FeatureFlags[featureflags.UseExponentialBackoffStageRetry] = false } // Call attemptExecuteStage ctx := t.Context() err := build.attemptExecuteStage(ctx, BuildStageGetSources, executor, tt.attempts, nil) // Verify the error state if tt.executorFailurePattern[len(tt.executorFailurePattern)-1] { assert.Error(t, err, "Expected error when final attempt fails") } else { assert.NoError(t, err, "Expected no error when an attempt succeeds") } // Count retry messages in the logs retryMessageCount := 0 for _, entry := range hook.AllEntries() { if strings.Contains(entry.Message, "Retrying in") { retryMessageCount++ } } // Verify retry message behavior if tt.expectedRetryMessage { assert.Equal(t, tt.expectedRetryCount, retryMessageCount, "Expected %d retry messages but found %d", tt.expectedRetryCount, retryMessageCount) } else { assert.Equal(t, 0, retryMessageCount, "Expected no retry messages but found %d", retryMessageCount) } // Verify all expected calls were made executor.AssertExpectations(t) }) } } func TestBuild_attemptExecuteStageWithRetryCallback(t *testing.T) { tests := []struct { name string attempts int retryCallbackError bool expectedRetryMessage bool }{ { name: "retry callback succeeds - stage executes", attempts: 2, retryCallbackError: false, expectedRetryMessage: true, }, { name: "retry callback fails - stage skipped", attempts: 2, retryCallbackError: 
true, expectedRetryMessage: true, // First attempt fails and prints retry message before callback error on attempt 1 }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { // Set up logger with test hook logger := logrus.New() hook := test.NewLocal(logger) // Create a mock executor executor := NewMockExecutor(t) executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}).Maybe() // If callback succeeds, executor will be called for all attempts // If callback fails, it only fails after attempt 0, so executor runs once if !tt.retryCallbackError { executor.On("Run", mock.Anything).Return(errors.New("simulated failure")).Times(tt.attempts) } else { executor.On("Run", mock.Anything).Return(errors.New("simulated failure")).Once() } // Create build build := &Build{ Runner: &RunnerConfig{ RunnerCredentials: RunnerCredentials{ Logger: logger, }, }, Job: spec.Job{ Variables: spec.Variables{}, }, logger: buildlogger.New(nil, logrus.NewEntry(logger), buildlogger.Options{}), } build.initSettings() build.buildSettings.FeatureFlags[featureflags.UseExponentialBackoffStageRetry] = true // Create retry callback retryCallback := func(attempt int) error { if tt.retryCallbackError && attempt > 0 { return errors.New("retry callback error") } return nil } // Call attemptExecuteStage with retry callback ctx := t.Context() err := build.attemptExecuteStage(ctx, BuildStageGetSources, executor, tt.attempts, retryCallback) // Should always have an error since we're simulating failures assert.Error(t, err) // Count retry messages retryMessageCount := 0 for _, entry := range hook.AllEntries() { if strings.Contains(entry.Message, "Retrying in") { retryMessageCount++ } } if tt.expectedRetryMessage { assert.Greater(t, retryMessageCount, 0, "Expected at least one retry message") } else { assert.Equal(t, 0, retryMessageCount, "Expected no retry messages") } executor.AssertExpectations(t) }) } } func TestBuild_attemptExecuteStageExponentialBackoff(t *testing.T) { // Skip this 
test in short mode as it tests actual timing if testing.Short() { t.Skip("Skipping timing test in short mode") } // This test verifies that the exponential backoff actually waits between retries logger := logrus.New() hook := test.NewLocal(logger) executor := NewMockExecutor(t) executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}).Maybe() executor.On("Run", mock.Anything).Return(errors.New("failure")).Times(3) build := &Build{ Runner: &RunnerConfig{ RunnerCredentials: RunnerCredentials{ Logger: logger, }, }, Job: spec.Job{ Variables: spec.Variables{}, }, logger: buildlogger.New(nil, logrus.NewEntry(logger), buildlogger.Options{}), } build.initSettings() build.buildSettings.FeatureFlags[featureflags.UseExponentialBackoffStageRetry] = true ctx := t.Context() startTime := time.Now() err := build.attemptExecuteStage(ctx, BuildStageGetSources, executor, 3, nil) elapsed := time.Since(startTime) require.Error(t, err) // With 3 attempts, we should have 2 retries // First retry: ~5s, Second retry: ~7.5s (5 * 1.5) // Total should be at least 10s (allowing for some variance) assert.Greater(t, elapsed, 10*time.Second, "Expected exponential backoff delays") // Verify we got 2 retry messages retryMessageCount := 0 for _, entry := range hook.AllEntries() { if strings.Contains(entry.Message, "Retrying in") { retryMessageCount++ } } assert.Equal(t, 2, retryMessageCount) executor.AssertExpectations(t) } func TestBuild_attemptExecuteStageInvalidAttempts(t *testing.T) { tests := []struct { name string attempts int wantErr bool }{ { name: "zero attempts - invalid", attempts: 0, wantErr: true, }, { name: "negative attempts - invalid", attempts: -1, wantErr: true, }, { name: "eleven attempts - invalid", attempts: 11, wantErr: true, }, { name: "one attempt - valid", attempts: 1, wantErr: false, }, { name: "ten attempts - valid", attempts: 10, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { logger := logrus.New() executor := 
NewMockExecutor(t) executor.On("Shell").Return(&ShellScriptInfo{Shell: "script-shell"}).Maybe() if !tt.wantErr { executor.On("Run", mock.Anything).Return(nil).Maybe() } build := &Build{ Runner: &RunnerConfig{ RunnerCredentials: RunnerCredentials{ Logger: logger, }, }, Job: spec.Job{ Variables: spec.Variables{}, }, } build.initSettings() build.buildSettings.FeatureFlags[featureflags.UseExponentialBackoffStageRetry] = true ctx := t.Context() err := build.attemptExecuteStage(ctx, BuildStageGetSources, executor, tt.attempts, nil) if tt.wantErr { assert.Error(t, err) assert.Contains(t, err.Error(), "out of the range [1, 10]") } else { assert.NoError(t, err) } executor.AssertExpectations(t) }) } } func TestPredefinedServerVariables(t *testing.T) { // predefinedServerJobVariables are variables that _only_ come from the CI // server. // // This list was extracted from: // https://docs.gitlab.com/ci/variables/predefined_variables/#predefined-environment-variables-reference // // handy console js: // console.log(Object.values($("tr td:first-child code").map((_, val) => val.innerText)).join("\n")) // // commented out variables are non-server ci variables, they are handy to keep // here for reference/future update updating. 
var predefinedServerJobVariables = []string{ "CHAT_CHANNEL", "CHAT_INPUT", "CI", "CI_API_V4_URL", // "CI_BUILDS_DIR", "CI_COMMIT_BEFORE_SHA", "CI_COMMIT_DESCRIPTION", "CI_COMMIT_MESSAGE", "CI_COMMIT_REF_NAME", "CI_COMMIT_REF_PROTECTED", "CI_COMMIT_REF_SLUG", "CI_COMMIT_SHA", "CI_COMMIT_SHORT_SHA", "CI_COMMIT_BRANCH", "CI_COMMIT_TAG", "CI_COMMIT_TITLE", "CI_COMMIT_TIMESTAMP", // "CI_CONCURRENT_ID", // "CI_CONCURRENT_PROJECT_ID", "CI_CONFIG_PATH", "CI_DEBUG_TRACE", "CI_DEFAULT_BRANCH", "CI_DEPLOY_FREEZE", "CI_DEPLOY_PASSWORD", "CI_DEPLOY_USER", // "CI_DISPOSABLE_ENVIRONMENT", "CI_ENVIRONMENT_NAME", "CI_ENVIRONMENT_SLUG", "CI_ENVIRONMENT_URL", "CI_EXTERNAL_PULL_REQUEST_IID", "CI_EXTERNAL_PULL_REQUEST_SOURCE_REPOSITORY", "CI_EXTERNAL_PULL_REQUEST_TARGET_REPOSITORY", "CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_NAME", "CI_EXTERNAL_PULL_REQUEST_SOURCE_BRANCH_SHA", "CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_NAME", "CI_EXTERNAL_PULL_REQUEST_TARGET_BRANCH_SHA", "CI_HAS_OPEN_REQUIREMENTS", "CI_JOB_ID", "CI_JOB_IMAGE", "CI_JOB_MANUAL", "CI_JOB_NAME", "CI_JOB_STAGE", "CI_JOB_TOKEN", "CI_JOB_JWT", "CI_JOB_URL", "CI_KUBERNETES_ACTIVE", "CI_MERGE_REQUEST_ASSIGNEES", "CI_MERGE_REQUEST_ID", "CI_MERGE_REQUEST_IID", "CI_MERGE_REQUEST_LABELS", "CI_MERGE_REQUEST_MILESTONE", "CI_MERGE_REQUEST_PROJECT_ID", "CI_MERGE_REQUEST_PROJECT_PATH", "CI_MERGE_REQUEST_PROJECT_URL", "CI_MERGE_REQUEST_REF_PATH", "CI_MERGE_REQUEST_SOURCE_BRANCH_NAME", "CI_MERGE_REQUEST_SOURCE_BRANCH_SHA", "CI_MERGE_REQUEST_SOURCE_PROJECT_ID", "CI_MERGE_REQUEST_SOURCE_PROJECT_PATH", "CI_MERGE_REQUEST_SOURCE_PROJECT_URL", "CI_MERGE_REQUEST_TARGET_BRANCH_NAME", "CI_MERGE_REQUEST_TARGET_BRANCH_SHA", "CI_MERGE_REQUEST_TITLE", "CI_MERGE_REQUEST_EVENT_TYPE", "CI_NODE_INDEX", "CI_NODE_TOTAL", "CI_PAGES_DOMAIN", "CI_PAGES_URL", "CI_PIPELINE_ID", "CI_PIPELINE_IID", "CI_PIPELINE_SOURCE", "CI_PIPELINE_TRIGGERED", "CI_PIPELINE_URL", // "CI_PROJECT_DIR", "CI_PROJECT_ID", "CI_PROJECT_NAME", "CI_PROJECT_NAMESPACE", 
"CI_PROJECT_ROOT_NAMESPACE", "CI_PROJECT_PATH", "CI_PROJECT_PATH_SLUG", "CI_PROJECT_REPOSITORY_LANGUAGES", "CI_PROJECT_TITLE", "CI_PROJECT_URL", "CI_PROJECT_VISIBILITY", "CI_REGISTRY", "CI_REGISTRY_IMAGE", "CI_REGISTRY_PASSWORD", "CI_REGISTRY_USER", "CI_REPOSITORY_URL", "CI_RUNNER_DESCRIPTION", // "CI_RUNNER_EXECUTABLE_ARCH", "CI_RUNNER_ID", // "CI_RUNNER_REVISION", "CI_RUNNER_SHORT_TOKEN", "CI_RUNNER_TAGS", // "CI_RUNNER_VERSION", // "CI_SERVER", "CI_SERVER_URL", "CI_SERVER_HOST", "CI_SERVER_PORT", "CI_SERVER_PROTOCOL", "CI_SERVER_NAME", "CI_SERVER_REVISION", "CI_SERVER_VERSION", "CI_SERVER_VERSION_MAJOR", "CI_SERVER_VERSION_MINOR", "CI_SERVER_VERSION_PATCH", "CI_SHARED_ENVIRONMENT", "GITLAB_CI", "GITLAB_FEATURES", "GITLAB_USER_EMAIL", "GITLAB_USER_ID", "GITLAB_USER_LOGIN", "GITLAB_USER_NAME", } build := &Build{} for _, v := range build.GetAllVariables() { for _, predefined := range predefinedServerJobVariables { assert.NotEqual( t, predefined, v.Key, "%s is a predefined server variable and should not be set by runner", predefined, ) } } } func TestWrapStepStageErr_NormalizesWindowsExitCode(t *testing.T) { err := fmt.Errorf("step failed: exit status 4294967295") berr, ok := wrapStepStageErr(err).(*BuildError) require.True(t, ok, "expected *BuildError") assert.Equal(t, -1, berr.ExitCode) } func Test_wrapStepStageErr(t *testing.T) { tests := map[string]struct { err error expectedNil bool expectedReason spec.JobFailureReason }{ "nil error": { err: nil, expectedNil: true, }, "ErrNoStepRunnerButOkay": { err: steps.ErrNoStepRunnerButOkay, expectedNil: true, }, "client internal error": { err: fmt.Errorf("wrapping: %w", &steps.ClientInternalError{ Err: errors.New("run request failed for job \"123\": rpc error: code = Internal desc = panic in /step.StepRunner/Run"), }), expectedReason: ScriptFailure, }, "client status error with ErrorStepFailure": { err: fmt.Errorf("executing steps request: %w", &steps.ClientStatusError{ Status: client.Status{State: client.StateFailure, 
ErrorKind: client.ErrorStepFailure}, Err: errors.New("step failed"), }), expectedReason: ScriptFailure, }, "client status error with ErrorInternal": { err: fmt.Errorf("executing steps request: %w", &steps.ClientStatusError{ Status: client.Status{State: client.StateFailure, ErrorKind: client.ErrorInternal}, Err: errors.New("panic in step function"), }), expectedReason: ScriptFailure, }, "client status error with ErrorUnknown": { err: fmt.Errorf("executing steps request: %w", &steps.ClientStatusError{ Status: client.Status{State: client.StateUnspecified, ErrorKind: client.ErrorUnknown}, Err: errors.New("unspecified"), }), expectedReason: UnknownFailure, }, "client status error with ErrorCancelled maps to JobCanceled": { err: fmt.Errorf("executing steps request: %w", &steps.ClientStatusError{ Status: client.Status{State: client.StateCancelled, ErrorKind: client.ErrorCancelled}, Err: errors.New("cancelled"), }), expectedReason: JobCanceled, }, "plain error": { err: fmt.Errorf("executing steps request: %w", errors.New("something broke")), expectedReason: "", }, } for tn, tc := range tests { t.Run(tn, func(t *testing.T) { result := wrapStepStageErr(tc.err) if tc.expectedNil { assert.Nil(t, result) return } require.NotNil(t, result) var berr *BuildError require.ErrorAs(t, result, &berr) assert.Equal(t, tc.expectedReason, berr.FailureReason) }) } } // TestBuild_executeStepStage_ForwardsRegisterCancel asserts the wiring this // branch introduces: the registerCancel parameter on executeStepStage is // handed through to steps.Options.RegisterCancel, and the callback that // Execute then registers is the same one a JobTrace would receive via // SetCancelFunc. A regression here (e.g. dropping the field while plumbing // Options) would silently disable user-cancellation for the concrete path, // so guard it explicitly rather than rely on integration coverage. 
func TestBuild_executeStepStage_ForwardsRegisterCancel(t *testing.T) {
	// Fake step-runner server; its Cancels() accessor records which job IDs
	// received a Cancel RPC, which is what this test ultimately asserts on.
	server := stepstest.New(t)

	logger, _ := test.NewNullLogger()
	build := &Build{
		Job:    spec.Job{ID: 4242},
		Runner: &RunnerConfig{},
		// Pre-populate variables so GetAllVariables short-circuits and
		// doesn't depend on Settings()/feature-flag resolution that the
		// test isn't exercising.
		allVariables: spec.Variables{},
		BuildDir:     t.TempDir(),
		logger:       buildlogger.New(nil, logrus.NewEntry(logger), buildlogger.Options{}),
	}

	// Capture whatever cancel callback executeStepStage registers; buffered
	// so the registration itself never blocks.
	registered := make(chan context.CancelFunc, 1)
	registerCancel := func(cb context.CancelFunc) {
		registered <- cb
	}

	// Execute blocks on the fake server's FollowLogs until Cancel arrives,
	// so trigger the registered callback as soon as it appears. This proves
	// (a) registerCancel was invoked at all, and (b) the callback it
	// received drives the Cancel RPC end-to-end.
	go func() {
		cb, ok := <-registered
		if !ok {
			return
		}
		cb()
	}()

	ctx, cancel := context.WithTimeout(t.Context(), 10*time.Second)
	defer cancel()

	err := build.executeStepStage(ctx, server.Connector(), "test_stage", []schema.Step{}, registerCancel)
	// Closing the channel releases the goroutine above in case the callback
	// was never registered, so the test doesn't leak it.
	close(registered)

	var berr *BuildError
	require.ErrorAs(t, err, &berr, "cancelled step-runner status must surface as a BuildError")
	assert.Equal(t, JobCanceled, berr.FailureReason,
		"executeStepStage must produce JobCanceled when the step-runner reports cancelled")
	assert.ErrorIs(t, berr.Inner, ErrJobCanceled)
	assert.Equal(t,
		[]string{strconv.FormatInt(build.ID, 10)},
		server.Cancels(),
		"the registered callback must call Cancel with the build's job ID",
	)
}

// TestBuild_executeStepStage_NilRegisterCancel verifies that the
// dispatched-step path (which passes nil) still completes cleanly: no panic
// from a nil callback, and steps.Execute exits via context cancellation.
func TestBuild_executeStepStage_NilRegisterCancel(t *testing.T) {
	server := stepstest.New(t)

	logger, _ := test.NewNullLogger()
	build := &Build{
		Job:          spec.Job{ID: 9},
		Runner:       &RunnerConfig{},
		allVariables: spec.Variables{},
		BuildDir:     t.TempDir(),
		logger:       buildlogger.New(nil, logrus.NewEntry(logger), buildlogger.Options{}),
	}

	ctx, cancel := context.WithTimeout(t.Context(), 5*time.Second)
	defer cancel()

	// Drive shutdown via context cancel rather than a registered callback.
	time.AfterFunc(200*time.Millisecond, cancel)

	err := build.executeStepStage(ctx, server.Connector(), "test_stage", []schema.Step{}, nil)

	// We don't assert a specific error shape here — context cancellation
	// during gRPC streaming can surface as several wrapped forms. The
	// assertion that matters is that the call returned at all without
	// panicking on the nil registerCancel.
	_ = err

	assert.Empty(t, server.Cancels(), "no Cancel RPC should fire when registerCancel is nil")
}



================================================
FILE: common/buildlogger/build_logger.go
================================================
package buildlogger

import (
	"fmt"
	"io"
	"sync"

	"github.com/sirupsen/logrus"

	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger/internal"
	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger/internal/masker"
	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger/internal/timestamper"
	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger/internal/tokensanitizer"
	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger/internal/urlsanitizer"
)

// Trace is the destination a Logger writes job log output to. IsStdout is
// passed through to internal.NewTee in New.
type Trace interface {
	Write([]byte) (int, error)
	IsStdout() bool
}

// Options configures a Logger created with New.
type Options struct {
	// MaskPhrases are literal phrases to be masked in the log output
	// (deduplicated via internal.Unique).
	MaskPhrases []string
	// MaskTokenPrefixes are token prefixes to be masked; the default
	// prefixes from tokensanitizer are always appended.
	MaskTokenPrefixes []string
	// Timestamping enables the timestamper filter in wrap().
	Timestamping bool
	// MaskAllDefaultTokens is forwarded to
	// tokensanitizer.DefaultTokenPrefixes when building the prefix list.
	MaskAllDefaultTokens bool
	// TeeOnly switches the Tee to its WithoutLog form.
	TeeOnly bool
}

const (
	Stdout StreamType = 'O'
	Stderr StreamType = 'E'
)

// StreamType tags a stream as stdout ('O') or stderr ('E'); it is converted
// to timestamper.StreamType in wrap().
type StreamType byte

// Logger writes job log output through a chain of masking/sanitizing filters
// and tees formatted log entries via the embedded Tee.
type Logger struct {
	internal.Tee

	// base is the unwrapped trace writer; nil when no Trace was provided.
	base   io.WriteCloser
	closed bool

	// mu protects w, as Tee's Println, Debugln etc. funcs can be called
	// throughout the runner from different go routines.
	mu *sync.Mutex
	w  io.WriteCloser

	maskPhrases       [][]byte
	maskTokenPrefixes [][]byte
	timestamping      bool
}

// NewNopCloser wraps w in an io.WriteCloser whose Close is a no-op
// (delegates to internal.NewNopCloser).
func NewNopCloser(w io.Writer) io.WriteCloser {
	return internal.NewNopCloser(w)
}

const (
	// StreamExecutorLevel is the stream number for an executor log line
	StreamExecutorLevel = 0
	// StreamWorkLevel is the stream number for a work log line
	StreamWorkLevel = 1
	// StreamStartingServiceLevel is the starting stream number for a service log line
	StreamStartingServiceLevel = 15
)

// New builds a Logger writing to log (may be nil, e.g. in tests) with the
// filter chain configured by opts, teeing formatted entries through entry.
func New(log Trace, entry *logrus.Entry, opts Options) Logger {
	l := Logger{mu: new(sync.Mutex)}
	l.maskPhrases = internal.Unique(opts.MaskPhrases)
	l.maskTokenPrefixes = internal.Unique(
		append(opts.MaskTokenPrefixes, tokensanitizer.DefaultTokenPrefixes(opts.MaskAllDefaultTokens)...),
	)
	l.timestamping = opts.Timestamping

	if log != nil {
		l.base = internal.NewNopCloser(log)
		// default stream: executor-level stdout
		l.w = l.wrap(l.base, StreamExecutorLevel, Stdout)
	}

	l.Tee = internal.NewTee(l.SendRawLog, entry, log != nil && log.IsStdout())
	if opts.TeeOnly {
		l.Tee = l.Tee.WithoutLog()
	}

	return l
}

// Stream returns a new filtered writer over the base trace for the given
// stream ID and type (each call builds its own wrap() chain).
func (l *Logger) Stream(streamID int, streamType StreamType) io.WriteCloser {
	// l.base being nil happens when the buildlogger hasn't been created with New() or
	// a nil was passed for the Trace parameter. This only happens in tests, and to not
	// panic we simply return a discard writer.
	if l.base == nil {
		return internal.NewNopCloser(io.Discard)
	}
	return l.wrap(l.base, streamID, streamType)
}

// wrap wraps the underlying writer with "filters". Order here somewhat
// matters, and the order they're instantiated in is the reverse order in which
// writes are processed, e.g. last added filter is the first to process data.
//
// order:
// - sync writer to ensure that multiple writes cannot happen concurrently
// - mask phrases (masker.New)
// - mask sensitive URL parameters (urlsanitizer.New)
// - mask secrets with a prefixed token (tokensanitizer.New)
// - split log lines and add timestamps (timestamper.New)
func (l *Logger) wrap(w io.WriteCloser, streamID int, streamType StreamType) io.WriteCloser {
	if l.timestamping {
		w = timestamper.New(w, timestamper.StreamType(streamType), uint8(streamID), true)
	}
	w = tokensanitizer.New(w, l.maskTokenPrefixes)
	w = urlsanitizer.New(w)
	w = masker.New(w, l.maskPhrases)
	w = internal.NewSync(w)
	return w
}

// WithFields returns a copy of the Logger whose Tee carries the additional
// fields. The copy shares the same mutex and underlying writers, so writes
// through either remain serialized together.
func (l *Logger) WithFields(fields logrus.Fields) *Logger {
	return &Logger{
		Tee:               l.Tee.WithFields(fields),
		base:              l.base,
		mu:                l.mu,
		w:                 l.w,
		maskPhrases:       l.maskPhrases,
		maskTokenPrefixes: l.maskTokenPrefixes,
		timestamping:      l.timestamping,
	}
}

// SendRawLog writes args directly to the wrapped (filtered) trace writer,
// serialized by mu. It is a no-op when no trace writer was configured.
func (l *Logger) SendRawLog(args ...any) {
	if l.w == nil {
		return
	}
	l.mu.Lock()
	_, _ = fmt.Fprint(l.w, args...)
	l.mu.Unlock()
}

// Close flushes and closes the wrapped writer. Calling Close a second time
// returns an "already closed" error.
func (l *Logger) Close() error {
	l.mu.Lock()
	defer l.mu.Unlock()

	if l.closed {
		return fmt.Errorf("already closed")
	}
	l.closed = true

	if l.w != nil {
		return l.w.Close()
	}
	return nil
}



================================================
FILE: common/buildlogger/build_logger_test.go
================================================
//go:build !integration

package buildlogger

import (
	"bytes"
	"testing"

	"github.com/sirupsen/logrus"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// fakeJobTrace is an in-memory Trace implementation used to capture what the
// Logger writes to the job log.
type fakeJobTrace struct {
	buffer *bytes.Buffer
}

func (fjt *fakeJobTrace) Write(p []byte) (int, error) {
	return fjt.buffer.Write(p)
}

func (fjt *fakeJobTrace) IsStdout() bool {
	return false
}

// Read returns everything written to the trace so far.
func (fjt *fakeJobTrace) Read() string {
	return fjt.buffer.String()
}

func newFakeJobTrace() *fakeJobTrace {
	buf := new(bytes.Buffer)
	return &fakeJobTrace{
		buffer: buf,
	}
}

// newBuildLogger creates a Logger with default Options, tagging logrus
// entries with the test name.
func newBuildLogger(testName string, jt Trace) Logger {
	return New(jt, logrus.WithField("test", testName), Options{})
}

func
runOnHijackedLogrusOutput(t *testing.T, handler func(t *testing.T, output *bytes.Buffer)) {
	// Temporarily redirect the global logrus output into a buffer so the
	// handler can assert on what was logged; restored on return.
	oldOutput := logrus.StandardLogger().Out
	defer func() { logrus.StandardLogger().Out = oldOutput }()

	buf := bytes.NewBuffer([]byte{})
	logrus.StandardLogger().Out = buf
	handler(t, buf)
}

func TestLogLineWithoutSecret(t *testing.T) {
	runOnHijackedLogrusOutput(t, func(t *testing.T, output *bytes.Buffer) {
		jt := newFakeJobTrace()
		l := newBuildLogger("log-line-without-secret", jt)
		l.Errorln("Fatal: Get http://localhost/?id=123")
		assert.NoError(t, l.Close())

		// A URL with no sensitive parameters must reach both the job trace
		// and the logrus output unmodified.
		assert.Contains(t, jt.Read(), `Get http://localhost/?id=123`)
		assert.Contains(t, output.String(), `Get http://localhost/?id=123`)
	})
}

func TestLogLineWithSecret(t *testing.T) {
	runOnHijackedLogrusOutput(t, func(t *testing.T, output *bytes.Buffer) {
		jt := newFakeJobTrace()
		l := newBuildLogger("log-line-with-secret", jt)
		l.Errorln("Get http://localhost/?id=123&X-Amz-Signature=abcd1234&private_token=abcd1234")
		assert.NoError(t, l.Close())

		// Sensitive URL parameters are masked in the job trace...
		assert.Contains(
			t,
			jt.Read(),
			`Get http://localhost/?id=123&X-Amz-Signature=[MASKED]&private_token=[MASKED]`,
		)
		// ...but the logrus (runner-side) output is left untouched.
		assert.Contains(
			t,
			output.String(),
			`Get http://localhost/?id=123&X-Amz-Signature=abcd1234&private_token=abcd1234`,
		)
	})
}

func TestLogPrinters(t *testing.T) {
	tests := map[string]struct {
		entry     *logrus.Entry
		assertion func(t *testing.T, output string)
	}{
		"null writer": {
			entry: nil,
			assertion: func(t *testing.T, output string) {
				assert.Empty(t, output)
			},
		},
		"with entry": {
			entry: logrus.WithField("printer", "test"),
			assertion: func(t *testing.T, output string) {
				assert.Contains(t, output, "print\033[0;m\n")
				assert.Contains(t, output, "info\033[0;m\n")
				assert.Contains(t, output, "WARNING: warning\033[0;m\n")
				assert.Contains(t, output, "ERROR: softerror\033[0;m\n")
				assert.Contains(t, output, "ERROR: error\033[0;m\n")
			},
		},
	}

	for tn, tc := range tests {
		t.Run(tn, func(t *testing.T) {
			trace := newFakeJobTrace()
			logger := New(trace, tc.entry, Options{})
			logger.Println("print")
			logger.Infoln("info")
			logger.Warningln("warning")
			logger.SoftErrorln("softerror")
			logger.Errorln("error")
			require.NoError(t, logger.Close())
			tc.assertion(t, trace.Read())
		})
	}
}



================================================
FILE: common/buildlogger/internal/build_logger_fuzz.go
================================================
//go:build gofuzz

package internal

import (
	"bytes"
	"encoding/binary"
	"fmt"
	"io"
	"math/rand"

	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger/internal/masker"
	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger/internal/tokensanitizer"
	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger/internal/urlsanitizer"
)

// nopWriter turns any io.Writer into an io.WriteCloser with a no-op Close.
type nopWriter struct {
	io.Writer
}

func (nopWriter) Close() error { return nil }

// Fuzz feeds randomly-chunked data (with masked phrases and prefixed tokens
// interspersed at random positions) through the masking filter chain and
// panics if any secret survives in the output.
func Fuzz(data []byte) int {
	phrases := [][]byte{
		bytes.Repeat([]byte{'A'}, 1024),
		bytes.Repeat([]byte{'B'}, 4*1024),
		bytes.Repeat([]byte{'C'}, 8*1024),
		[]byte("secret"),
		[]byte("secret_suffix"),
		[]byte("ssecret"),
		[]byte("secrett"),
		[]byte("ssecrett"),
	}

	tokenPrefixes := [][]byte{
		[]byte("secret_prefix"),
		[]byte("secret-prefix"),
		[]byte("secret_prefix-"),
		[]byte("secret-prefix-"),
		[]byte("secret_prefix_"),
		[]byte("secret-prefix_"),
	}

	// to be combined with tokenPrefixes
	secretSuffixes := [][]byte{
		[]byte("THIS_IS_SECRET"),
		[]byte("ALSO-SECRET"),
	}

	buf := new(bytes.Buffer)
	w := io.WriteCloser(nopWriter{buf})
	w = masker.New(w, phrases)
	w = tokensanitizer.New(w, tokenPrefixes)
	w = urlsanitizer.New(w)

	// Derive a deterministic RNG seed from the first 8 bytes of the fuzz
	// input (zero-padded when shorter).
	seed := data
	if len(seed) < 8 {
		seed = append(seed, make([]byte, 8-len(seed))...)
	}
	r := rand.New(rand.NewSource(int64(binary.BigEndian.Uint64(seed))))

	// copy fuzz input to new slice, with interspersed mask values at random locations
	var src []byte
	chunk(r, data, func(part []byte) {
		src = append(src, part...)
		if r.Intn(2) == 1 {
			src = append(src, phrases[r.Intn(len(phrases))]...)
		}
		if r.Intn(2) == 1 {
			pref := tokenPrefixes[r.Intn(len(tokenPrefixes))]
			suf := secretSuffixes[r.Intn(len(secretSuffixes))]
			src = append(src, append(pref, suf...)...)
		}
	})

	// write src to buffer, but with random sized slices
	chunk(r, src, func(part []byte) {
		n, err := w.Write(part)
		if err != nil {
			panic(err)
		}
		if n != len(part) {
			panic(fmt.Sprintf("n(%d) < len(part)(%d)", n, len(part)))
		}
	})

	contents := buf.Bytes()
	for _, mask := range phrases {
		if bytes.Contains(contents, mask) {
			panic(fmt.Sprintf("mask %q present in %q", mask, contents))
		}
	}
	for _, mask := range secretSuffixes {
		if bytes.Contains(contents, mask) {
			panic(fmt.Sprintf("prefix mask %q present in %q", mask, contents))
		}
	}

	return 0
}

// chunk splits input into randomly-sized, non-empty consecutive slices and
// passes each to fn until the input is exhausted.
func chunk(r *rand.Rand, input []byte, fn func(part []byte)) {
	for {
		if len(input) == 0 {
			break
		}
		offset := 1 + r.Intn(len(input))
		fn(input[:offset])
		input = input[offset:]
	}
}



================================================
FILE: common/buildlogger/internal/masker/masker.go
================================================
// Package masker implements a masking Writer, where specified phrases are
// replaced with the word "[MASKED]".
//
// To achieve masking over Write() boundaries, each phrase has its own writer.
// These writers are stacked, with each one calling the next, in length order,
// starting with the longest. This allows each writer to scan for their phrase
// in-turn, filtering data down to the next writer as required.
//
// Each mask writer tracks when its phrase is being written, and counts until
// either it's matched all bytes of the phrase, and then replaces it, or if a
// full match isn't found, sends the matched bytes to the next writer
// unmodified.
package masker

import (
	"bytes"
	"io"
)

// mask is the replacement text emitted in place of each matched phrase.
var mask = []byte("[MASKED]")

// Masker is the public head of a chain of per-phrase masking writers; each
// Write is filtered through every phrase's writer before reaching the
// underlying WriteCloser.
type Masker struct {
	next io.WriteCloser
}

// New returns a new Masker.
// New builds the chain: one internal masker per phrase, each wrapping the
// next, terminating at w.
//
// NOTE(review): phrases are chained in the order given; the package comment
// says longest-first ordering matters, so callers presumably pre-sort and
// de-duplicate (see internal.Unique in the test below) — confirm. An empty
// phrase would index m.phrase[0] in Write, so empties are presumably
// filtered out by the caller too.
func New(w io.WriteCloser, phrases [][]byte) *Masker {
	m := &Masker{}
	m.next = w

	// Create a masker for each unique phrase
	for i := 0; i < len(phrases); i++ {
		m.next = &masker{next: m.next, phrase: phrases[i]}
	}

	return m
}

// Write passes p through the per-phrase masker chain.
func (m *Masker) Write(p []byte) (n int, err error) {
	return m.next.Write(p)
}

// Close flushes any remaining data and closes the underlying writer.
func (m *Masker) Close() error {
	return m.next.Close()
}

// masker masks a single phrase. It is stateful: matching counts how many
// leading bytes of phrase have been seen so far, which is what lets a phrase
// split across multiple Write() calls still be detected.
type masker struct {
	phrase   []byte
	matching int

	next io.WriteCloser
}

//nolint:gocognit
func (m *masker) Write(p []byte) (n int, err error) {
	if len(p) == 0 {
		return 0, nil
	}

	// fast path: if the write is "[MASKED]" from an upper-level, don't bother
	// processing it, send it to the next writer.
	if bytes.Equal(p, mask) {
		return m.next.Write(p)
	}

	// n scans p; last marks the start of data not yet forwarded downstream.
	var last int
	for n < len(p) {
		// optimization: use the faster IndexByte to jump to the start of a
		// potential phrase and if not found, advance the whole buffer.
		if m.matching == 0 {
			off := bytes.IndexByte(p[n:], m.phrase[0])
			if off < 0 {
				n += len(p[n:])
				break
			}
			if off > -1 {
				n += off
			}
		}

		// find out how much data we can match: the minimum of len(p) and the
		// remainder of the phrase.
		min := len(m.phrase[m.matching:])
		if len(p[n:]) < min {
			min = len(p[n:])
		}

		// try to match the next part of the phrase
		if bytes.HasPrefix(p[n:], m.phrase[m.matching:m.matching+min]) {
			// send any data that we've not sent prior to our match to the
			// next writer.
			_, err = m.next.Write(p[last:n])
			if err != nil {
				return n, err
			}

			m.matching += min
			n += min
			last = n

			// if we've tracked each byte of our phrase, we can replace it
			if m.matching == len(m.phrase) {
				// recurse through our own Write so the "[MASKED]" fast path
				// forwards it untouched down the rest of the chain.
				_, err := m.Write(mask)
				if err != nil {
					return n, err
				}
				m.matching = 0
			}
			continue
		}

		// if we didn't complete a phrase match, send the tracked bytes of
		// the phrase to the next writer unmodified.
		if m.matching > 0 {
			_, err = m.next.Write(m.phrase[:m.matching])
			if err != nil {
				return n, err
			}

			// if the end of this phrase matches the start of it, try again
			if m.phrase[0] == p[n] {
				m.matching = 1
				// consume p[n] as the first byte of a new candidate match:
				// advance both last (so it isn't re-forwarded) and n.
				last++
				n++
				continue
			}
		}

		m.matching = 0
		n++
	}

	// any unmatched data is sent to the next writer
	_, err = m.next.Write(p[last:n])

	// report all of p consumed; bytes of a partial match are retained as
	// state (m.matching), not dropped.
	return n, err
}

// Close flushes any remaining data and closes the underlying writer.
func (m *masker) Close() error {
	var werr error
	if m.matching == len(m.phrase) {
		// this mask is added to avoid a potential undiscovered edge-case:
		// this should be unreachable as we replace full matches immediately in
		// Write().
		_, werr = m.next.Write(mask)
	} else {
		// flush the withheld partial-match bytes so no input is lost.
		_, werr = m.next.Write(m.phrase[:m.matching])
	}

	// close the chain regardless; the Close error takes precedence over a
	// flush error.
	err := m.next.Close()
	if err == nil {
		return werr
	}
	return err
}

================================================
FILE: common/buildlogger/internal/masker/masker_test.go
================================================
//go:build !integration

package masker

import (
	"bytes"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger/internal"
)

// TestMasking drives the masker with table-driven inputs. A '|' in the input
// marks a Write() boundary, letting each case assert that masking works even
// when a phrase is split across writes.
func TestMasking(t *testing.T) {
	tests := []struct {
		input    string
		values   []string
		expected string
	}{
		{
			input:    "empty secrets have no affect",
			values:   []string{""},
			expected: "empty secrets have no affect",
		},
		{
			input:    "no escaping at all",
			expected: "no escaping at all",
		},
		{
			input:    "secrets",
			values:   []string{"secrets"},
			expected: "[MASKED]",
		},
		{
			input:    "secret|s",
			values:   []string{"secrets"},
			expected: "[MASKED]",
		},
		{
			input:    "s|ecrets",
			values:   []string{"secrets"},
			expected: "[MASKED]",
		},
		{
			input:    "secretssecrets",
			values:   []string{"secrets"},
			expected: "[MASKED][MASKED]",
		},
		{
			input:    "ssecrets",
			values:   []string{"secrets"},
			expected: "s[MASKED]",
		},
		{
			input:    "s|secrets",
			values:   []string{"secrets"},
			expected: "s[MASKED]",
		},
		{
			input:    "at the start of the buffer",
			values:   []string{"at"},
			expected: "[MASKED] the start of the buffer",
		},
		{
			input:    "in the middle of the buffer",
			values:   []string{"middle"},
			expected: "in the [MASKED] of the buffer",
		},
		{
			input:    "at the end of the buffer",
			values:   []string{"buffer"},
			expected: "at the end of the [MASKED]",
		},
		{
			input:    "all values are masked",
			values:   []string{"all", "values", "are", "masked"},
			expected: "[MASKED] [MASKED] [MASKED] [MASKED]",
		},
		{
			input:    "prefixed and suffixed: xfoox ybary ffoo barr ffooo bbarr",
			values:   []string{"foo", "bar"},
			expected: "prefixed and suffixed: x[MASKED]x y[MASKED]y f[MASKED] [MASKED]r f[MASKED]o b[MASKED]r",
		},
		{
			input:    "prefix|ed, su|ffi|xed |and split|:| xfo|ox y|bary ffo|o ba|rr ffooo b|barr",
			values:   []string{"foo", "bar"},
			expected: "prefixed, suffixed and split: x[MASKED]x y[MASKED]y f[MASKED] [MASKED]r f[MASKED]o b[MASKED]r",
		},
		{
			input:    "sp|lit al|l val|ues ar|e |mask|ed",
			values:   []string{"split", "all", "values", "are", "masked"},
			expected: "[MASKED] [MASKED] [MASKED] [MASKED] [MASKED]",
		},
		{
			input:    "prefix_mask mask prefix_|mask prefix_ma|sk mas|k",
			values:   []string{"mask", "prefix_mask"},
			expected: "[MASKED] [MASKED] [MASKED] [MASKED] [MASKED]",
		},
		{
			// phrase larger than 8KiB, split over two writes.
			input:    "large secret: " + strings.Repeat("_", 8000) + "|" + strings.Repeat("_", 8000),
			values:   []string{strings.Repeat("_", 8000*2)},
			expected: "large secret: [MASKED]",
		},
		{
			input:    "overlap: this is the en| foobar",
			values:   []string{"this is the end", "en foobar", "en"},
			expected: "overlap: this is the [MASKED]",
		},
	}

	for _, tc := range tests {
		t.Run(tc.input, func(t *testing.T) {
			buf := new(bytes.Buffer)

			m := New(internal.NewNopCloser(buf), internal.Unique(tc.values))

			// split on '|' to simulate phrases straddling Write boundaries.
			parts := bytes.Split([]byte(tc.input), []byte{'|'})
			for _, part := range parts {
				n, err := m.Write(part)
				require.NoError(t, err)
				assert.Equal(t, len(part), n)
			}

			// Close flushes any withheld partial-match bytes.
			require.NoError(t, m.Close())

			assert.Equal(t, tc.expected, buf.String())
		})
	}
}

================================================
FILE: common/buildlogger/internal/nopcloser.go
================================================

package internal

import "io"

// nopCloser adapts an io.Writer into an io.WriteCloser whose Close is a
// no-op.
type nopCloser struct {
	io.Writer
}

func (nopCloser) Close() error { return nil }

// NewNopCloser returns w wrapped with a no-op Close method.
func NewNopCloser(w io.Writer) io.WriteCloser {
	return nopCloser{w}
}

================================================
FILE: common/buildlogger/internal/sync.go
================================================
package internal

import (
	"io"
	"sync"
)

// syncWriter serializes Write and Close calls to the wrapped writer with a
// mutex, making it safe for concurrent use.
type syncWriter struct {
	mu sync.Mutex
	w  io.WriteCloser
}

// NewSync wraps w so that Write and Close are mutually exclusive.
func NewSync(w io.WriteCloser) *syncWriter {
	return &syncWriter{w: w}
}

func (s *syncWriter) Write(p []byte) (int, error) {
	s.mu.Lock()
	defer s.mu.Unlock()
	return s.w.Write(p)
}

func (s *syncWriter) Close() error {
	s.mu.Lock()
	defer s.mu.Unlock()
	return s.w.Close()
}

================================================
FILE: common/buildlogger/internal/tee.go
================================================
package internal

import (
	"fmt"
	"io"

	"github.com/sirupsen/logrus"

	"gitlab.com/gitlab-org/gitlab-runner/helpers"
)

// Tee is a log writer that targets both the job/build log _and_ the runner log,
// writing to both.
type Tee struct {
	// logFn writes a line to the job/build log; may be nil.
	logFn func(args ...any)
	// entry is the runner's structured logger; a nil entry disables logging
	// entirely (see log and Debugln).
	entry *logrus.Entry

	// noLog stops teeing to the runner log, this is essentially used by
	// runner tests where both the build and runner logs both use the same
	// destination (like stdout), as well as builds logs where we want separate
	// structured log lines in the runner log vs the build..
	noLog bool
}

// NewTee constructs a Tee; disable suppresses the runner-log side (noLog).
func NewTee(logFn func(args ...any), entry *logrus.Entry, disable bool) Tee {
	return Tee{logFn, entry, disable}
}

// WithFields returns a copy of the Tee whose runner-log entry carries the
// additional structured fields.
func (t *Tee) WithFields(fields logrus.Fields) Tee {
	return Tee{
		logFn: t.logFn,
		entry: t.entry.WithFields(fields),
		noLog: t.noLog,
	}
}

// WithoutLog returns a copy of the Tee that writes only to the job/build log.
func (t *Tee) WithoutLog() Tee {
	return Tee{
		logFn: t.logFn,
		entry: t.entry,
		noLog: true,
	}
}

// WriterLevel exposes the underlying logrus entry's level writer.
func (t *Tee) WriterLevel(level logrus.Level) *io.PipeWriter {
	return t.entry.WriterLevel(level)
}

// log writes args to the job/build log (with the ANSI logPrefix) and, unless
// noLog is set, to the runner log at the given level. A nil entry makes the
// whole call a no-op.
func (t *Tee) log(level logrus.Level, logPrefix string, args ...interface{}) {
	if t.entry == nil {
		return
	}

	// log lines have spaces between each argument, followed by an ANSI Reset and *then* a new-line.
	//
	// To achieve this, we use fmt.Sprintln and remove the newline, add the ANSI Reset and then
	// append the newline again. The reason we don't use fmt.Sprint is that there's a greater
	// difference between that and fmt.Sprintln than just the newline character being added
	// (fmt.Sprintln consistently adds a space between arguments).
	logLine := fmt.Sprintln(args...)
	logLine = logLine[:len(logLine)-1]
	logLine += helpers.ANSI_RESET + "\n"

	if t.logFn != nil {
		t.logFn(logPrefix + logLine)
	}

	// don't tee to logrus entry (runner log) when disabled or no args
	if t.noLog || len(args) == 0 {
		return
	}

	t.entry.Logln(level, args...)
}

// Debugln writes only to the runner log (never the job/build log).
func (t *Tee) Debugln(args ...interface{}) {
	if t.entry == nil {
		return
	}
	t.entry.Debugln(args...)
}

// Println tees a plain line; the runner-log side uses debug level.
func (t *Tee) Println(args ...interface{}) {
	t.log(logrus.DebugLevel, helpers.ANSI_CLEAR, args...)
}

func (t *Tee) Infoln(args ...interface{}) {
	t.log(logrus.InfoLevel, helpers.ANSI_BOLD_GREEN, args...)
}

func (t *Tee) Warningln(args ...interface{}) {
	t.log(logrus.WarnLevel, helpers.ANSI_YELLOW+"WARNING: ", args...)
}

// SoftErrorln renders as an ERROR in the job/build log but only warn level in
// the runner log.
func (t *Tee) SoftErrorln(args ...interface{}) {
	t.log(logrus.WarnLevel, helpers.ANSI_BOLD_RED+"ERROR: ", args...)
}

func (t *Tee) Errorln(args ...interface{}) {
	t.log(logrus.ErrorLevel, helpers.ANSI_BOLD_RED+"ERROR: ", args...)
} ================================================ FILE: common/buildlogger/internal/testdata/corpus/ipsum ================================================ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Aliquam bibendum sed lacus ut molestie. Praesent nec odio vel sem finibus porttitor. Maecenas et elementum mi. Morbi sit amet eros suscipit, commodo purus eu, pulvinar lacus. Suspendisse quis eleifend felis. Morbi gravida metus id suscipit sollicitudin. Cras pulvinar quam et tortor porttitor, sed iaculis quam fringilla. Curabitur fringilla fermentum porta. In efficitur ligula efficitur congue lacinia. Etiam elementum pharetra neque, consectetur tincidunt nibh vestibulum a. Aenean sit amet dui sed ipsum euismod placerat vitae at ante. Sed a urna lacus. Vivamus sed lectus purus. Duis tristique nisi in lacinia pharetra. Suspendisse id nulla venenatis, semper turpis non, luctus orci. Quisque feugiat et orci eget vestibulum. Cras elementum tortor a velit pretium, quis venenatis odio luctus. Ut libero tortor, iaculis venenatis ullamcorper ut, rhoncus in turpis. Donec nisi mi, blandit a suscipit ut, iaculis eu dolor. Cras varius suscipit urna, quis sodales mi. Sed et vestibulum erat. Donec a ante eget odio vulputate fringilla. Quisque vel magna bibendum, bibendum velit at, viverra enim. Nulla et neque nec urna euismod pretium sit amet in ante. Nam tincidunt ultricies mi. Vivamus ac nibh dignissim odio laoreet tempus. Integer vel consectetur lectus. Duis eget bibendum eros. Quisque pharetra, lacus et ultrices tristique, lorem diam sodales felis, egestas ornare ligula neque accumsan ex. Integer volutpat nisl lorem. Maecenas egestas ligula vel felis pulvinar efficitur. Curabitur viverra, orci id ullamcorper mollis, sapien ante tincidunt elit, ut cursus urna quam sed neque. Nunc et varius ex, sit amet vehicula quam. In consectetur metus eros, nec consectetur diam dignissim et. Nulla eget auctor metus, in tempor nunc. Duis eu orci quis sem iaculis fermentum. 
Praesent suscipit ipsum ac libero sagittis, tristique dictum felis ultrices. Mauris vehicula orci sit amet felis iaculis, in ultricies lacus fermentum. Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas. Curabitur porta in quam tincidunt lacinia. Phasellus lobortis nisl eget est pulvinar, nec convallis ligula congue. Ut id nisi tincidunt, aliquam mauris eu, mattis orci. Curabitur nunc ligula, commodo a augue et, pharetra iaculis erat. Ut mollis consectetur libero vel maximus. Morbi egestas turpis leo, at dignissim dui scelerisque at. Aliquam fermentum lacus risus, vel ultricies risus blandit malesuada. Nulla augue libero, tincidunt et orci nec, fermentum tempor nibh. Orci varius natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus. Aliquam tellus dui, fermentum luctus sagittis vel, mattis sit amet turpis. Mauris eu lectus ut enim auctor elementum quis non turpis. Nullam auctor eleifend molestie. Nulla id ornare diam, a vulputate felis. Sed maximus blandit vestibulum. Mauris sed dignissim est. In quis metus urna. Praesent vehicula nisl quam, a pretium risus pretium ut. Quisque mollis augue vel turpis rutrum rutrum non non turpis. Pellentesque consequat ante ac neque consectetur hendrerit. Suspendisse finibus ornare quam, sit amet pellentesque ante pretium eget. Integer quis eros ligula. Ut id nulla enim. Etiam interdum pellentesque nunc, in pulvinar purus scelerisque eget. Nulla aliquam lorem sodales maximus volutpat. 
================================================ FILE: common/buildlogger/internal/testdata/corpus/log-1 ================================================ Running with gitlab-runner 13.12.0-rc1 (b21d5c5b)  on gitlab-org-docker ih9XD9p3  feature flags: FF_GITLAB_REGISTRY_HELPER_IMAGE:true, FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE:true section_start:1621524382:resolve_secrets Resolving secrets section_end:1621524382:resolve_secrets section_start:1621524382:prepare_executor Preparing the "docker+machine" executor Using Docker executor with image registry.gitlab.com/gitlab-org/gitlab-runner/ci:1.13.8-12 ... Starting service docker:20.10.2-dind ... Pulling docker image docker:20.10.2-dind ... Using docker image sha256:7569a61fe0d5af655280b516bb2654a1ef03f7a3d67549543b65d81dbeea372e for docker:20.10.2-dind with digest docker@sha256:8f4e9ddda1049e6935f9fc7f5cad0bd1001fbf59188616f19b620fd7b6e95ba2 ... Waiting for services to be up and running... Authenticating with credentials from job payload (GitLab Registry) Pulling docker image registry.gitlab.com/gitlab-org/gitlab-runner/ci:1.13.8-12 ... Using docker image sha256:ae3c432ccac98231f52393c158c545eb689584defed228600b87e2fe4e4fa1e9 for registry.gitlab.com/gitlab-org/gitlab-runner/ci:1.13.8-12 with digest registry.gitlab.com/gitlab-org/gitlab-runner/ci@sha256:0436a4d75851db641f3c704688e0e27a3e208f4bc948503c1b35b7e1691b5cf6 ... section_end:1621524429:prepare_executor section_start:1621524429:prepare_script Preparing environment Running on runner-ih9xd9p3-project-250833-concurrent-0 via runner-ih9xd9p3-org-ci-1621524292-bb661501... section_end:1621524456:prepare_script section_start:1621524456:get_sources Getting source from Git repository $ eval "$CI_PRE_CLONE_SCRIPT" Fetching changes... Initialized empty Git repository in /builds/gitlab-org/gitlab-runner/.git/ Created fresh repository. Checking out 7a6612da as v13.12.0... 
Skipping Git submodules setup section_end:1621524510:get_sources section_start:1621524510:restore_cache Restoring cache Checking cache for unit test 1/8-v13-12-0-2... FATAL: file does not exist  Failed to extract cache section_end:1621524511:restore_cache section_start:1621524511:download_artifacts Downloading artifacts Downloading artifacts for helper images (1280281190)... Downloading artifacts from coordinator... ok  id=1280281190 status=200 token=zaM3ywFV Downloading artifacts for clone test repo (1280281192)... Downloading artifacts from coordinator... ok  id=1280281192 status=200 token=xzA1hsVL Downloading artifacts for tests definitions (1280281194)... Downloading artifacts from coordinator... ok  id=1280281194 status=200 token=kQK1ELdZ section_end:1621524516:download_artifacts section_start:1621524516:step_script Executing "step_script" stage of the job script Using docker image sha256:ae3c432ccac98231f52393c158c545eb689584defed228600b87e2fe4e4fa1e9 for registry.gitlab.com/gitlab-org/gitlab-runner/ci:1.13.8-12 with digest registry.gitlab.com/gitlab-org/gitlab-runner/ci@sha256:0436a4d75851db641f3c704688e0e27a3e208f4bc948503c1b35b7e1691b5cf6 ... 
$ mkdir -p "$GOCACHE" $ source ci/touch_make_dependencies touching out/binaries/gitlab-runner-helper/gitlab-runner-helper.x86_64-windows.exe touching out/binaries/gitlab-runner-helper/gitlab-runner-helper.x86_64 touching out/binaries/gitlab-runner-helper/gitlab-runner-helper.s390x touching out/binaries/gitlab-runner-helper/gitlab-runner-helper.arm touching out/binaries/gitlab-runner-helper/gitlab-runner-helper.arm64 touching out/helper-images/prebuilt-arm64.tar.xz touching out/helper-images/prebuilt-arm.tar.xz touching out/helper-images/prebuilt-s390x.tar.xz touching out/helper-images/prebuilt-x86_64.tar.xz touching out/helper-images/prebuilt-x86_64-pwsh.tar.xz $ make parallel_test_execute # Pulling images required for some tests go: downloading github.com/stretchr/testify v1.6.2-0.20200720104044-95a9d909e987 go: downloading github.com/BurntSushi/toml v0.3.1 go: downloading github.com/docker/docker v20.10.2+incompatible go: downloading github.com/sirupsen/logrus v1.7.0 go: downloading k8s.io/api v0.0.0-20191004102349-159aefb8556b go: downloading github.com/tevino/abool v0.0.0-20160628101133-3c25f2fe7cd0 go: downloading github.com/docker/go-units v0.3.2-0.20160802145505-eb879ae3e2b8 go: downloading github.com/gorhill/cronexpr v0.0.0-20160318121724-f0984319b442 go: downloading gitlab.com/ayufan/golang-cli-helpers v0.0.0-20171103152739-a7cf72d604cd go: extracting gitlab.com/ayufan/golang-cli-helpers v0.0.0-20171103152739-a7cf72d604cd go: downloading github.com/prometheus/client_golang v1.1.0 go: extracting github.com/BurntSushi/toml v0.3.1 go: extracting github.com/stretchr/testify v1.6.2-0.20200720104044-95a9d909e987 go: downloading gopkg.in/yaml.v2 v2.3.0 go: downloading github.com/urfave/cli v1.20.0 go: extracting gopkg.in/yaml.v2 v2.3.0 go: extracting github.com/docker/go-units v0.3.2-0.20160802145505-eb879ae3e2b8 go: extracting github.com/sirupsen/logrus v1.7.0 go: downloading github.com/pmezard/go-difflib v1.0.0 go: downloading k8s.io/apimachinery 
v0.0.0-20191004074956-c5d2f014d689 go: downloading golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad go: extracting github.com/gorhill/cronexpr v0.0.0-20160318121724-f0984319b442 go: downloading github.com/davecgh/go-spew v1.1.1 go: extracting github.com/urfave/cli v1.20.0 go: extracting github.com/pmezard/go-difflib v1.0.0 go: downloading github.com/gorilla/websocket v1.4.2 go: extracting github.com/prometheus/client_golang v1.1.0 go: extracting github.com/tevino/abool v0.0.0-20160628101133-3c25f2fe7cd0 go: downloading github.com/stretchr/objx v0.3.0 go: extracting k8s.io/apimachinery v0.0.0-20191004074956-c5d2f014d689 go: extracting github.com/gorilla/websocket v1.4.2 go: extracting github.com/stretchr/objx v0.3.0 go: downloading github.com/prometheus/common v0.6.0 go: downloading github.com/gorilla/mux v1.3.1-0.20170228224354-599cba5e7b61 go: downloading github.com/docker/go-connections v0.3.0 go: downloading github.com/docker/machine v0.7.1-0.20170120224952-7b7a141da844 go: extracting github.com/davecgh/go-spew v1.1.1 go: downloading github.com/json-iterator/go v1.1.10 go: extracting github.com/json-iterator/go v1.1.10 go: extracting github.com/docker/go-connections v0.3.0 go: downloading github.com/golang/protobuf v1.4.3 go: extracting github.com/gorilla/mux v1.3.1-0.20170228224354-599cba5e7b61 go: downloading github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 go: extracting github.com/docker/machine v0.7.1-0.20170120224952-7b7a141da844 go: extracting github.com/prometheus/common v0.6.0 go: extracting k8s.io/api v0.0.0-20191004102349-159aefb8556b go: extracting golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad go: extracting github.com/golang/protobuf v1.4.3 go: downloading golang.org/x/net v0.0.0-20201224014010-6772e930b67b go: extracting github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 go: downloading github.com/pkg/errors v0.9.1 go: downloading google.golang.org/protobuf v1.25.0 go: downloading 
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: extracting github.com/pkg/errors v0.9.1 go: downloading github.com/gogo/protobuf v1.1.1 go: extracting github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: downloading gopkg.in/inf.v0 v0.9.0 go: extracting gopkg.in/inf.v0 v0.9.0 go: extracting golang.org/x/net v0.0.0-20201224014010-6772e930b67b go: extracting google.golang.org/protobuf v1.25.0 go: extracting github.com/docker/docker v20.10.2+incompatible go: downloading github.com/beorn7/perks v1.0.1 go: downloading gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 go: extracting github.com/gogo/protobuf v1.1.1 go: extracting gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 go: extracting github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/procfs v0.0.5 go: downloading github.com/google/gofuzz v1.0.0 go: extracting github.com/google/gofuzz v1.0.0 go: downloading golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6 go: extracting github.com/prometheus/procfs v0.0.5 go: downloading github.com/hashicorp/vault/api v1.0.4 go: extracting github.com/hashicorp/vault/api v1.0.4 go: downloading github.com/hashicorp/go-multierror v1.0.0 go: extracting github.com/hashicorp/go-multierror v1.0.0 go: downloading github.com/hashicorp/vault/sdk v0.1.13 go: extracting golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6 go: extracting github.com/hashicorp/vault/sdk v0.1.13 go: downloading github.com/hashicorp/hcl v1.0.0 go: downloading gopkg.in/square/go-jose.v2 v2.3.1 go: downloading k8s.io/klog v1.0.0 go: extracting gopkg.in/square/go-jose.v2 v2.3.1 go: extracting k8s.io/klog v1.0.0 go: downloading github.com/mitchellh/mapstructure v1.4.0 go: extracting github.com/hashicorp/hcl v1.0.0 go: extracting github.com/mitchellh/mapstructure v1.4.0 go: downloading github.com/hashicorp/go-sockaddr v1.0.2 go: downloading github.com/hashicorp/go-cleanhttp v0.5.1 go: extracting github.com/hashicorp/go-sockaddr v1.0.2 go: extracting 
github.com/hashicorp/go-cleanhttp v0.5.1 go: downloading github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/pierrec/lz4 v2.0.5+incompatible go: extracting github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/hashicorp/go-rootcerts v1.0.1 go: downloading github.com/hashicorp/go-retryablehttp v0.5.4 go: extracting github.com/hashicorp/go-retryablehttp v0.5.4 go: downloading github.com/modern-go/reflect2 v1.0.1 go: extracting github.com/hashicorp/go-rootcerts v1.0.1 go: extracting github.com/modern-go/reflect2 v1.0.1 go: downloading golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e go: downloading github.com/golang/snappy v0.0.1 go: extracting github.com/golang/snappy v0.0.1 go: downloading github.com/ryanuber/go-glob v1.0.0 go: extracting golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e go: downloading golang.org/x/text v0.3.6 go: extracting github.com/ryanuber/go-glob v1.0.0 go: downloading github.com/opencontainers/image-spec v1.0.1 go: extracting github.com/opencontainers/image-spec v1.0.1 go: downloading github.com/opencontainers/go-digest v1.0.0-rc1 go: downloading github.com/moby/term v0.0.0-20201216013528-df9cb8a40635 go: downloading github.com/containerd/containerd v1.4.3 go: downloading github.com/morikuni/aec v1.0.0 go: downloading github.com/docker/distribution v2.7.0+incompatible go: downloading google.golang.org/grpc v1.34.0 go: extracting github.com/pierrec/lz4 v2.0.5+incompatible go: extracting github.com/opencontainers/go-digest v1.0.0-rc1 go: extracting github.com/morikuni/aec v1.0.0 go: extracting github.com/moby/term v0.0.0-20201216013528-df9cb8a40635 go: extracting github.com/docker/distribution v2.7.0+incompatible go: extracting google.golang.org/grpc v1.34.0 go: extracting github.com/containerd/containerd v1.4.3 go: downloading github.com/matttproud/golang_protobuf_extensions v1.0.1 go: extracting github.com/matttproud/golang_protobuf_extensions v1.0.1 go: downloading google.golang.org/genproto 
v0.0.0-20201203001206-6486ece9c497 go: extracting golang.org/x/text v0.3.6 go: extracting google.golang.org/genproto v0.0.0-20201203001206-6486ece9c497 go: finding github.com/BurntSushi/toml v0.3.1 go: finding github.com/docker/go-units v0.3.2-0.20160802145505-eb879ae3e2b8 go: finding github.com/prometheus/client_golang v1.1.0 go: finding github.com/beorn7/perks v1.0.1 go: finding github.com/golang/protobuf v1.4.3 go: finding google.golang.org/protobuf v1.25.0 go: finding github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 go: finding github.com/prometheus/common v0.6.0 go: finding github.com/matttproud/golang_protobuf_extensions v1.0.1 go: finding github.com/prometheus/procfs v0.0.5 go: finding github.com/sirupsen/logrus v1.7.0 go: finding golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6 go: finding github.com/stretchr/testify v1.6.2-0.20200720104044-95a9d909e987 go: finding github.com/davecgh/go-spew v1.1.1 go: finding github.com/pmezard/go-difflib v1.0.0 go: finding github.com/stretchr/objx v0.3.0 go: finding gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 go: finding github.com/tevino/abool v0.0.0-20160628101133-3c25f2fe7cd0 go: finding github.com/urfave/cli v1.20.0 go: finding gitlab.com/ayufan/golang-cli-helpers v0.0.0-20171103152739-a7cf72d604cd go: finding github.com/docker/docker v20.10.2+incompatible go: finding gopkg.in/yaml.v2 v2.3.0 go: finding k8s.io/apimachinery v0.0.0-20191004074956-c5d2f014d689 go: finding github.com/docker/go-connections v0.3.0 go: finding github.com/opencontainers/image-spec v1.0.1 go: finding github.com/opencontainers/go-digest v1.0.0-rc1 go: finding github.com/gogo/protobuf v1.1.1 go: finding github.com/containerd/containerd v1.4.3 go: finding github.com/pkg/errors v0.9.1 go: finding google.golang.org/grpc v1.34.0 go: finding google.golang.org/genproto v0.0.0-20201203001206-6486ece9c497 go: finding github.com/docker/distribution v2.7.0+incompatible go: finding golang.org/x/net 
v0.0.0-20201224014010-6772e930b67b go: finding github.com/moby/term v0.0.0-20201216013528-df9cb8a40635 go: finding github.com/morikuni/aec v1.0.0 go: finding github.com/docker/machine v0.7.1-0.20170120224952-7b7a141da844 go: finding golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad go: finding github.com/gorhill/cronexpr v0.0.0-20160318121724-f0984319b442 go: finding github.com/hashicorp/vault/api v1.0.4 go: finding github.com/hashicorp/errwrap v1.0.0 go: finding github.com/hashicorp/go-cleanhttp v0.5.1 go: finding github.com/hashicorp/go-multierror v1.0.0 go: finding github.com/hashicorp/go-retryablehttp v0.5.4 go: finding github.com/hashicorp/go-rootcerts v1.0.1 go: finding github.com/hashicorp/hcl v1.0.0 go: finding github.com/hashicorp/vault/sdk v0.1.13 go: finding github.com/golang/snappy v0.0.1 go: finding github.com/pierrec/lz4 v2.0.5+incompatible go: finding github.com/hashicorp/go-sockaddr v1.0.2 go: finding github.com/ryanuber/go-glob v1.0.0 go: finding github.com/mitchellh/mapstructure v1.4.0 go: finding golang.org/x/text v0.3.6 go: finding golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e go: finding gopkg.in/square/go-jose.v2 v2.3.1 go: finding github.com/json-iterator/go v1.1.10 go: finding github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: finding github.com/modern-go/reflect2 v1.0.1 go: finding github.com/gorilla/mux v1.3.1-0.20170228224354-599cba5e7b61 go: finding github.com/gorilla/websocket v1.4.2 go: finding k8s.io/api v0.0.0-20191004102349-159aefb8556b go: finding gopkg.in/inf.v0 v0.9.0 go: finding github.com/google/gofuzz v1.0.0 go: finding k8s.io/klog v1.0.0 [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] latest: Pulling from gitlab-org/gitlab-runner/alpine-no-root [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] df20fa9351a1: Pulling fs layer [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] c7e9d654d1d6: Pulling fs layer [docker:18-git] 18-git: Pulling 
from library/docker [docker:18-dind] 18-dind: Pulling from library/docker [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] df20fa9351a1: Verifying Checksum [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] df20fa9351a1: Download complete [alpine:3.12.0] 3.12.0: Pulling from library/alpine [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] c7e9d654d1d6: Verifying Checksum [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] c7e9d654d1d6: Download complete [docker:18-dind] 9d48c3bd43c5: Pulling fs layer [docker:18-dind] 7f94eaf8af20: Pulling fs layer [docker:18-dind] 9fe9984849c1: Pulling fs layer [docker:18-dind] 3091f1b4f1aa: Pulling fs layer [docker:18-dind] 6ef266ac0949: Pulling fs layer [docker:18-dind] b2c2c13f4c08: Pulling fs layer [docker:18-dind] f354b3ae6d74: Pulling fs layer [docker:18-dind] 8f4a6170836f: Pulling fs layer [docker:18-dind] 853fedec02a1: Pulling fs layer [docker:18-dind] a57a377d7e5d: Pulling fs layer [docker:18-dind] ac4bc61da695: Pulling fs layer [docker:18-dind] 3091f1b4f1aa: Waiting [docker:18-dind] 6ef266ac0949: Waiting [docker:18-dind] b2c2c13f4c08: Waiting [docker:18-dind] f354b3ae6d74: Waiting [docker:18-dind] 8f4a6170836f: Waiting [docker:18-dind] 853fedec02a1: Waiting [docker:18-dind] a57a377d7e5d: Waiting [docker:18-dind] ac4bc61da695: Waiting [alpine:3.12.0] df20fa9351a1: Pulling fs layer [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] df20fa9351a1: Pull complete [alpine:3.12.0] df20fa9351a1: Pull complete [docker:18-git] 9d48c3bd43c5: Pulling fs layer [docker:18-git] 7f94eaf8af20: Pulling fs layer [docker:18-git] 9fe9984849c1: Pulling fs layer [docker:18-git] 3091f1b4f1aa: Pulling fs layer [docker:18-git] 6ef266ac0949: Pulling fs layer [docker:18-git] b2c2c13f4c08: Pulling fs layer [docker:18-git] f354b3ae6d74: Pulling fs layer [docker:18-git] 6ab2580d9dce: Pulling fs layer [docker:18-git] 3091f1b4f1aa: Waiting [docker:18-git] 
6ef266ac0949: Waiting [docker:18-git] b2c2c13f4c08: Waiting [docker:18-git] f354b3ae6d74: Waiting [docker:18-git] 6ab2580d9dce: Waiting [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] c7e9d654d1d6: Pull complete [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] Digest: sha256:034971042d77defbcd01dbc1c163b5cf03397bc3ab5228b0943e019eb9f5f824 [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] Status: Downloaded newer image for registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest [docker:18-dind] 9fe9984849c1: Verifying Checksum [docker:18-dind] 9fe9984849c1: Download complete [docker:18-git] 9fe9984849c1: Verifying Checksum [docker:18-git] 9fe9984849c1: Download complete [alpine:3.12.0] Digest: sha256:185518070891758909c9f839cf4ca393ee977ac378609f700f60a771a2dfe321 [alpine:3.12.0] Status: Downloaded newer image for alpine:3.12.0 [alpine:3.12.0] docker.io/library/alpine:3.12.0 [docker:18-dind] 7f94eaf8af20: Verifying Checksum [docker:18-dind] 7f94eaf8af20: Download complete [docker:18-git] 7f94eaf8af20: Verifying Checksum [docker:18-git] 7f94eaf8af20: Download complete [docker:18-dind] 9d48c3bd43c5: Verifying Checksum [docker:18-git] 9d48c3bd43c5: Download complete [docker:18-dind] 9d48c3bd43c5: Download complete [docker:18-dind] 6ef266ac0949: Verifying Checksum [docker:18-dind] 6ef266ac0949: Download complete [docker:18-git] 6ef266ac0949: Verifying Checksum [docker:18-git] 6ef266ac0949: Download complete [docker:18-dind] 9d48c3bd43c5: Pull complete [docker:18-git] 9d48c3bd43c5: Pull complete [docker:18-dind] b2c2c13f4c08: Verifying Checksum [docker:18-dind] b2c2c13f4c08: Download complete [docker:18-git] b2c2c13f4c08: Verifying Checksum [docker:18-git] b2c2c13f4c08: Download complete [docker:18-dind] 7f94eaf8af20: Pull complete [docker:18-git] 7f94eaf8af20: Pull 
complete [docker:18-dind] f354b3ae6d74: Verifying Checksum [docker:18-dind] f354b3ae6d74: Download complete [docker:18-git] f354b3ae6d74: Verifying Checksum [docker:18-git] f354b3ae6d74: Download complete [docker:18-dind] 9fe9984849c1: Pull complete [docker:18-git] 9fe9984849c1: Pull complete [docker:18-dind] 853fedec02a1: Verifying Checksum [docker:18-dind] 853fedec02a1: Download complete [docker:18-dind] 8f4a6170836f: Verifying Checksum [docker:18-dind] 8f4a6170836f: Download complete [docker:18-dind] a57a377d7e5d: Verifying Checksum [docker:18-dind] a57a377d7e5d: Download complete [docker:18-dind] ac4bc61da695: Verifying Checksum [docker:18-dind] ac4bc61da695: Download complete [docker:18-dind] 3091f1b4f1aa: Verifying Checksum [docker:18-git] 3091f1b4f1aa: Verifying Checksum [docker:18-git] 3091f1b4f1aa: Download complete [docker:18-dind] 3091f1b4f1aa: Download complete [docker:18-git] 6ab2580d9dce: Verifying Checksum [docker:18-git] 6ab2580d9dce: Download complete [docker:18-git] 3091f1b4f1aa: Pull complete [docker:18-dind] 3091f1b4f1aa: Pull complete [docker:18-dind] 6ef266ac0949: Pull complete [docker:18-git] 6ef266ac0949: Pull complete [docker:18-dind] b2c2c13f4c08: Pull complete [docker:18-git] b2c2c13f4c08: Pull complete [docker:18-git] f354b3ae6d74: Pull complete [docker:18-dind] f354b3ae6d74: Pull complete [docker:18-dind] 8f4a6170836f: Pull complete [docker:18-dind] 853fedec02a1: Pull complete [docker:18-dind] a57a377d7e5d: Pull complete [docker:18-git] 6ab2580d9dce: Pull complete [docker:18-git] Digest: sha256:5fafa7fc518da8990feb9983a6f0d5069b8e4717e3f922e23e445a50e6c731ec [docker:18-git] Status: Downloaded newer image for docker:18-git [docker:18-git] docker.io/library/docker:18-git [docker:18-dind] ac4bc61da695: Pull complete [docker:18-dind] Digest: sha256:86df3c3573065f2c6f24cd925fd5bc3a0aff899bdf664ff4d2e3ebab26d96bed [docker:18-dind] Status: Downloaded newer image for docker:18-dind [docker:18-dind] docker.io/library/docker:18-dind # Executing 
tests Number of definitions: 112 Suite size: 8 Suite index: 1 Execution size: 15 Execution offset: 1  --- Starting part 0 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/cache' package with coverprofile in 'count' mode:  === RUN TestCreateAdapter === RUN TestCreateAdapter/adapter_exists === RUN TestCreateAdapter/adapter_errors_on_factorize === RUN TestCreateAdapter/adapter_doesn't_exist --- PASS: TestCreateAdapter (0.00s) --- PASS: TestCreateAdapter/adapter_exists (0.00s) --- PASS: TestCreateAdapter/adapter_errors_on_factorize (0.00s) --- PASS: TestCreateAdapter/adapter_doesn't_exist (0.00s) === RUN TestDoubledRegistration --- PASS: TestDoubledRegistration (0.00s) === RUN TestCacheOperations === RUN TestCacheOperations/adapter-exists === RUN TestCacheOperations/adapter-exists/GetDownloadURL === RUN TestCacheOperations/adapter-exists/GetUploadURL === RUN TestCacheOperations/adapter-exists/GetGoCloudURL === RUN TestCacheOperations/no-config === RUN TestCacheOperations/no-config/GetDownloadURL time="2021-05-20T15:29:22Z" level=warning msg="Cache config not defined. Skipping cache operation." === RUN TestCacheOperations/no-config/GetUploadURL time="2021-05-20T15:29:22Z" level=warning msg="Cache config not defined. Skipping cache operation." === RUN TestCacheOperations/no-config/GetGoCloudURL time="2021-05-20T15:29:22Z" level=warning msg="Cache config not defined. Skipping cache operation." === RUN TestCacheOperations/key-not-specified === RUN TestCacheOperations/key-not-specified/GetDownloadURL time="2021-05-20T15:29:22Z" level=warning msg="Empty cache key. Skipping adapter selection." === RUN TestCacheOperations/key-not-specified/GetUploadURL time="2021-05-20T15:29:22Z" level=warning msg="Empty cache key. Skipping adapter selection." === RUN TestCacheOperations/key-not-specified/GetGoCloudURL time="2021-05-20T15:29:22Z" level=warning msg="Empty cache key. Skipping adapter selection." 
=== RUN TestCacheOperations/adapter-doesnt-exists === RUN TestCacheOperations/adapter-doesnt-exists/GetDownloadURL === RUN TestCacheOperations/adapter-doesnt-exists/GetUploadURL === RUN TestCacheOperations/adapter-doesnt-exists/GetGoCloudURL === RUN TestCacheOperations/adapter-error-on-factorization === RUN TestCacheOperations/adapter-error-on-factorization/GetDownloadURL time="2021-05-20T15:29:22Z" level=error msg="Could not create cache adapter" error="test error" === RUN TestCacheOperations/adapter-error-on-factorization/GetUploadURL time="2021-05-20T15:29:22Z" level=error msg="Could not create cache adapter" error="test error" === RUN TestCacheOperations/adapter-error-on-factorization/GetGoCloudURL time="2021-05-20T15:29:22Z" level=error msg="Could not create cache adapter" error="test error" --- PASS: TestCacheOperations (0.00s) --- PASS: TestCacheOperations/adapter-exists (0.00s) --- PASS: TestCacheOperations/adapter-exists/GetDownloadURL (0.00s) cache_test.go:55: PASS: GetDownloadURL() --- PASS: TestCacheOperations/adapter-exists/GetUploadURL (0.00s) cache_test.go:55: PASS: GetUploadURL() --- PASS: TestCacheOperations/adapter-exists/GetGoCloudURL (0.00s) cache_test.go:55: PASS: GetGoCloudURL() --- PASS: TestCacheOperations/no-config (0.00s) --- PASS: TestCacheOperations/no-config/GetDownloadURL (0.00s) --- PASS: TestCacheOperations/no-config/GetUploadURL (0.00s) --- PASS: TestCacheOperations/no-config/GetGoCloudURL (0.00s) --- PASS: TestCacheOperations/key-not-specified (0.00s) --- PASS: TestCacheOperations/key-not-specified/GetDownloadURL (0.00s) --- PASS: TestCacheOperations/key-not-specified/GetUploadURL (0.00s) --- PASS: TestCacheOperations/key-not-specified/GetGoCloudURL (0.00s) --- PASS: TestCacheOperations/adapter-doesnt-exists (0.00s) --- PASS: TestCacheOperations/adapter-doesnt-exists/GetDownloadURL (0.00s) --- PASS: TestCacheOperations/adapter-doesnt-exists/GetUploadURL (0.00s) --- PASS: TestCacheOperations/adapter-doesnt-exists/GetGoCloudURL 
(0.00s) --- PASS: TestCacheOperations/adapter-error-on-factorization (0.00s) --- PASS: TestCacheOperations/adapter-error-on-factorization/GetDownloadURL (0.00s) --- PASS: TestCacheOperations/adapter-error-on-factorization/GetUploadURL (0.00s) --- PASS: TestCacheOperations/adapter-error-on-factorization/GetGoCloudURL (0.00s) === RUN TestGenerateObjectName === RUN TestGenerateObjectName/empty_key === RUN TestGenerateObjectName/short_path_is_set === RUN TestGenerateObjectName/multiple_segment_path_is_set === RUN TestGenerateObjectName/path_traversal_escapes_project_namespace === RUN TestGenerateObjectName/default_usage === RUN TestGenerateObjectName/path_is_empty === RUN TestGenerateObjectName/shared_flag_is_set_to_true === RUN TestGenerateObjectName/shared_flag_is_set_to_false === RUN TestGenerateObjectName/path_traversal_but_within_base_path === RUN TestGenerateObjectName/path_traversal_resolves_to_empty_key --- PASS: TestGenerateObjectName (0.00s) --- PASS: TestGenerateObjectName/empty_key (0.00s) --- PASS: TestGenerateObjectName/short_path_is_set (0.00s) --- PASS: TestGenerateObjectName/multiple_segment_path_is_set (0.00s) --- PASS: TestGenerateObjectName/path_traversal_escapes_project_namespace (0.00s) --- PASS: TestGenerateObjectName/default_usage (0.00s) --- PASS: TestGenerateObjectName/path_is_empty (0.00s) --- PASS: TestGenerateObjectName/shared_flag_is_set_to_true (0.00s) --- PASS: TestGenerateObjectName/shared_flag_is_set_to_false (0.00s) --- PASS: TestGenerateObjectName/path_traversal_but_within_base_path (0.00s) --- PASS: TestGenerateObjectName/path_traversal_resolves_to_empty_key (0.00s) === RUN TestCacheUploadEnv === RUN TestCacheUploadEnv/adapter_not_exists === RUN TestCacheUploadEnv/adapter_creation_error time="2021-05-20T15:29:22Z" level=error msg="Could not create cache adapter" error="test error" === RUN TestCacheUploadEnv/no_cache_config time="2021-05-20T15:29:22Z" level=warning msg="Cache config not defined. Skipping cache operation." 
=== RUN TestCacheUploadEnv/full_map === RUN TestCacheUploadEnv/nil === RUN TestCacheUploadEnv/no_key --- PASS: TestCacheUploadEnv (0.00s) --- PASS: TestCacheUploadEnv/adapter_not_exists (0.00s) --- PASS: TestCacheUploadEnv/adapter_creation_error (0.00s) cache_test.go:379: PASS: GetUploadEnv() --- PASS: TestCacheUploadEnv/no_cache_config (0.00s) --- PASS: TestCacheUploadEnv/full_map (0.00s) cache_test.go:379: PASS: GetUploadEnv() --- PASS: TestCacheUploadEnv/nil (0.00s) cache_test.go:379: PASS: GetUploadEnv() --- PASS: TestCacheUploadEnv/no_key (0.00s) cache_test.go:379: PASS: GetUploadEnv() === RUN TestCreateCredentialsAdapter === RUN TestCreateCredentialsAdapter/adapter_exists === RUN TestCreateCredentialsAdapter/adapter_errors_on_factorize === RUN TestCreateCredentialsAdapter/adapter_doesn't_exist --- PASS: TestCreateCredentialsAdapter (0.00s) --- PASS: TestCreateCredentialsAdapter/adapter_exists (0.00s) --- PASS: TestCreateCredentialsAdapter/adapter_errors_on_factorize (0.00s) --- PASS: TestCreateCredentialsAdapter/adapter_doesn't_exist (0.00s) === RUN TestCredentialsFactoryDoubledRegistration --- PASS: TestCredentialsFactoryDoubledRegistration (0.00s) PASS coverage: 4.4% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/cache 0.021s coverage: 4.4% of statements in gitlab.com/gitlab-org/gitlab-runner/...  
--- Starting part 0 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/cache/azure' package with coverprofile in 'count' mode:  go: downloading github.com/Azure/azure-storage-blob-go v0.11.1-0.20201209121048-6df5d9af221d go: extracting github.com/Azure/azure-storage-blob-go v0.11.1-0.20201209121048-6df5d9af221d go: downloading github.com/google/uuid v1.1.2 go: downloading github.com/Azure/azure-pipeline-go v0.2.3 go: extracting github.com/google/uuid v1.1.2 go: extracting github.com/Azure/azure-pipeline-go v0.2.3 go: downloading github.com/mattn/go-ieproxy v0.0.1 go: extracting github.com/mattn/go-ieproxy v0.0.1 go: finding github.com/Azure/azure-storage-blob-go v0.11.1-0.20201209121048-6df5d9af221d go: finding github.com/Azure/azure-pipeline-go v0.2.3 go: finding github.com/google/uuid v1.1.2 go: finding github.com/mattn/go-ieproxy v0.0.1 === RUN TestAdapterOperation_InvalidConfig === RUN TestAdapterOperation_InvalidConfig/invalid-container-name-and-no-account-key === RUN TestAdapterOperation_InvalidConfig/invalid-container-name-and-no-account-key/GetDownloadURL time="2021-05-20T15:29:26Z" level=error msg="error generating Azure pre-signed URL" error="missing Azure storage account key" === RUN TestAdapterOperation_InvalidConfig/invalid-container-name-and-no-account-key/GetUploadURL time="2021-05-20T15:29:26Z" level=error msg="error generating Azure pre-signed URL" error="missing Azure storage account key" === RUN TestAdapterOperation_InvalidConfig/invalid-container-name-and-no-account-key/GetGoCloudURL time="2021-05-20T15:29:26Z" level=error msg="error parsing blob URL" error="parse azblob://\x00/key: net/url: invalid control character in URL" url="azblob://\x00/key" === RUN TestAdapterOperation_InvalidConfig/invalid-container-name-and-no-account-key/GetUploadEnv time="2021-05-20T15:29:26Z" level=error msg="error generating Azure SAS token" error="missing Azure storage account key" === RUN TestAdapterOperation_InvalidConfig/container-not-specified === RUN 
TestAdapterOperation_InvalidConfig/container-not-specified/GetDownloadURL time="2021-05-20T15:29:26Z" level=error msg="ContainerName can't be empty" === RUN TestAdapterOperation_InvalidConfig/container-not-specified/GetUploadURL time="2021-05-20T15:29:26Z" level=error msg="ContainerName can't be empty" === RUN TestAdapterOperation_InvalidConfig/container-not-specified/GetGoCloudURL time="2021-05-20T15:29:26Z" level=error msg="ContainerName can't be empty" === RUN TestAdapterOperation_InvalidConfig/container-not-specified/GetUploadEnv time="2021-05-20T15:29:26Z" level=error msg="ContainerName can't be empty" === RUN TestAdapterOperation_InvalidConfig/error-on-credentials-resolver-initialization === RUN TestAdapterOperation_InvalidConfig/no-azure-config === RUN TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error === RUN TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error/GetDownloadURL time="2021-05-20T15:29:26Z" level=error msg="error resolving Azure credentials" error="test error" === RUN TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error/GetUploadURL time="2021-05-20T15:29:26Z" level=error msg="error resolving Azure credentials" error="test error" === RUN TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error/GetGoCloudURL === RUN TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error/GetUploadEnv time="2021-05-20T15:29:26Z" level=error msg="error resolving Azure credentials" error="test error" === RUN TestAdapterOperation_InvalidConfig/no-credentials === RUN TestAdapterOperation_InvalidConfig/no-credentials/GetDownloadURL time="2021-05-20T15:29:26Z" level=error msg="error generating Azure pre-signed URL" error="missing Azure storage account name" === RUN TestAdapterOperation_InvalidConfig/no-credentials/GetUploadURL time="2021-05-20T15:29:26Z" level=error msg="error generating Azure pre-signed URL" error="missing Azure storage account name" === RUN 
TestAdapterOperation_InvalidConfig/no-credentials/GetGoCloudURL === RUN TestAdapterOperation_InvalidConfig/no-credentials/GetUploadEnv time="2021-05-20T15:29:26Z" level=error msg="error generating Azure SAS token" error="missing Azure storage account name" === RUN TestAdapterOperation_InvalidConfig/no-account-name === RUN TestAdapterOperation_InvalidConfig/no-account-name/GetDownloadURL time="2021-05-20T15:29:26Z" level=error msg="error generating Azure pre-signed URL" error="missing Azure storage account name" === RUN TestAdapterOperation_InvalidConfig/no-account-name/GetUploadURL time="2021-05-20T15:29:26Z" level=error msg="error generating Azure pre-signed URL" error="missing Azure storage account name" === RUN TestAdapterOperation_InvalidConfig/no-account-name/GetGoCloudURL === RUN TestAdapterOperation_InvalidConfig/no-account-name/GetUploadEnv time="2021-05-20T15:29:26Z" level=error msg="error generating Azure SAS token" error="missing Azure storage account name" === RUN TestAdapterOperation_InvalidConfig/no-account-key === RUN TestAdapterOperation_InvalidConfig/no-account-key/GetDownloadURL time="2021-05-20T15:29:26Z" level=error msg="error generating Azure pre-signed URL" error="missing Azure storage account key" === RUN TestAdapterOperation_InvalidConfig/no-account-key/GetUploadURL time="2021-05-20T15:29:26Z" level=error msg="error generating Azure pre-signed URL" error="missing Azure storage account key" === RUN TestAdapterOperation_InvalidConfig/no-account-key/GetGoCloudURL === RUN TestAdapterOperation_InvalidConfig/no-account-key/GetUploadEnv time="2021-05-20T15:29:26Z" level=error msg="error generating Azure SAS token" error="missing Azure storage account key" --- PASS: TestAdapterOperation_InvalidConfig (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/invalid-container-name-and-no-account-key (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/invalid-container-name-and-no-account-key/GetDownloadURL (0.00s) --- PASS: 
TestAdapterOperation_InvalidConfig/invalid-container-name-and-no-account-key/GetUploadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/invalid-container-name-and-no-account-key/GetGoCloudURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/invalid-container-name-and-no-account-key/GetUploadEnv (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/container-not-specified (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/container-not-specified/GetDownloadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/container-not-specified/GetUploadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/container-not-specified/GetGoCloudURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/container-not-specified/GetUploadEnv (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/error-on-credentials-resolver-initialization (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-azure-config (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error/GetDownloadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error/GetUploadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error/GetGoCloudURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error/GetUploadEnv (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-credentials (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-credentials/GetDownloadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-credentials/GetUploadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-credentials/GetGoCloudURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-credentials/GetUploadEnv (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-account-name (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-account-name/GetDownloadURL (0.00s) --- PASS: 
TestAdapterOperation_InvalidConfig/no-account-name/GetUploadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-account-name/GetGoCloudURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-account-name/GetUploadEnv (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-account-key (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-account-key/GetDownloadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-account-key/GetUploadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-account-key/GetGoCloudURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-account-key/GetUploadEnv (0.00s) === RUN TestAdapterOperation === RUN TestAdapterOperation/valid-configuration === RUN TestAdapterOperation/valid-configuration/GetDownloadURL === RUN TestAdapterOperation/valid-configuration/GetUploadURL === RUN TestAdapterOperation/valid-configuration-with-leading-slash === RUN TestAdapterOperation/valid-configuration-with-leading-slash/GetDownloadURL === RUN TestAdapterOperation/valid-configuration-with-leading-slash/GetUploadURL === RUN TestAdapterOperation/error-on-URL-signing === RUN TestAdapterOperation/error-on-URL-signing/GetDownloadURL time="2021-05-20T15:29:26Z" level=error msg="error generating Azure pre-signed URL" error="test error" === RUN TestAdapterOperation/error-on-URL-signing/GetUploadURL time="2021-05-20T15:29:26Z" level=error msg="error generating Azure pre-signed URL" error="test error" === RUN TestAdapterOperation/invalid-URL-returned === RUN TestAdapterOperation/invalid-URL-returned/GetDownloadURL time="2021-05-20T15:29:26Z" level=error msg="error generating Azure pre-signed URL" error="parse ://test: missing protocol scheme" === RUN TestAdapterOperation/invalid-URL-returned/GetUploadURL time="2021-05-20T15:29:26Z" level=error msg="error generating Azure pre-signed URL" error="parse ://test: missing protocol scheme" --- PASS: TestAdapterOperation (0.00s) --- PASS: TestAdapterOperation/valid-configuration (0.00s) --- PASS: 
TestAdapterOperation/valid-configuration/GetDownloadURL (0.00s) adapter_test.go:250: PASS: Resolve() adapter_test.go:250: PASS: Credentials() --- PASS: TestAdapterOperation/valid-configuration/GetUploadURL (0.00s) adapter_test.go:250: PASS: Resolve() adapter_test.go:250: PASS: Credentials() --- PASS: TestAdapterOperation/valid-configuration-with-leading-slash (0.00s) --- PASS: TestAdapterOperation/valid-configuration-with-leading-slash/GetDownloadURL (0.00s) adapter_test.go:250: PASS: Resolve() adapter_test.go:250: PASS: Credentials() --- PASS: TestAdapterOperation/valid-configuration-with-leading-slash/GetUploadURL (0.00s) adapter_test.go:250: PASS: Resolve() adapter_test.go:250: PASS: Credentials() --- PASS: TestAdapterOperation/error-on-URL-signing (0.00s) --- PASS: TestAdapterOperation/error-on-URL-signing/GetDownloadURL (0.00s) adapter_test.go:250: PASS: Resolve() adapter_test.go:250: PASS: Credentials() --- PASS: TestAdapterOperation/error-on-URL-signing/GetUploadURL (0.00s) adapter_test.go:250: PASS: Resolve() adapter_test.go:250: PASS: Credentials() --- PASS: TestAdapterOperation/invalid-URL-returned (0.00s) --- PASS: TestAdapterOperation/invalid-URL-returned/GetDownloadURL (0.00s) adapter_test.go:250: PASS: Resolve() adapter_test.go:250: PASS: Credentials() --- PASS: TestAdapterOperation/invalid-URL-returned/GetUploadURL (0.00s) adapter_test.go:250: PASS: Resolve() adapter_test.go:250: PASS: Credentials() === RUN TestAzureClientURLGeneration === RUN TestAzureClientURLGeneration/missing_account_name === RUN TestAzureClientURLGeneration/missing_account_key === RUN TestAzureClientURLGeneration/GET_request === RUN TestAzureClientURLGeneration/GET_request_in_custom_storage_domain === RUN TestAzureClientURLGeneration/PUT_request --- PASS: TestAzureClientURLGeneration (0.00s) --- PASS: TestAzureClientURLGeneration/missing_account_name (0.00s) --- PASS: TestAzureClientURLGeneration/missing_account_key (0.00s) --- PASS: TestAzureClientURLGeneration/GET_request 
(0.00s) --- PASS: TestAzureClientURLGeneration/GET_request_in_custom_storage_domain (0.00s) --- PASS: TestAzureClientURLGeneration/PUT_request (0.00s) === RUN TestDefaultCredentialsResolver === RUN TestDefaultCredentialsResolver/config_is_nil === RUN TestDefaultCredentialsResolver/credentials_not_set === RUN TestDefaultCredentialsResolver/credentials_direct_in_config --- PASS: TestDefaultCredentialsResolver (0.00s) --- PASS: TestDefaultCredentialsResolver/config_is_nil (0.00s) --- PASS: TestDefaultCredentialsResolver/credentials_not_set (0.00s) --- PASS: TestDefaultCredentialsResolver/credentials_direct_in_config (0.00s) PASS coverage: 3.5% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/cache/azure 0.025s coverage: 3.5% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 0 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/cache/gcs' package with coverprofile in 'count' mode:  go: downloading cloud.google.com/go v0.72.0 go: extracting cloud.google.com/go v0.72.0 go: downloading cloud.google.com/go/storage v1.12.0 go: extracting cloud.google.com/go/storage v1.12.0 go: downloading google.golang.org/api v0.36.0 go: downloading github.com/googleapis/gax-go v2.0.2+incompatible go: downloading go.opencensus.io v0.22.5 go: extracting github.com/googleapis/gax-go v2.0.2+incompatible go: downloading github.com/googleapis/gax-go/v2 v2.0.5 go: extracting github.com/googleapis/gax-go/v2 v2.0.5 go: extracting go.opencensus.io v0.22.5 go: downloading github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e go: extracting github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e go: extracting google.golang.org/api v0.36.0 go: downloading golang.org/x/oauth2 v0.0.0-20201203001011-0b49973bad19 go: extracting golang.org/x/oauth2 v0.0.0-20201203001011-0b49973bad19 go: finding cloud.google.com/go/storage v1.12.0 go: finding cloud.google.com/go v0.72.0 go: finding github.com/googleapis/gax-go/v2 
v2.0.5 go: finding google.golang.org/api v0.36.0 go: finding go.opencensus.io v0.22.5 go: finding github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e go: finding golang.org/x/oauth2 v0.0.0-20201203001011-0b49973bad19 === RUN TestAdapterOperation_InvalidConfig === RUN TestAdapterOperation_InvalidConfig/error-on-credentials-resolver-initialization === RUN TestAdapterOperation_InvalidConfig/no-gcs-config === RUN TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error === RUN TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error/GetDownloadURL time="2021-05-20T15:29:37Z" level=error msg="error while resolving GCS credentials: test error" === RUN TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error/GetUploadURL time="2021-05-20T15:29:37Z" level=error msg="error while resolving GCS credentials: test error" === RUN TestAdapterOperation_InvalidConfig/no-credentials === RUN TestAdapterOperation_InvalidConfig/no-credentials/GetDownloadURL time="2021-05-20T15:29:37Z" level=error msg="error while generating GCS pre-signed URL: storage: missing required GoogleAccessID" === RUN TestAdapterOperation_InvalidConfig/no-credentials/GetUploadURL time="2021-05-20T15:29:37Z" level=error msg="error while generating GCS pre-signed URL: storage: missing required GoogleAccessID" === RUN TestAdapterOperation_InvalidConfig/no-access-id === RUN TestAdapterOperation_InvalidConfig/no-access-id/GetDownloadURL time="2021-05-20T15:29:37Z" level=error msg="error while generating GCS pre-signed URL: storage: missing required GoogleAccessID" === RUN TestAdapterOperation_InvalidConfig/no-access-id/GetUploadURL time="2021-05-20T15:29:37Z" level=error msg="error while generating GCS pre-signed URL: storage: missing required GoogleAccessID" === RUN TestAdapterOperation_InvalidConfig/no-private-key === RUN TestAdapterOperation_InvalidConfig/no-private-key/GetDownloadURL time="2021-05-20T15:29:37Z" level=error msg="error while generating GCS 
pre-signed URL: storage: exactly one of PrivateKey or SignedBytes must be set" === RUN TestAdapterOperation_InvalidConfig/no-private-key/GetUploadURL time="2021-05-20T15:29:37Z" level=error msg="error while generating GCS pre-signed URL: storage: exactly one of PrivateKey or SignedBytes must be set" === RUN TestAdapterOperation_InvalidConfig/bucket-not-specified === RUN TestAdapterOperation_InvalidConfig/bucket-not-specified/GetDownloadURL time="2021-05-20T15:29:37Z" level=error msg="BucketName can't be empty" === RUN TestAdapterOperation_InvalidConfig/bucket-not-specified/GetUploadURL time="2021-05-20T15:29:37Z" level=error msg="BucketName can't be empty" --- PASS: TestAdapterOperation_InvalidConfig (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/error-on-credentials-resolver-initialization (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-gcs-config (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error/GetDownloadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/credentials-resolver-resolve-error/GetUploadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-credentials (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-credentials/GetDownloadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-credentials/GetUploadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-access-id (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-access-id/GetDownloadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-access-id/GetUploadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-private-key (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-private-key/GetDownloadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/no-private-key/GetUploadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/bucket-not-specified (0.00s) --- PASS: 
TestAdapterOperation_InvalidConfig/bucket-not-specified/GetDownloadURL (0.00s) --- PASS: TestAdapterOperation_InvalidConfig/bucket-not-specified/GetUploadURL (0.00s) === RUN TestAdapterOperation === RUN TestAdapterOperation/valid-configuration === RUN TestAdapterOperation/valid-configuration/GetDownloadURL === RUN TestAdapterOperation/valid-configuration/GetUploadURL === RUN TestAdapterOperation/error-on-URL-signing === RUN TestAdapterOperation/error-on-URL-signing/GetDownloadURL time="2021-05-20T15:29:37Z" level=error msg="error while generating GCS pre-signed URL: test error" === RUN TestAdapterOperation/error-on-URL-signing/GetUploadURL time="2021-05-20T15:29:37Z" level=error msg="error while generating GCS pre-signed URL: test error" === RUN TestAdapterOperation/invalid-URL-returned === RUN TestAdapterOperation/invalid-URL-returned/GetDownloadURL time="2021-05-20T15:29:37Z" level=error msg="error while parsing generated URL: parse ://test: missing protocol scheme" === RUN TestAdapterOperation/invalid-URL-returned/GetUploadURL time="2021-05-20T15:29:37Z" level=error msg="error while parsing generated URL: parse ://test: missing protocol scheme" --- PASS: TestAdapterOperation (0.00s) --- PASS: TestAdapterOperation/valid-configuration (0.00s) --- PASS: TestAdapterOperation/valid-configuration/GetDownloadURL (0.00s) adapter_test.go:218: PASS: Resolve() adapter_test.go:218: PASS: Credentials() --- PASS: TestAdapterOperation/valid-configuration/GetUploadURL (0.00s) adapter_test.go:218: PASS: Resolve() adapter_test.go:218: PASS: Credentials() --- PASS: TestAdapterOperation/error-on-URL-signing (0.00s) --- PASS: TestAdapterOperation/error-on-URL-signing/GetDownloadURL (0.00s) adapter_test.go:218: PASS: Resolve() adapter_test.go:218: PASS: Credentials() --- PASS: TestAdapterOperation/error-on-URL-signing/GetUploadURL (0.00s) adapter_test.go:218: PASS: Resolve() adapter_test.go:218: PASS: Credentials() --- PASS: TestAdapterOperation/invalid-URL-returned (0.00s) --- PASS: 
TestAdapterOperation/invalid-URL-returned/GetDownloadURL (0.00s) adapter_test.go:218: PASS: Resolve() adapter_test.go:218: PASS: Credentials() --- PASS: TestAdapterOperation/invalid-URL-returned/GetUploadURL (0.00s) adapter_test.go:218: PASS: Resolve() adapter_test.go:218: PASS: Credentials() === RUN TestDefaultCredentialsResolver === RUN TestDefaultCredentialsResolver/credentials_in_both_places_-_credentials_file_takes_precedence === RUN TestDefaultCredentialsResolver/credentials_in_non-existing_credentials_file === RUN TestDefaultCredentialsResolver/credentials_in_credentials_file_-_invalid_JSON === RUN TestDefaultCredentialsResolver/config_is_nil === RUN TestDefaultCredentialsResolver/credentials_not_set === RUN TestDefaultCredentialsResolver/credentials_direct_in_config === RUN TestDefaultCredentialsResolver/credentials_in_credentials_file_-_service_account_file === RUN TestDefaultCredentialsResolver/credentials_in_credentials_file_-_unsupported_type_credentials_file --- PASS: TestDefaultCredentialsResolver (0.00s) --- PASS: TestDefaultCredentialsResolver/credentials_in_both_places_-_credentials_file_takes_precedence (0.00s) --- PASS: TestDefaultCredentialsResolver/credentials_in_non-existing_credentials_file (0.00s) --- PASS: TestDefaultCredentialsResolver/credentials_in_credentials_file_-_invalid_JSON (0.00s) --- PASS: TestDefaultCredentialsResolver/config_is_nil (0.00s) --- PASS: TestDefaultCredentialsResolver/credentials_not_set (0.00s) --- PASS: TestDefaultCredentialsResolver/credentials_direct_in_config (0.00s) --- PASS: TestDefaultCredentialsResolver/credentials_in_credentials_file_-_service_account_file (0.00s) --- PASS: TestDefaultCredentialsResolver/credentials_in_credentials_file_-_unsupported_type_credentials_file (0.00s) PASS coverage: 2.6% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/cache/gcs 0.021s coverage: 2.6% of statements in gitlab.com/gitlab-org/gitlab-runner/...  
--- Starting part 0 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/cache/s3' package with coverprofile in 'count' mode:  go: downloading github.com/minio/minio-go/v6 v6.0.57 go: extracting github.com/minio/minio-go/v6 v6.0.57 go: downloading github.com/minio/sha256-simd v0.1.1 go: downloading github.com/minio/md5-simd v1.1.1 go: downloading github.com/mitchellh/go-homedir v1.1.0 go: downloading gopkg.in/ini.v1 v1.62.0 go: extracting github.com/mitchellh/go-homedir v1.1.0 go: extracting github.com/minio/sha256-simd v0.1.1 go: extracting gopkg.in/ini.v1 v1.62.0 go: extracting github.com/minio/md5-simd v1.1.1 go: downloading github.com/klauspost/cpuid v1.3.1 go: extracting github.com/klauspost/cpuid v1.3.1 go: finding github.com/minio/minio-go/v6 v6.0.57 go: finding github.com/minio/sha256-simd v0.1.1 go: finding github.com/mitchellh/go-homedir v1.1.0 go: finding gopkg.in/ini.v1 v1.62.0 go: finding github.com/minio/md5-simd v1.1.1 go: finding github.com/klauspost/cpuid v1.3.1 === RUN TestCacheOperation === RUN TestCacheOperation/error-on-presigning-url === RUN TestCacheOperation/error-on-presigning-url/GetDownloadURL time="2021-05-20T15:29:41Z" level=error msg="error while generating S3 pre-signed URL" error="test error" === RUN TestCacheOperation/error-on-presigning-url/GetUploadURL time="2021-05-20T15:29:41Z" level=error msg="error while generating S3 pre-signed URL" error="test error" === RUN TestCacheOperation/presigned-url === RUN TestCacheOperation/presigned-url/GetDownloadURL === RUN TestCacheOperation/presigned-url/GetUploadURL === RUN TestCacheOperation/error-on-minio-client-initialization === RUN TestCacheOperation/error-on-minio-client-initialization/GetDownloadURL === RUN TestCacheOperation/error-on-minio-client-initialization/GetUploadURL --- PASS: TestCacheOperation (0.00s) --- PASS: TestCacheOperation/error-on-presigning-url (0.00s) --- PASS: TestCacheOperation/error-on-presigning-url/GetDownloadURL (0.00s) --- PASS: 
TestCacheOperation/error-on-presigning-url/GetUploadURL (0.00s) --- PASS: TestCacheOperation/presigned-url (0.00s) --- PASS: TestCacheOperation/presigned-url/GetDownloadURL (0.00s) --- PASS: TestCacheOperation/presigned-url/GetUploadURL (0.00s) --- PASS: TestCacheOperation/error-on-minio-client-initialization (0.00s) --- PASS: TestCacheOperation/error-on-minio-client-initialization/GetDownloadURL (0.00s) --- PASS: TestCacheOperation/error-on-minio-client-initialization/GetUploadURL (0.00s) === RUN TestNoConfiguration --- PASS: TestNoConfiguration (0.00s) === RUN TestGetCredentials === RUN TestGetCredentials/empty_access_key === RUN TestGetCredentials/empty_secret_key === RUN TestGetCredentials/no_S3_credentials === RUN TestGetCredentials/static_credentials === RUN TestGetCredentials/empty_access_and_secret_key --- PASS: TestGetCredentials (0.00s) --- PASS: TestGetCredentials/empty_access_key (0.00s) --- PASS: TestGetCredentials/empty_secret_key (0.00s) --- PASS: TestGetCredentials/no_S3_credentials (0.00s) --- PASS: TestGetCredentials/static_credentials (0.00s) --- PASS: TestGetCredentials/empty_access_and_secret_key (0.00s) === RUN TestMinioClientInitialization === RUN TestMinioClientInitialization/serverAddress-empty === RUN TestMinioClientInitialization/secretKey-empty === RUN TestMinioClientInitialization/only-AccessKey-defined === RUN TestMinioClientInitialization/should-use-explicit-credentials === RUN TestMinioClientInitialization/should-use-explicit-credentials-with-insecure === RUN TestMinioClientInitialization/error-on-initialization === RUN TestMinioClientInitialization/all-credentials-empty === RUN TestMinioClientInitialization/accessKey-empty === RUN TestMinioClientInitialization/only-ServerAddress-defined === RUN TestMinioClientInitialization/only-SecretKey-defined --- PASS: TestMinioClientInitialization (0.00s) --- PASS: TestMinioClientInitialization/serverAddress-empty (0.00s) --- PASS: TestMinioClientInitialization/secretKey-empty (0.00s) --- PASS: 
TestMinioClientInitialization/only-AccessKey-defined (0.00s) --- PASS: TestMinioClientInitialization/should-use-explicit-credentials (0.00s) --- PASS: TestMinioClientInitialization/should-use-explicit-credentials-with-insecure (0.00s) --- PASS: TestMinioClientInitialization/error-on-initialization (0.00s) --- PASS: TestMinioClientInitialization/all-credentials-empty (0.00s) --- PASS: TestMinioClientInitialization/accessKey-empty (0.00s) --- PASS: TestMinioClientInitialization/only-ServerAddress-defined (0.00s) --- PASS: TestMinioClientInitialization/only-SecretKey-defined (0.00s) PASS coverage: 2.6% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/cache/s3 0.020s coverage: 2.6% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 0 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/commands' package with coverprofile in 'count' mode:  go: downloading github.com/imdario/mergo v0.3.7 go: downloading github.com/docker/cli v20.10.2+incompatible go: downloading github.com/ayufan/golang-kardianos-service v0.0.0-20160429143213-0c8eb6d8fff2 go: downloading github.com/getsentry/raven-go v0.0.0-20160518204710-dffeb57df75d go: downloading gitlab.com/gitlab-org/gitlab-terminal v0.0.0-20210104151801-2a71b03b4462 go: downloading github.com/kr/pty v1.1.1 go: downloading github.com/kardianos/osext v0.0.0-20160811001526-c2c54e542fb7 go: extracting github.com/imdario/mergo v0.3.7 go: extracting github.com/kardianos/osext v0.0.0-20160811001526-c2c54e542fb7 go: extracting github.com/ayufan/golang-kardianos-service v0.0.0-20160429143213-0c8eb6d8fff2 go: extracting github.com/getsentry/raven-go v0.0.0-20160518204710-dffeb57df75d go: downloading github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa go: downloading k8s.io/client-go v11.0.1-0.20191004102930-01520b8320fc+incompatible go: extracting github.com/kr/pty v1.1.1 go: downloading github.com/bmatcuk/doublestar v1.3.0 go: extracting 
gitlab.com/gitlab-org/gitlab-terminal v0.0.0-20210104151801-2a71b03b4462 go: downloading github.com/hashicorp/go-version v1.2.1 go: downloading github.com/jpillora/backoff v0.0.0-20170222002228-06c7a16c845d go: extracting github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa go: extracting github.com/hashicorp/go-version v1.2.1 go: extracting github.com/jpillora/backoff v0.0.0-20170222002228-06c7a16c845d go: extracting github.com/bmatcuk/doublestar v1.3.0 go: extracting github.com/docker/cli v20.10.2+incompatible go: extracting k8s.io/client-go v11.0.1-0.20191004102930-01520b8320fc+incompatible go: downloading golang.org/x/sync v0.0.0-20201207232520-09787c993a3a go: downloading github.com/Azure/go-autorest v14.2.0+incompatible go: downloading k8s.io/utils v0.0.0-20190923111123-69764acb6e8e go: extracting github.com/Azure/go-autorest v14.2.0+incompatible go: downloading golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 go: downloading github.com/Azure/go-autorest/autorest v0.11.12 go: extracting golang.org/x/sync v0.0.0-20201207232520-09787c993a3a go: downloading github.com/gophercloud/gophercloud v0.0.0-20180425001159-e25975f29734 go: extracting k8s.io/utils v0.0.0-20190923111123-69764acb6e8e go: extracting github.com/Azure/go-autorest/autorest v0.11.12 go: extracting golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 go: downloading github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96 go: downloading github.com/Azure/go-autorest/autorest/adal v0.9.6 go: downloading github.com/Azure/go-autorest/tracing v0.6.0 go: downloading github.com/googleapis/gnostic v0.1.0 go: downloading github.com/spf13/pflag v1.0.3 go: downloading sigs.k8s.io/yaml v1.1.0 go: downloading github.com/Azure/go-autorest/logger v0.2.0 go: downloading github.com/docker/docker-credential-helpers v0.4.1 go: extracting github.com/Azure/go-autorest/autorest/adal v0.9.6 go: extracting github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96 go: downloading 
github.com/form3tech-oss/jwt-go v3.2.2+incompatible go: extracting github.com/Azure/go-autorest/tracing v0.6.0 go: downloading github.com/Azure/go-autorest/autorest/date v0.3.0 go: extracting github.com/Azure/go-autorest/autorest/date v0.3.0 go: extracting github.com/spf13/pflag v1.0.3 go: extracting github.com/googleapis/gnostic v0.1.0 go: extracting github.com/docker/docker-credential-helpers v0.4.1 go: extracting github.com/Azure/go-autorest/logger v0.2.0 go: extracting sigs.k8s.io/yaml v1.1.0 go: extracting github.com/form3tech-oss/jwt-go v3.2.2+incompatible go: extracting github.com/gophercloud/gophercloud v0.0.0-20180425001159-e25975f29734 go: finding github.com/ayufan/golang-kardianos-service v0.0.0-20160429143213-0c8eb6d8fff2 go: finding github.com/imdario/mergo v0.3.7 go: finding github.com/kardianos/osext v0.0.0-20160811001526-c2c54e542fb7 go: finding github.com/bmatcuk/doublestar v1.3.0 go: finding github.com/kr/pty v1.1.1 go: finding github.com/docker/cli v20.10.2+incompatible go: finding github.com/getsentry/raven-go v0.0.0-20160518204710-dffeb57df75d go: finding gitlab.com/gitlab-org/gitlab-terminal v0.0.0-20210104151801-2a71b03b4462 go: finding github.com/jpillora/backoff v0.0.0-20170222002228-06c7a16c845d go: finding github.com/docker/docker-credential-helpers v0.4.1 go: finding github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa go: finding github.com/hashicorp/go-version v1.2.1 go: finding golang.org/x/sync v0.0.0-20201207232520-09787c993a3a go: finding k8s.io/client-go v11.0.1-0.20191004102930-01520b8320fc+incompatible go: finding github.com/googleapis/gnostic v0.1.0 go: finding sigs.k8s.io/yaml v1.1.0 go: finding golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 go: finding k8s.io/utils v0.0.0-20190923111123-69764acb6e8e go: finding github.com/Azure/go-autorest/autorest v0.11.12 go: finding github.com/Azure/go-autorest/autorest/adal v0.9.6 go: finding github.com/Azure/go-autorest/autorest/date v0.3.0 go: finding 
github.com/Azure/go-autorest/tracing v0.6.0 go: finding github.com/form3tech-oss/jwt-go v3.2.2+incompatible go: finding github.com/Azure/go-autorest/logger v0.2.0 go: finding github.com/gophercloud/gophercloud v0.0.0-20180425001159-e25975f29734 go: finding github.com/spf13/pflag v1.0.3 go: finding github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96 === RUN TestBuildsHelperCollect time="2021-05-20T15:30:11Z" level=warning msg="Error while executing file based variables removal script" error="context canceled" job=0 project=0 --- PASS: TestBuildsHelperCollect (0.01s) === RUN TestBuildsHelperAcquireRequestWithLimit --- PASS: TestBuildsHelperAcquireRequestWithLimit (0.00s) === RUN TestBuildsHelperAcquireRequestWithDefault --- PASS: TestBuildsHelperAcquireRequestWithDefault (0.00s) === RUN TestBuildsHelperAcquireBuildWithLimit --- PASS: TestBuildsHelperAcquireBuildWithLimit (0.00s) === RUN TestBuildsHelperAcquireBuildUnlimited --- PASS: TestBuildsHelperAcquireBuildUnlimited (0.00s) === RUN TestBuildsHelperFindSessionByURL --- PASS: TestBuildsHelperFindSessionByURL (0.00s) === RUN TestBuildsHelper_ListJobsHandler === RUN TestBuildsHelper_ListJobsHandler/no_jobs === RUN TestBuildsHelper_ListJobsHandler/job_exists --- PASS: TestBuildsHelper_ListJobsHandler (0.00s) --- PASS: TestBuildsHelper_ListJobsHandler/no_jobs (0.00s) --- PASS: TestBuildsHelper_ListJobsHandler/job_exists (0.00s) === RUN TestMetricsServer === RUN TestMetricsServer/address-set-with-port-from-cli === RUN TestMetricsServer/address-set-with-port-from-config === RUN TestMetricsServer/address-is-empty-from-cli === RUN TestMetricsServer/address-is-empty-from-config === RUN TestMetricsServer/address-is-invalid-from-cli === RUN TestMetricsServer/address-is-invalid-from-config === RUN TestMetricsServer/address-not-set-from-cli === RUN TestMetricsServer/address-not-set-from-config === RUN TestMetricsServer/address-set-without-port-from-cli === RUN TestMetricsServer/address-set-without-port-from-config 
=== RUN TestMetricsServer/port-set-without-address-from-cli === RUN TestMetricsServer/port-set-without-address-from-config --- PASS: TestMetricsServer (0.00s) --- PASS: TestMetricsServer/address-set-with-port-from-cli (0.00s) --- PASS: TestMetricsServer/address-set-with-port-from-config (0.00s) --- PASS: TestMetricsServer/address-is-empty-from-cli (0.00s) --- PASS: TestMetricsServer/address-is-empty-from-config (0.00s) --- PASS: TestMetricsServer/address-is-invalid-from-cli (0.00s) --- PASS: TestMetricsServer/address-is-invalid-from-config (0.00s) --- PASS: TestMetricsServer/address-not-set-from-cli (0.00s) --- PASS: TestMetricsServer/address-not-set-from-config (0.00s) --- PASS: TestMetricsServer/address-set-without-port-from-cli (0.00s) --- PASS: TestMetricsServer/address-set-without-port-from-config (0.00s) --- PASS: TestMetricsServer/port-set-without-address-from-cli (0.00s) --- PASS: TestMetricsServer/port-set-without-address-from-config (0.00s) === RUN TestProcessRunner_BuildLimit --- PASS: TestProcessRunner_BuildLimit (9.01s) multi_test.go:132: PASS: Acquire(string) multi_test.go:132: PASS: Release(string,string) multi_test.go:132: PASS: CanCreate() multi_test.go:132: PASS: GetDefaultShell() multi_test.go:132: PASS: GetFeatures(string) multi_test.go:132: PASS: Create() multi_test.go:132: PASS: Prepare(string,string,string) multi_test.go:132: PASS: Cleanup() multi_test.go:132: PASS: Shell() multi_test.go:132: PASS: Finish(string) multi_test.go:132: PASS: Run(string) multi_test.go:132: PASS: RequestJob(string,string,string) multi_test.go:132: PASS: ProcessJob(string,string) multi_test.go:132: PASS: SetFailuresCollector(string) multi_test.go:132: PASS: Write(string) multi_test.go:132: PASS: IsStdout() multi_test.go:132: PASS: SetCancelFunc(string) multi_test.go:132: PASS: SetAbortFunc(string) multi_test.go:132: PASS: SetMasked(string) multi_test.go:132: PASS: Success() === RUN TestRunCommand_doJobRequest === RUN 
TestRunCommand_doJobRequest/requestJob_returns_immediately === RUN TestRunCommand_doJobRequest/requestJob_hangs_indefinitely === RUN TestRunCommand_doJobRequest/requestJob_interrupted_by_interrupt_signal === RUN TestRunCommand_doJobRequest/runFinished_signal_is_passed --- PASS: TestRunCommand_doJobRequest (1.00s) --- PASS: TestRunCommand_doJobRequest/requestJob_returns_immediately (0.00s) multi_test.go:209: PASS: RequestJob(string,common.RunnerConfig,string) --- PASS: TestRunCommand_doJobRequest/requestJob_hangs_indefinitely (1.00s) multi_test.go:206: PASS: RequestJob(string,common.RunnerConfig,string) --- PASS: TestRunCommand_doJobRequest/requestJob_interrupted_by_interrupt_signal (0.00s) multi_test.go:209: PASS: RequestJob(string,common.RunnerConfig,string) --- PASS: TestRunCommand_doJobRequest/runFinished_signal_is_passed (0.00s) multi_test.go:209: PASS: RequestJob(string,common.RunnerConfig,string) PASS coverage: 11.5% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/commands 10.062s coverage: 11.5% of statements in gitlab.com/gitlab-org/gitlab-runner/...  
--- Starting part 1 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/commands' package with coverprofile in 'count' mode:  === RUN TestRegisterDefaultDockerCacheVolume --- PASS: TestRegisterDefaultDockerCacheVolume (0.00s) === RUN TestDoNotRegisterDefaultDockerCacheVolumeWhenDisableCache --- PASS: TestDoNotRegisterDefaultDockerCacheVolumeWhenDisableCache (0.00s) === RUN TestRegisterCustomDockerCacheVolume --- PASS: TestRegisterCustomDockerCacheVolume (0.00s) === RUN TestRegisterCustomMappedDockerCacheVolume --- PASS: TestRegisterCustomMappedDockerCacheVolume (0.00s) === RUN TestConfigTemplate_Enabled === RUN TestConfigTemplate_Enabled/configuration_file_defined === RUN TestConfigTemplate_Enabled/configuration_file_not_defined --- PASS: TestConfigTemplate_Enabled (0.00s) --- PASS: TestConfigTemplate_Enabled/configuration_file_defined (0.00s) --- PASS: TestConfigTemplate_Enabled/configuration_file_not_defined (0.00s) === RUN TestConfigTemplate_MergeTo === RUN TestConfigTemplate_MergeTo/invalid_template_file === RUN TestConfigTemplate_MergeTo/no_runners_in_template === RUN TestConfigTemplate_MergeTo/multiple_runners_in_template === RUN TestConfigTemplate_MergeTo/template_doesn't_overwrite_existing_settings === RUN TestConfigTemplate_MergeTo/template_adds_additional_content === RUN TestConfigTemplate_MergeTo/error_on_merging --- PASS: TestConfigTemplate_MergeTo (0.00s) --- PASS: TestConfigTemplate_MergeTo/invalid_template_file (0.00s) --- PASS: TestConfigTemplate_MergeTo/no_runners_in_template (0.00s) --- PASS: TestConfigTemplate_MergeTo/multiple_runners_in_template (0.00s) --- PASS: TestConfigTemplate_MergeTo/template_doesn't_overwrite_existing_settings (0.00s) --- PASS: TestConfigTemplate_MergeTo/template_adds_additional_content (0.00s) --- PASS: TestConfigTemplate_MergeTo/error_on_merging (0.00s) === RUN TestSingleRunnerSigquit time="2021-05-20T15:30:24Z" level=info msg="Starting runner for http://example.com with token _test_to ..." 
time="2021-05-20T15:30:24Z" level=warning msg="Requested quit, waiting for builds to finish" time="2021-05-20T15:30:25Z" level=info msg="This runner has processed its build limit, so now exiting" --- PASS: TestSingleRunnerSigquit (1.01s) single_test.go:115: PASS: Prepare(string,string,string) single_test.go:115: PASS: Finish() single_test.go:115: PASS: Cleanup() single_test.go:115: PASS: Shell() single_test.go:115: PASS: Run(string) single_test.go:116: PASS: CanCreate() single_test.go:116: PASS: GetDefaultShell() single_test.go:116: PASS: GetFeatures(string) single_test.go:116: PASS: Create() single_test.go:116: PASS: Acquire(string) single_test.go:116: PASS: Release(string,string) single_test.go:117: PASS: RequestJob(string,string,string) single_test.go:117: PASS: ProcessJob(string,string) === RUN TestSingleRunnerMaxBuilds time="2021-05-20T15:30:25Z" level=info msg="Starting runner for http://example.com with token _test_to ..." time="2021-05-20T15:30:25Z" level=info msg="This runner has processed its build limit, so now exiting" --- PASS: TestSingleRunnerMaxBuilds (0.02s) single_test.go:115: PASS: Prepare(string,string,string) single_test.go:115: PASS: Finish() single_test.go:115: PASS: Cleanup() single_test.go:115: PASS: Shell() single_test.go:115: PASS: Run(string) single_test.go:116: PASS: CanCreate() single_test.go:116: PASS: GetDefaultShell() single_test.go:116: PASS: GetFeatures(string) single_test.go:116: PASS: Create() single_test.go:116: PASS: Acquire(string) single_test.go:116: PASS: Release(string,string) single_test.go:117: PASS: RequestJob(string,string,string) single_test.go:117: PASS: ProcessJob(string,string) === RUN TestAccessLevelSetting === RUN TestAccessLevelSetting/access_level_not_defined time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. 
Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAccessLevelSetting/ref_protected_used time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAccessLevelSetting/not_protected_used time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAccessLevelSetting/unknown_access_level time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info time="2021-05-20T15:30:25Z" level=panic msg="Given access-level is not valid. Refer to gitlab-runner register -h for the correct options." --- PASS: TestAccessLevelSetting (0.01s) --- PASS: TestAccessLevelSetting/access_level_not_defined (0.00s) register_integration_test.go:80: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAccessLevelSetting/ref_protected_used (0.00s) register_integration_test.go:80: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAccessLevelSetting/not_protected_used (0.00s) register_integration_test.go:80: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAccessLevelSetting/unknown_access_level (0.00s) === RUN TestAskRunnerOverrideDefaultsForExecutors === RUN TestAskRunnerOverrideDefaultsForExecutors/kubernetes === RUN TestAskRunnerOverrideDefaultsForExecutors/kubernetes/basic_answers time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: ssh, virtualbox, kubernetes, test-max-build, docker, docker-ssh, shell, docker+machine, docker-ssh+machine, test-sigquit, custom, parallels: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/kubernetes/basic_arguments,_accepting_provided time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: ssh, virtualbox, kubernetes, test-max-build, docker, docker-ssh, shell, docker+machine, docker-ssh+machine, test-sigquit, custom, parallels: [kubernetes]: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/kubernetes/basic_arguments_override time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, custom, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, docker: [kubernetes]: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/kubernetes/untagged_implicit time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: test-max-build, docker, docker-ssh, ssh, virtualbox, kubernetes, test-sigquit, custom, parallels, shell, docker+machine, docker-ssh+machine: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/kubernetes/untagged_explicit time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/kubernetes/untagged_explicit_with_tags_provided time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker-ssh+machine, test-sigquit, custom, parallels, shell, docker+machine, kubernetes, test-max-build, docker, docker-ssh, ssh, virtualbox: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker+machine === RUN TestAskRunnerOverrideDefaultsForExecutors/docker+machine/untagged_explicit_with_tags_provided time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker-ssh+machine, test-sigquit, custom, parallels, shell, docker+machine, kubernetes, test-max-build, docker, docker-ssh, ssh, virtualbox: Enter the default Docker image (for example, ruby:2.6): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker+machine/basic_answers time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker+machine, docker-ssh+machine, test-sigquit, custom, parallels, shell, virtualbox, kubernetes, test-max-build, docker, docker-ssh, ssh: Enter the default Docker image (for example, ruby:2.6): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker+machine/basic_arguments,_accepting_provided time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: shell, docker+machine, docker-ssh+machine, test-sigquit, custom, parallels, ssh, virtualbox, kubernetes, test-max-build, docker, docker-ssh: [docker+machine]: Enter the default Docker image (for example, ruby:2.6): [busybox:latest]: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker+machine/basic_arguments_override time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: virtualbox, kubernetes, test-max-build, docker, docker-ssh, ssh, docker+machine, docker-ssh+machine, test-sigquit, custom, parallels, shell: [docker+machine]: Enter the default Docker image (for example, ruby:2.6): [busybox:latest]: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker+machine/untagged_implicit time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit: Enter the default Docker image (for example, ruby:2.6): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker+machine/untagged_explicit time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: kubernetes, test-max-build, docker, docker-ssh, ssh, virtualbox, docker-ssh+machine, test-sigquit, custom, parallels, shell, docker+machine: Enter the default Docker image (for example, ruby:2.6): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine === RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine/basic_arguments,_accepting_provided time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build: [docker-ssh+machine]: Enter the default Docker image (for example, ruby:2.6): [busybox:latest]: Enter the SSH user (for example, root): [user]: Enter the SSH password (for example, docker.io): [password]: Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): [/home/user/.ssh/id_rsa]: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine/basic_arguments_override time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit: [docker-ssh+machine]: Enter the default Docker image (for example, ruby:2.6): [busybox:latest]: Enter the SSH user (for example, root): [user]: Enter the SSH password (for example, docker.io): [password]: Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): [/home/user/.ssh/id_rsa]: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. 
Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine/untagged_implicit time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker-ssh, ssh, virtualbox, kubernetes, test-max-build, docker, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, custom: Enter the default Docker image (for example, ruby:2.6): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine/untagged_explicit time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build: Enter the default Docker image (for example, ruby:2.6): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. 
Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine/untagged_explicit_with_tags_provided time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build: Enter the default Docker image (for example, ruby:2.6): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine/basic_answers time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: virtualbox, kubernetes, test-max-build, docker, docker-ssh, ssh, docker+machine, docker-ssh+machine, test-sigquit, custom, parallels, shell: Enter the default Docker image (for example, ruby:2.6): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker === RUN TestAskRunnerOverrideDefaultsForExecutors/docker/basic_arguments_override time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: shell, docker+machine, docker-ssh+machine, test-sigquit, custom, parallels, ssh, virtualbox, kubernetes, test-max-build, docker, docker-ssh: [docker]: Enter the default Docker image (for example, ruby:2.6): [busybox:latest]: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker/untagged_implicit time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit: Enter the default Docker image (for example, ruby:2.6): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker/untagged_explicit time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: test-max-build, docker, docker-ssh, ssh, virtualbox, kubernetes, test-sigquit, custom, parallels, shell, docker+machine, docker-ssh+machine: Enter the default Docker image (for example, ruby:2.6): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker/untagged_explicit_with_tags_provided time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: test-max-build, docker, docker-ssh, ssh, virtualbox, kubernetes, test-sigquit, custom, parallels, shell, docker+machine, docker-ssh+machine: Enter the default Docker image (for example, ruby:2.6): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker/basic_answers time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build: Enter the default Docker image (for example, ruby:2.6): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker/basic_arguments,_accepting_provided time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: docker+machine, docker-ssh+machine, test-sigquit, custom, parallels, shell, virtualbox, kubernetes, test-max-build, docker, docker-ssh, ssh: [docker]: Enter the default Docker image (for example, ruby:2.6): [busybox:latest]: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh === RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh/basic_arguments_override time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: test-sigquit, custom, parallels, shell, docker+machine, docker-ssh+machine, test-max-build, docker, docker-ssh, ssh, virtualbox, kubernetes: [docker-ssh]: Enter the default Docker image (for example, ruby:2.6): [busybox:latest]: Enter the SSH user (for example, root): [user]: Enter the SSH password (for example, docker.io): [password]: Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): [/home/user/.ssh/id_rsa]: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" 
=== RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh/untagged_implicit time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: kubernetes, test-max-build, docker, docker-ssh, ssh, virtualbox, docker-ssh+machine, test-sigquit, custom, parallels, shell, docker+machine: Enter the default Docker image (for example, ruby:2.6): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh/untagged_explicit time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build: Enter the default Docker image (for example, ruby:2.6): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" 
=== RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh/untagged_explicit_with_tags_provided time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker+machine, docker-ssh+machine, test-sigquit, custom, parallels, shell, virtualbox, kubernetes, test-max-build, docker, docker-ssh, ssh: Enter the default Docker image (for example, ruby:2.6): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh/basic_answers time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build: Enter the default Docker image (for example, ruby:2.6): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" 
=== RUN TestAskRunnerOverrideDefaultsForExecutors/docker-ssh/basic_arguments,_accepting_provided time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: kubernetes, test-max-build, docker, docker-ssh, ssh, virtualbox, docker-ssh+machine, test-sigquit, custom, parallels, shell, docker+machine: [docker-ssh]: Enter the default Docker image (for example, ruby:2.6): [busybox:latest]: Enter the SSH user (for example, root): [user]: Enter the SSH password (for example, docker.io): [password]: Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): [/home/user/.ssh/id_rsa]: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/ssh === RUN TestAskRunnerOverrideDefaultsForExecutors/ssh/basic_answers time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit: Enter the SSH server address (for example, my.server.com): Enter the SSH server port (for example, 22): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/ssh/basic_arguments,_accepting_provided time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build: [ssh]: Enter the SSH server address (for example, my.server.com): [gitlab.example.com]: Enter the SSH server port (for example, 22): [22]: Enter the SSH user (for example, root): [user]: Enter the SSH password (for example, docker.io): [password]: Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): [/home/user/.ssh/id_rsa]: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. 
Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/ssh/basic_arguments_override time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: test-max-build, docker, docker-ssh, ssh, virtualbox, kubernetes, test-sigquit, custom, parallels, shell, docker+machine, docker-ssh+machine: [ssh]: Enter the SSH server address (for example, my.server.com): [gitlab.example.com]: Enter the SSH server port (for example, 22): [22]: Enter the SSH user (for example, root): [user]: Enter the SSH password (for example, docker.io): [password]: Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): [/home/user/.ssh/id_rsa]: time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/ssh/untagged_implicit time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, custom, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, docker: Enter the SSH server address (for example, my.server.com): Enter the SSH server port (for example, 22): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/ssh/untagged_explicit time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit: Enter the SSH server address (for example, my.server.com): Enter the SSH server port (for example, 22): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:25Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" 
=== RUN TestAskRunnerOverrideDefaultsForExecutors/ssh/untagged_explicit_with_tags_provided time="2021-05-20T15:30:25Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:25Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: shell, docker+machine, docker-ssh+machine, test-sigquit, custom, parallels, ssh, virtualbox, kubernetes, test-max-build, docker, docker-ssh: Enter the SSH server address (for example, my.server.com): Enter the SSH server port (for example, 22): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/custom === RUN TestAskRunnerOverrideDefaultsForExecutors/custom/basic_arguments_override time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: kubernetes, test-max-build, docker, docker-ssh, ssh, virtualbox, docker-ssh+machine, test-sigquit, custom, parallels, shell, docker+machine: [custom]: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" 
=== RUN TestAskRunnerOverrideDefaultsForExecutors/custom/untagged_implicit time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: ssh, virtualbox, kubernetes, test-max-build, docker, docker-ssh, shell, docker+machine, docker-ssh+machine, test-sigquit, custom, parallels: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/custom/untagged_explicit time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/custom/untagged_explicit_with_tags_provided time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker-ssh, ssh, virtualbox, kubernetes, test-max-build, docker, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, custom: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/custom/basic_answers time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker-ssh+machine, test-sigquit, custom, parallels, shell, docker+machine, kubernetes, test-max-build, docker, docker-ssh, ssh, virtualbox: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/custom/basic_arguments,_accepting_provided time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit: [custom]: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/parallels === RUN TestAskRunnerOverrideDefaultsForExecutors/parallels/basic_arguments,_accepting_provided time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: docker-ssh+machine, test-sigquit, custom, parallels, shell, docker+machine, kubernetes, test-max-build, docker, docker-ssh, ssh, virtualbox: [parallels]: Enter the Parallels VM (for example, my-vm): [parallels-vm-name]: Enter the SSH server address (for example, my.server.com): [gitlab.example.com]: Enter the SSH server port (for example, 22): [22]: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/parallels/basic_arguments_override time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: docker-ssh+machine, test-sigquit, custom, parallels, shell, docker+machine, kubernetes, test-max-build, docker, docker-ssh, ssh, virtualbox: [parallels]: Enter the Parallels VM (for example, my-vm): [parallels-vm-name]: Enter the SSH server address (for example, my.server.com): [gitlab.example.com]: Enter the SSH server port (for example, 22): [22]: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/parallels/untagged_implicit time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: test-sigquit, custom, parallels, shell, docker+machine, docker-ssh+machine, test-max-build, docker, docker-ssh, ssh, virtualbox, kubernetes: Enter the Parallels VM (for example, my-vm): Enter the SSH server address (for example, my.server.com): Enter the SSH server port (for example, 22): time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/parallels/untagged_explicit time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, custom, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, docker: Enter the Parallels VM (for example, my-vm): Enter the SSH server address (for example, my.server.com): Enter the SSH server port (for example, 22): time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/parallels/untagged_explicit_with_tags_provided time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, custom, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, docker: Enter the Parallels VM (for example, my-vm): Enter the SSH server address (for example, my.server.com): Enter the SSH server port (for example, 22): time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/parallels/basic_answers time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: virtualbox, kubernetes, test-max-build, docker, docker-ssh, ssh, docker+machine, docker-ssh+machine, test-sigquit, custom, parallels, shell: Enter the Parallels VM (for example, my-vm): Enter the SSH server address (for example, my.server.com): Enter the SSH server port (for example, 22): time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/virtualbox === RUN TestAskRunnerOverrideDefaultsForExecutors/virtualbox/untagged_explicit time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit: Enter the VirtualBox VM (for example, my-vm): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/virtualbox/untagged_explicit_with_tags_provided time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker+machine, docker-ssh+machine, test-sigquit, custom, parallels, shell, virtualbox, kubernetes, test-max-build, docker, docker-ssh, ssh: Enter the VirtualBox VM (for example, my-vm): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/virtualbox/basic_answers time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit: Enter the VirtualBox VM (for example, my-vm): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/virtualbox/basic_arguments,_accepting_provided time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit: [virtualbox]: Enter the VirtualBox VM (for example, my-vm): [virtualbox-vm-name]: Enter the SSH user (for example, root): [user]: Enter the SSH password (for example, docker.io): [password]: Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): [/home/user/.ssh/id_rsa]: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/virtualbox/basic_arguments_override time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit: [virtualbox]: Enter the VirtualBox VM (for example, my-vm): [virtualbox-vm-name]: Enter the SSH user (for example, root): [user]: Enter the SSH password (for example, docker.io): [password]: Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): [/home/user/.ssh/id_rsa]: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" 
=== RUN TestAskRunnerOverrideDefaultsForExecutors/virtualbox/untagged_implicit time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build: Enter the VirtualBox VM (for example, my-vm): Enter the SSH user (for example, root): Enter the SSH password (for example, docker.io): Enter the path to the SSH identity file (for example, /home/user/.ssh/id_rsa): time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/shell === RUN TestAskRunnerOverrideDefaultsForExecutors/shell/untagged_implicit time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: test-max-build, docker, docker-ssh, ssh, virtualbox, kubernetes, test-sigquit, custom, parallels, shell, docker+machine, docker-ssh+machine: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/shell/untagged_explicit time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, custom, parallels, shell, docker+machine, docker-ssh+machine, test-sigquit: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/shell/untagged_explicit_with_tags_provided time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: virtualbox, kubernetes, test-max-build, docker, docker-ssh, ssh, docker+machine, docker-ssh+machine, test-sigquit, custom, parallels, shell: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/shell/basic_answers time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: parallels, shell, docker+machine, docker-ssh+machine, test-sigquit, custom, docker-ssh, ssh, virtualbox, kubernetes, test-max-build, docker: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/shell/basic_arguments,_accepting_provided time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: docker+machine, docker-ssh+machine, test-sigquit, custom, parallels, shell, virtualbox, kubernetes, test-max-build, docker, docker-ssh, ssh: [shell]: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestAskRunnerOverrideDefaultsForExecutors/shell/basic_arguments_override time="2021-05-20T15:30:26Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:26Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [http://gitlab.example.com/]: Enter the registration token: [test-registration-token]: Enter a description for the runner: [name]: Enter tags for the runner (comma-separated): [tag,list]: Enter an executor: test-max-build, docker, docker-ssh, ssh, virtualbox, kubernetes, test-sigquit, custom, parallels, shell, docker+machine, docker-ssh+machine: [shell]: time="2021-05-20T15:30:26Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" --- PASS: TestAskRunnerOverrideDefaultsForExecutors (0.51s) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/kubernetes (0.02s) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/kubernetes/basic_answers (0.00s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/kubernetes/basic_arguments,_accepting_provided (0.00s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/kubernetes/basic_arguments_override (0.00s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/kubernetes/untagged_implicit (0.00s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/kubernetes/untagged_explicit (0.00s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/kubernetes/untagged_explicit_with_tags_provided (0.00s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker+machine (0.03s) --- PASS: 
TestAskRunnerOverrideDefaultsForExecutors/docker+machine/untagged_explicit_with_tags_provided (0.00s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker+machine/basic_answers (0.00s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker+machine/basic_arguments,_accepting_provided (0.00s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker+machine/basic_arguments_override (0.00s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker+machine/untagged_implicit (0.00s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker+machine/untagged_explicit (0.00s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine (0.03s) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine/basic_arguments,_accepting_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine/basic_arguments_override (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine/untagged_implicit (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine/untagged_explicit (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: 
TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine/untagged_explicit_with_tags_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker-ssh+machine/basic_answers (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker (0.05s) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker/basic_arguments_override (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker/untagged_implicit (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker/untagged_explicit (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker/untagged_explicit_with_tags_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker/basic_answers (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker/basic_arguments,_accepting_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker-ssh (0.05s) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker-ssh/basic_arguments_override (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker-ssh/untagged_implicit (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker-ssh/untagged_explicit 
(0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker-ssh/untagged_explicit_with_tags_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker-ssh/basic_answers (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/docker-ssh/basic_arguments,_accepting_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/ssh (0.06s) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/ssh/basic_answers (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/ssh/basic_arguments,_accepting_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/ssh/basic_arguments_override (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/ssh/untagged_implicit (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/ssh/untagged_explicit (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/ssh/untagged_explicit_with_tags_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/custom (0.06s) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/custom/basic_arguments_override (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: 
TestAskRunnerOverrideDefaultsForExecutors/custom/untagged_implicit (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/custom/untagged_explicit (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/custom/untagged_explicit_with_tags_provided (0.02s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/custom/basic_answers (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/custom/basic_arguments,_accepting_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/parallels (0.06s) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/parallels/basic_arguments,_accepting_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/parallels/basic_arguments_override (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/parallels/untagged_implicit (0.02s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/parallels/untagged_explicit (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/parallels/untagged_explicit_with_tags_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/parallels/basic_answers (0.01s) register_integration_test.go:318: PASS: 
RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/virtualbox (0.08s) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/virtualbox/untagged_explicit (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/virtualbox/untagged_explicit_with_tags_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/virtualbox/basic_answers (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/virtualbox/basic_arguments,_accepting_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/virtualbox/basic_arguments_override (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/virtualbox/untagged_implicit (0.02s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/shell (0.07s) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/shell/untagged_implicit (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/shell/untagged_explicit (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/shell/untagged_explicit_with_tags_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/shell/basic_answers (0.02s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: 
TestAskRunnerOverrideDefaultsForExecutors/shell/basic_arguments,_accepting_provided (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) --- PASS: TestAskRunnerOverrideDefaultsForExecutors/shell/basic_arguments_override (0.01s) register_integration_test.go:318: PASS: RegisterRunner(string,mock.argumentMatcher) PASS coverage: 7.6% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/commands 1.575s coverage: 7.6% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 2 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/commands' package with coverprofile in 'count' mode:  === RUN TestUnregisterOnFailure === RUN TestUnregisterOnFailure/registration_succeeds,_runner_left_registered time="2021-05-20T15:30:29Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:29Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: ssh, virtualbox, custom, shell, parallels, docker+machine, docker-ssh+machine, kubernetes, docker, docker-ssh: time="2021-05-20T15:30:29Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" === RUN TestUnregisterOnFailure/registration_fails,_LeaveRunner_is_false,_runner_is_unregistered time="2021-05-20T15:30:29Z" level=info msg="Running in system-mode." 
time="2021-05-20T15:30:29Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: custom, shell, ssh, virtualbox, docker, docker-ssh, parallels, docker+machine, docker-ssh+machine, kubernetes: === RUN TestUnregisterOnFailure/registration_fails,_LeaveRunner_is_true,_runner_left_registered time="2021-05-20T15:30:29Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:29Z" level=info Enter the GitLab instance URL (for example, https://gitlab.com/): [https://gitlab.com]: Enter the registration token: Enter a description for the runner: [runner-ih9xd9p3-project-250833-concurrent-0]: Enter tags for the runner (comma-separated): Enter an executor: docker-ssh+machine, kubernetes, docker, docker-ssh, parallels, docker+machine, custom, shell, ssh, virtualbox: --- PASS: TestUnregisterOnFailure (0.03s) --- PASS: TestUnregisterOnFailure/registration_succeeds,_runner_left_registered (0.01s) register_integration_test.go:604: PASS: RegisterRunner(string,string) --- PASS: TestUnregisterOnFailure/registration_fails,_LeaveRunner_is_false,_runner_is_unregistered (0.01s) register_integration_test.go:582: PASS: RegisterRunner(string,string) register_integration_test.go:582: PASS: UnregisterRunner(mock.argumentMatcher) --- PASS: TestUnregisterOnFailure/registration_fails,_LeaveRunner_is_true,_runner_left_registered (0.01s) register_integration_test.go:582: PASS: RegisterRunner(string,string) === RUN TestRegisterCommand_FeatureFlag time="2021-05-20T15:30:29Z" level=info msg="Running in system-mode." time="2021-05-20T15:30:29Z" level=info time="2021-05-20T15:30:29Z" level=info msg="Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded!" 
--- PASS: TestRegisterCommand_FeatureFlag (0.00s) register_integration_test.go:634: PASS: RegisterRunner(string,string) === RUN TestGetServiceArguments === RUN TestGetServiceArguments/case-0 === RUN TestGetServiceArguments/case-1 === RUN TestGetServiceArguments/case-2 === RUN TestGetServiceArguments/case-3 === RUN TestGetServiceArguments/case-4 === RUN TestGetServiceArguments/case-5 --- PASS: TestGetServiceArguments (0.00s) --- PASS: TestGetServiceArguments/case-0 (0.00s) --- PASS: TestGetServiceArguments/case-1 (0.00s) --- PASS: TestGetServiceArguments/case-2 (0.00s) --- PASS: TestGetServiceArguments/case-3 (0.00s) --- PASS: TestGetServiceArguments/case-4 (0.00s) --- PASS: TestGetServiceArguments/case-5 (0.00s) PASS coverage: 3.3% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/commands 0.064s coverage: 3.3% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 0 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/commands/helpers' package with coverprofile in 'count' mode:  go: downloading gocloud.dev v0.21.1-0.20201223184910-5094f54ed8bb go: downloading github.com/saracen/fastzip v0.1.5 go: downloading github.com/klauspost/pgzip v1.2.5 go: extracting github.com/saracen/fastzip v0.1.5 go: extracting github.com/klauspost/pgzip v1.2.5 go: downloading github.com/klauspost/compress v1.11.6 go: downloading github.com/saracen/zipextra v0.0.0-20201205103923-7347a2ee3f10 go: extracting github.com/saracen/zipextra v0.0.0-20201205103923-7347a2ee3f10 go: extracting gocloud.dev v0.21.1-0.20201223184910-5094f54ed8bb go: downloading golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 go: downloading github.com/google/wire v0.4.0 go: extracting golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 go: extracting github.com/google/wire v0.4.0 go: extracting github.com/klauspost/compress v1.11.6 go: finding github.com/saracen/fastzip v0.1.5 go: finding github.com/klauspost/pgzip v1.2.5 go: finding 
github.com/klauspost/compress v1.11.6 go: finding gocloud.dev v0.21.1-0.20201223184910-5094f54ed8bb go: finding github.com/saracen/zipextra v0.0.0-20201205103923-7347a2ee3f10 go: finding github.com/google/wire v0.4.0 go: finding golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 go: finding github.com/googleapis/gax-go v2.0.2+incompatible === RUN TestCompressionLevel === RUN TestCompressionLevel/default === RUN TestCompressionLevel/#00 === RUN TestCompressionLevel/invalid time="2021-05-20T15:30:34Z" level=warning msg="compression level \"invalid\" is invalid, falling back to default" === RUN TestCompressionLevel/fastest === RUN TestCompressionLevel/fast === RUN TestCompressionLevel/slow === RUN TestCompressionLevel/slowest --- PASS: TestCompressionLevel (0.00s) --- PASS: TestCompressionLevel/default (0.00s) --- PASS: TestCompressionLevel/#00 (0.00s) --- PASS: TestCompressionLevel/invalid (0.00s) --- PASS: TestCompressionLevel/fastest (0.00s) --- PASS: TestCompressionLevel/fast (0.00s) --- PASS: TestCompressionLevel/slow (0.00s) --- PASS: TestCompressionLevel/slowest (0.00s) === RUN TestArtifactsDownloaderRequirements Missing runner credentials--- PASS: TestArtifactsDownloaderRequirements (0.00s) === RUN TestArtifactsDownloader === RUN TestArtifactsDownloader/legacy === RUN TestArtifactsDownloader/legacy/download_forbidden permission denied=== RUN TestArtifactsDownloader/fastzip === RUN TestArtifactsDownloader/fastzip/download_forbidden permission denied=== RUN TestArtifactsDownloader/legacy#01 === RUN TestArtifactsDownloader/legacy#01/retries_are_called WARNING: Retrying...  error=invalid argument WARNING: Retrying...  error=invalid argument invalid argument=== RUN TestArtifactsDownloader/fastzip#01 === RUN TestArtifactsDownloader/fastzip#01/retries_are_called WARNING: Retrying...  error=invalid argument WARNING: Retrying...  
error=invalid argument invalid argument=== RUN TestArtifactsDownloader/legacy#02 === RUN TestArtifactsDownloader/legacy#02/first_try_is_always_direct_download WARNING: Retrying...  error=invalid argument WARNING: Retrying...  error=invalid argument invalid argument=== RUN TestArtifactsDownloader/fastzip#02 === RUN TestArtifactsDownloader/fastzip#02/first_try_is_always_direct_download WARNING: Retrying...  error=invalid argument WARNING: Retrying...  error=invalid argument invalid argument=== RUN TestArtifactsDownloader/legacy#03 === RUN TestArtifactsDownloader/legacy#03/downloads_artifact_without_direct_download_if_requested === RUN TestArtifactsDownloader/fastzip#03 === RUN TestArtifactsDownloader/fastzip#03/downloads_artifact_without_direct_download_if_requested === RUN TestArtifactsDownloader/legacy#04 === RUN TestArtifactsDownloader/legacy#04/downloads_artifact_with_direct_download_if_requested === RUN TestArtifactsDownloader/fastzip#04 === RUN TestArtifactsDownloader/fastzip#04/downloads_artifact_with_direct_download_if_requested === RUN TestArtifactsDownloader/legacy#05 === RUN TestArtifactsDownloader/legacy#05/download_not_found file does not exist=== RUN TestArtifactsDownloader/fastzip#05 === RUN TestArtifactsDownloader/fastzip#05/download_not_found file does not exist--- PASS: TestArtifactsDownloader (0.01s) --- PASS: TestArtifactsDownloader/legacy (0.00s) --- PASS: TestArtifactsDownloader/legacy/download_forbidden (0.00s) --- PASS: TestArtifactsDownloader/fastzip (0.00s) --- PASS: TestArtifactsDownloader/fastzip/download_forbidden (0.00s) --- PASS: TestArtifactsDownloader/legacy#01 (0.00s) --- PASS: TestArtifactsDownloader/legacy#01/retries_are_called (0.00s) --- PASS: TestArtifactsDownloader/fastzip#01 (0.00s) --- PASS: TestArtifactsDownloader/fastzip#01/retries_are_called (0.00s) --- PASS: TestArtifactsDownloader/legacy#02 (0.00s) --- PASS: TestArtifactsDownloader/legacy#02/first_try_is_always_direct_download (0.00s) --- PASS: 
TestArtifactsDownloader/fastzip#02 (0.00s) --- PASS: TestArtifactsDownloader/fastzip#02/first_try_is_always_direct_download (0.00s) --- PASS: TestArtifactsDownloader/legacy#03 (0.00s) --- PASS: TestArtifactsDownloader/legacy#03/downloads_artifact_without_direct_download_if_requested (0.00s) --- PASS: TestArtifactsDownloader/fastzip#03 (0.00s) --- PASS: TestArtifactsDownloader/fastzip#03/downloads_artifact_without_direct_download_if_requested (0.00s) --- PASS: TestArtifactsDownloader/legacy#04 (0.00s) --- PASS: TestArtifactsDownloader/legacy#04/downloads_artifact_with_direct_download_if_requested (0.00s) --- PASS: TestArtifactsDownloader/fastzip#04 (0.00s) --- PASS: TestArtifactsDownloader/fastzip#04/downloads_artifact_with_direct_download_if_requested (0.00s) --- PASS: TestArtifactsDownloader/legacy#05 (0.00s) --- PASS: TestArtifactsDownloader/legacy#05/download_not_found (0.00s) --- PASS: TestArtifactsDownloader/fastzip#05 (0.00s) --- PASS: TestArtifactsDownloader/fastzip#05/download_not_found (0.00s) === RUN TestArtifactsUploaderRequirements Missing runner credentials--- PASS: TestArtifactsUploaderRequirements (0.00s) === RUN TestArtifactsUploaderTooLarge archive_file: found 1 matching files and directories too large--- PASS: TestArtifactsUploaderTooLarge (0.00s) === RUN TestArtifactsUploaderForbidden archive_file: found 1 matching files and directories permission denied--- PASS: TestArtifactsUploaderForbidden (0.00s) === RUN TestArtifactsUploaderRetry === RUN TestArtifactsUploaderRetry/legacy archive_file: found 1 matching files and directories WARNING: Retrying...  context=artifacts-uploader error=invalid argument WARNING: Retrying...  context=artifacts-uploader error=invalid argument invalid argument=== RUN TestArtifactsUploaderRetry/fastzip archive_file: found 1 matching files and directories WARNING: Retrying...  context=artifacts-uploader error=invalid argument WARNING: Retrying...  
context=artifacts-uploader error=invalid argument invalid argument--- PASS: TestArtifactsUploaderRetry (6.00s) --- PASS: TestArtifactsUploaderRetry/legacy (3.00s) --- PASS: TestArtifactsUploaderRetry/fastzip (3.00s) === RUN TestArtifactsUploaderDefaultSucceeded === RUN TestArtifactsUploaderDefaultSucceeded/legacy archive_file: found 1 matching files and directories === RUN TestArtifactsUploaderDefaultSucceeded/fastzip archive_file: found 1 matching files and directories --- PASS: TestArtifactsUploaderDefaultSucceeded (0.00s) --- PASS: TestArtifactsUploaderDefaultSucceeded/legacy (0.00s) --- PASS: TestArtifactsUploaderDefaultSucceeded/fastzip (0.00s) === RUN TestArtifactsUploaderZipSucceeded === RUN TestArtifactsUploaderZipSucceeded/legacy archive_file: found 1 matching files and directories === RUN TestArtifactsUploaderZipSucceeded/fastzip archive_file: found 1 matching files and directories --- PASS: TestArtifactsUploaderZipSucceeded (0.00s) --- PASS: TestArtifactsUploaderZipSucceeded/legacy (0.00s) --- PASS: TestArtifactsUploaderZipSucceeded/fastzip (0.00s) === RUN TestArtifactsUploaderGzipSendsMultipleFiles archive_file: found 1 matching files and directories archive_file2: found 1 matching files and directories --- PASS: TestArtifactsUploaderGzipSendsMultipleFiles (0.00s) PASS coverage: 7.4% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/commands/helpers 6.041s coverage: 7.4% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 1 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/commands/helpers' package with coverprofile in 'count' mode:  === RUN TestArtifactsUploaderRawSucceeded archive_file: found 1 matching files and directories --- PASS: TestArtifactsUploaderRawSucceeded (0.00s) === RUN TestArtifactsUploaderRawDoesNotSendMultipleFiles archive_file: found 1 matching files and directories archive_file2: found 1 matching files and directories WARNING: Retrying...  
context=artifacts-uploader error=invalid argument WARNING: Retrying...  context=artifacts-uploader error=invalid argument invalid argument--- PASS: TestArtifactsUploaderRawDoesNotSendMultipleFiles (3.00s) === RUN TestArtifactsUploaderNoFilesDoNotGenerateError ERROR: No files to upload  --- PASS: TestArtifactsUploaderNoFilesDoNotGenerateError (0.00s) === RUN TestArtifactsUploaderServiceUnavailable archive_file: found 1 matching files and directories WARNING: Retrying...  context=artifacts-uploader error=service unavailable WARNING: Retrying...  context=artifacts-uploader error=service unavailable WARNING: Retrying...  context=artifacts-uploader error=service unavailable WARNING: Retrying...  context=artifacts-uploader error=service unavailable WARNING: Retrying...  context=artifacts-uploader error=service unavailable service unavailable--- PASS: TestArtifactsUploaderServiceUnavailable (17.00s) === RUN TestArtifactsExcludedPaths archive_file: found 1 matching files and directories --- PASS: TestArtifactsExcludedPaths (0.00s) === RUN TestFileArchiverCompressionLevel === RUN TestFileArchiverCompressionLevel/fastest archive_file: found 1 matching files and directories === RUN TestFileArchiverCompressionLevel/fast archive_file: found 1 matching files and directories === RUN TestFileArchiverCompressionLevel/default archive_file: found 1 matching files and directories === RUN TestFileArchiverCompressionLevel/slow archive_file: found 1 matching files and directories === RUN TestFileArchiverCompressionLevel/slowest archive_file: found 1 matching files and directories --- PASS: TestFileArchiverCompressionLevel (0.00s) --- PASS: TestFileArchiverCompressionLevel/fastest (0.00s) artifacts_uploader_test.go:353: PASS: Archive(string,string) --- PASS: TestFileArchiverCompressionLevel/fast (0.00s) artifacts_uploader_test.go:353: PASS: Archive(string,string) --- PASS: TestFileArchiverCompressionLevel/default (0.00s) artifacts_uploader_test.go:353: PASS: Archive(string,string) --- 
PASS: TestFileArchiverCompressionLevel/slow (0.00s) artifacts_uploader_test.go:353: PASS: Archive(string,string) --- PASS: TestFileArchiverCompressionLevel/slowest (0.00s) artifacts_uploader_test.go:353: PASS: Archive(string,string) === RUN TestArtifactUploaderCommandShouldRetry === RUN TestArtifactUploaderCommandShouldRetry/retryable_error_service_unavailable,_over_max_errors_limit === RUN TestArtifactUploaderCommandShouldRetry/no_error,_first_try === RUN TestArtifactUploaderCommandShouldRetry/random_error,_first_try === RUN TestArtifactUploaderCommandShouldRetry/retryable_error,_first_try === RUN TestArtifactUploaderCommandShouldRetry/retryable_error,_max_tries === RUN TestArtifactUploaderCommandShouldRetry/retryable_error,_over_max_tries_limit === RUN TestArtifactUploaderCommandShouldRetry/retryable_error,_before_reaching_service_unavailable_tries === RUN TestArtifactUploaderCommandShouldRetry/retryable_error_service_unavailable,_max_tries --- PASS: TestArtifactUploaderCommandShouldRetry (0.00s) --- PASS: TestArtifactUploaderCommandShouldRetry/retryable_error_service_unavailable,_over_max_errors_limit (0.00s) --- PASS: TestArtifactUploaderCommandShouldRetry/no_error,_first_try (0.00s) --- PASS: TestArtifactUploaderCommandShouldRetry/random_error,_first_try (0.00s) --- PASS: TestArtifactUploaderCommandShouldRetry/retryable_error,_first_try (0.00s) --- PASS: TestArtifactUploaderCommandShouldRetry/retryable_error,_max_tries (0.00s) --- PASS: TestArtifactUploaderCommandShouldRetry/retryable_error,_over_max_tries_limit (0.00s) --- PASS: TestArtifactUploaderCommandShouldRetry/retryable_error,_before_reaching_service_unavailable_tries (0.00s) --- PASS: TestArtifactUploaderCommandShouldRetry/retryable_error_service_unavailable,_max_tries (0.00s) === RUN TestCacheExtractorValidArchive === RUN TestCacheExtractorValidArchive/legacy No URL provided, cache will not be downloaded from shared cache server. Instead a local version of cache will be extracted. 
=== RUN TestCacheExtractorValidArchive/fastzip No URL provided, cache will not be downloaded from shared cache server. Instead a local version of cache will be extracted. --- PASS: TestCacheExtractorValidArchive (0.00s) --- PASS: TestCacheExtractorValidArchive/legacy (0.00s) --- PASS: TestCacheExtractorValidArchive/fastzip (0.00s) === RUN TestCacheExtractorForInvalidArchive === RUN TestCacheExtractorForInvalidArchive/legacy No URL provided, cache will not be downloaded from shared cache server. Instead a local version of cache will be extracted. zip: not a valid zip file=== RUN TestCacheExtractorForInvalidArchive/fastzip No URL provided, cache will not be downloaded from shared cache server. Instead a local version of cache will be extracted. zip: not a valid zip file--- PASS: TestCacheExtractorForInvalidArchive (0.00s) --- PASS: TestCacheExtractorForInvalidArchive/legacy (0.00s) --- PASS: TestCacheExtractorForInvalidArchive/fastzip (0.00s) === RUN TestCacheExtractorForIfNoFileDefined Missing cache file--- PASS: TestCacheExtractorForIfNoFileDefined (0.00s) PASS coverage: 6.5% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/commands/helpers 20.031s coverage: 6.5% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 2 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/commands/helpers' package with coverprofile in 'count' mode:  === RUN TestCacheExtractorForNotExistingFile No URL provided, cache will not be downloaded from shared cache server. Instead a local version of cache will be extracted. 
--- PASS: TestCacheExtractorForNotExistingFile (0.00s) === RUN TestCacheExtractorRemoteServerNotFound file does not exist--- PASS: TestCacheExtractorRemoteServerNotFound (0.00s) === RUN TestCacheExtractorRemoteServerTimedOut --- PASS: TestCacheExtractorRemoteServerTimedOut (0.05s) === RUN TestCacheExtractorRemoteServer Downloading archive.zip from http://127.0.0.1:34093/cache.zip archive.zip is up to date  --- PASS: TestCacheExtractorRemoteServer (0.00s) === RUN TestCacheExtractorRemoteServerFailOnInvalidServer Get http://localhost:65333/cache.zip: dial tcp [::1]:65333: connect: connection refused--- PASS: TestCacheExtractorRemoteServerFailOnInvalidServer (0.00s) === RUN TestGlobbedFilePaths foo/**/*.txt: found 3 matching files and directories --- PASS: TestGlobbedFilePaths (0.00s) === RUN TestExcludedFilePaths foo/test/: found 5 matching files and directories  foo/**/*.md: excluded 2 files  foo/test/bar/baz/3.txt: excluded 1 files  --- PASS: TestExcludedFilePaths (0.00s) === RUN TestCacheArchiverAddingUntrackedFiles untracked: found 2 files  --- PASS: TestCacheArchiverAddingUntrackedFiles (0.00s) === RUN TestCacheArchiverAddingUntrackedUnicodeFiles untracked: found 1 files  --- PASS: TestCacheArchiverAddingUntrackedUnicodeFiles (0.00s) === RUN TestCacheArchiverAddingFile untracked_test_file.txt: found 1 matching files and directories --- PASS: TestCacheArchiverAddingFile (0.00s) PASS coverage: 5.2% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/commands/helpers 0.082s coverage: 5.2% of statements in gitlab.com/gitlab-org/gitlab-runner/...  
--- Starting part 3 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/commands/helpers' package with coverprofile in 'count' mode:  === RUN TestFileArchiverToFailOnAbsoluteFile time="2021-05-20T15:31:08Z" level=warning msg="/absolute.txt: no matching files" --- PASS: TestFileArchiverToFailOnAbsoluteFile (0.00s) === RUN TestFileArchiverToFailOnRelativeFile time="2021-05-20T15:31:08Z" level=warning msg="../../../relative.txt: no matching files" --- PASS: TestFileArchiverToFailOnRelativeFile (0.00s) === RUN TestFileArchiverToAddNotExistingFile time="2021-05-20T15:31:08Z" level=warning msg="not_existing_file.txt: no matching files" --- PASS: TestFileArchiverToAddNotExistingFile (0.00s) === RUN TestFileArchiverChanged time="2021-05-20T15:31:08Z" level=info msg="untracked_test_file.txt: found 1 matching files and directories" --- PASS: TestFileArchiverChanged (0.00s) === RUN TestFileArchiverFileIsNotChanged time="2021-05-20T15:31:08Z" level=info msg="untracked_test_file.txt: found 1 matching files and directories" --- PASS: TestFileArchiverFileIsNotChanged (0.00s) === RUN TestFileArchiverFileIsChanged time="2021-05-20T15:31:08Z" level=info msg="untracked_test_file.txt: found 1 matching files and directories" --- PASS: TestFileArchiverFileIsChanged (0.00s) === RUN TestFileArchiverFileDoesNotExist time="2021-05-20T15:31:08Z" level=info msg="untracked_test_file.txt: found 1 matching files and directories" --- PASS: TestFileArchiverFileDoesNotExist (0.00s) === RUN TestServiceWaiterCommand_NoEnvironmentVariables No HOST or PORT found--- PASS: TestServiceWaiterCommand_NoEnvironmentVariables (0.00s) === RUN TestHealthCheckCommand_Execute === RUN TestHealthCheckCommand_Execute/Successful_connect waiting for TCP connection to 127.0.0.1:42173...=== RUN TestHealthCheckCommand_Execute/Unsuccessful_connect_because_service_is_down waiting for TCP connection to 127.0.0.1:44437...--- PASS: TestHealthCheckCommand_Execute (2.00s) --- PASS: 
TestHealthCheckCommand_Execute/Successful_connect (0.00s) --- PASS: TestHealthCheckCommand_Execute/Unsuccessful_connect_because_service_is_down (2.00s) === RUN TestNewReadLogsCommandFileNotExist --- PASS: TestNewReadLogsCommandFileNotExist (2.00s) PASS coverage: 2.5% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/commands/helpers 4.025s coverage: 2.5% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 4 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/commands/helpers' package with coverprofile in 'count' mode:  === RUN TestNewReadLogsCommandNoAttempts --- PASS: TestNewReadLogsCommandNoAttempts (0.00s) === RUN TestNewReadLogsCommandFileSeekToInvalidLocation --- PASS: TestNewReadLogsCommandFileSeekToInvalidLocation (0.00s) === RUN TestNewReadLogsCommandFileLogStreamProviderCorrect --- PASS: TestNewReadLogsCommandFileLogStreamProviderCorrect (1.00s) === RUN TestNewReadLogsCommandLines --- PASS: TestNewReadLogsCommandLines (0.50s) read_logs_test.go:107: PASS: Open() read_logs_test.go:107: PASS: Write(string) read_logs_test.go:107: PASS: Write(string) read_logs_test.go:107: PASS: Write(string) === RUN TestNewReadLogsCommandWriteLinesWithDelay --- PASS: TestNewReadLogsCommandWriteLinesWithDelay (5.50s) read_logs_test.go:171: PASS: Open() read_logs_test.go:171: PASS: Write(string) read_logs_test.go:171: PASS: Write(string) read_logs_test.go:171: PASS: Write(string) read_logs_test.go:171: PASS: Write(string) read_logs_test.go:171: PASS: Write(string) read_logs_test.go:171: PASS: Write(string) === RUN TestSplitLinesAccordingToBufferSize --- PASS: TestSplitLinesAccordingToBufferSize (0.50s) read_logs_test.go:211: PASS: Open() read_logs_test.go:211: PASS: Write(string) read_logs_test.go:211: PASS: Write(string) read_logs_test.go:211: PASS: Write(string) read_logs_test.go:211: PASS: Write(string) read_logs_test.go:211: PASS: Write(string) === RUN TestSeek --- PASS: TestSeek (0.50s) 
read_logs_test.go:247: PASS: Open() read_logs_test.go:247: PASS: Write(string) === RUN TestDoRetry === RUN TestDoRetry/Error_is_of_type_retryableErr time="2021-05-20T15:31:22Z" level=warning msg=Retrying... error=error time="2021-05-20T15:31:22Z" level=warning msg=Retrying... error=error time="2021-05-20T15:31:22Z" level=warning msg=Retrying... error=error === RUN TestDoRetry/Error_is_not_type_of_retryableErr === RUN TestDoRetry/Error_is_nil --- PASS: TestDoRetry (0.00s) --- PASS: TestDoRetry/Error_is_of_type_retryableErr (0.00s) --- PASS: TestDoRetry/Error_is_not_type_of_retryableErr (0.00s) --- PASS: TestDoRetry/Error_is_nil (0.00s) === RUN TestCacheArchiverIsUpToDate === RUN TestCacheArchiverIsUpToDate/legacy archive_file: found 1 matching files and directories No URL provided, cache will be not uploaded to shared cache server. Cache will be stored only locally. archive_file: found 1 matching files and directories Archive is up to date!  archive_file: found 1 matching files and directories No URL provided, cache will be not uploaded to shared cache server. Cache will be stored only locally. === RUN TestCacheArchiverIsUpToDate/fastzip archive_file: found 1 matching files and directories No URL provided, cache will be not uploaded to shared cache server. Cache will be stored only locally. archive_file: found 1 matching files and directories Archive is up to date!  archive_file: found 1 matching files and directories No URL provided, cache will be not uploaded to shared cache server. Cache will be stored only locally. --- PASS: TestCacheArchiverIsUpToDate (2.01s) --- PASS: TestCacheArchiverIsUpToDate/legacy (1.00s) --- PASS: TestCacheArchiverIsUpToDate/fastzip (1.00s) === RUN TestCacheArchiverForIfNoFileDefined Missing --file--- PASS: TestCacheArchiverForIfNoFileDefined (0.00s) PASS coverage: 5.4% of statements in gitlab.com/gitlab-org/gitlab-runner/... 
ok gitlab.com/gitlab-org/gitlab-runner/commands/helpers 10.034s coverage: 5.4% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 5 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/commands/helpers' package with coverprofile in 'count' mode:  === RUN TestCacheArchiverRemoteServerNotFound Uploading archive.zip to http://127.0.0.1:41011/invalid-file.zip received: 404 Not Found--- PASS: TestCacheArchiverRemoteServerNotFound (0.00s) === RUN TestCacheArchiverRemoteServer Uploading archive.zip to http://127.0.0.1:44209/cache.zip --- PASS: TestCacheArchiverRemoteServer (0.00s) === RUN TestCacheArchiverGoCloudRemoteServer Uploading archive.zip to testblob://bucket/path/to/cache.zip --- PASS: TestCacheArchiverGoCloudRemoteServer (0.00s) === RUN TestCacheArchiverRemoteServerWithHeaders Uploading archive.zip to http://127.0.0.1:33063/cache.zip --- PASS: TestCacheArchiverRemoteServerWithHeaders (0.00s) === RUN TestCacheArchiverRemoteServerTimedOut --- PASS: TestCacheArchiverRemoteServerTimedOut (0.05s) === RUN TestCacheArchiverRemoteServerFailOnInvalidServer Uploading archive.zip to http://localhost:65333/cache.zip Put http://localhost:65333/cache.zip: dial tcp [::1]:65333: connect: connection refused--- PASS: TestCacheArchiverRemoteServerFailOnInvalidServer (0.00s) === RUN TestCacheArchiverCompressionLevel === RUN TestCacheArchiverCompressionLevel/fastest archive_file: found 1 matching files and directories No URL provided, cache will be not uploaded to shared cache server. Cache will be stored only locally. === RUN TestCacheArchiverCompressionLevel/fast archive_file: found 1 matching files and directories No URL provided, cache will be not uploaded to shared cache server. Cache will be stored only locally. === RUN TestCacheArchiverCompressionLevel/default archive_file: found 1 matching files and directories No URL provided, cache will be not uploaded to shared cache server. Cache will be stored only locally. 
=== RUN TestCacheArchiverCompressionLevel/slow archive_file: found 1 matching files and directories No URL provided, cache will be not uploaded to shared cache server. Cache will be stored only locally. === RUN TestCacheArchiverCompressionLevel/slowest archive_file: found 1 matching files and directories No URL provided, cache will be not uploaded to shared cache server. Cache will be stored only locally. --- PASS: TestCacheArchiverCompressionLevel (0.00s) --- PASS: TestCacheArchiverCompressionLevel/fastest (0.00s) cache_archiver_integration_test.go:205: PASS: Archive(string,string) --- PASS: TestCacheArchiverCompressionLevel/fast (0.00s) cache_archiver_integration_test.go:205: PASS: Archive(string,string) --- PASS: TestCacheArchiverCompressionLevel/default (0.00s) cache_archiver_integration_test.go:205: PASS: Archive(string,string) --- PASS: TestCacheArchiverCompressionLevel/slow (0.00s) cache_archiver_integration_test.go:205: PASS: Archive(string,string) --- PASS: TestCacheArchiverCompressionLevel/slowest (0.00s) cache_archiver_integration_test.go:205: PASS: Archive(string,string) === RUN TestCacheInit --- PASS: TestCacheInit (0.00s) === RUN TestCacheInit_NoArguments No arguments passed, at least 1 path is required.--- PASS: TestCacheInit_NoArguments (0.00s) PASS coverage: 4.6% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/commands/helpers 0.080s coverage: 4.6% of statements in gitlab.com/gitlab-org/gitlab-runner/...  
--- Starting part 0 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive' package with coverprofile in 'count' mode:  === RUN TestDefaultRegistration === RUN TestDefaultRegistration/raw === RUN TestDefaultRegistration/gzip === RUN TestDefaultRegistration/zip --- PASS: TestDefaultRegistration (0.00s) --- PASS: TestDefaultRegistration/raw (0.00s) --- PASS: TestDefaultRegistration/gzip (0.00s) --- PASS: TestDefaultRegistration/zip (0.00s) === RUN TestRegister --- PASS: TestRegister (0.00s) === RUN TestRegisterOverride --- PASS: TestRegisterOverride (0.00s) PASS coverage: 6.6% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/commands/helpers/archive 0.003s coverage: 6.6% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 0 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/commands/helpers/meter' package with coverprofile in 'count' mode:  === RUN TestFormatByteRate === RUN TestFormatByteRate/format_megabytes === RUN TestFormatByteRate/format_kilobytes_exact === RUN TestFormatByteRate/format_terabytes_over === RUN TestFormatByteRate/format_exabytes_exact === RUN TestFormatByteRate/format_terabytes_under === RUN TestFormatByteRate/format_gigabytes_over === RUN TestFormatByteRate/format_megabytes_exact === RUN TestFormatByteRate/format_bytes === RUN TestFormatByteRate/format_megabytes_over === RUN TestFormatByteRate/format_terabytes_exact === RUN TestFormatByteRate/format_bytes_(non-second) === RUN TestFormatByteRate/format_kilobytes_over === RUN TestFormatByteRate/format_exabytes_over === RUN TestFormatByteRate/format_exabytes === RUN TestFormatByteRate/format_kilobytes_under === RUN TestFormatByteRate/format_megabytes_under === RUN TestFormatByteRate/format_kilobytes === RUN TestFormatByteRate/format_gigabytes === RUN TestFormatByteRate/format_petabytes_exact === RUN TestFormatByteRate/format_petabytes_over === RUN TestFormatByteRate/format_gigabytes_exact === RUN 
TestFormatByteRate/format_gigabytes_under === RUN TestFormatByteRate/format_petabytes_under === RUN TestFormatByteRate/format_exabytes_under === RUN TestFormatByteRate/format_bytes_(zero-second) === RUN TestFormatByteRate/format_terabytes === RUN TestFormatByteRate/format_petabytes --- PASS: TestFormatByteRate (0.00s) --- PASS: TestFormatByteRate/format_megabytes (0.00s) --- PASS: TestFormatByteRate/format_kilobytes_exact (0.00s) --- PASS: TestFormatByteRate/format_terabytes_over (0.00s) --- PASS: TestFormatByteRate/format_exabytes_exact (0.00s) --- PASS: TestFormatByteRate/format_terabytes_under (0.00s) --- PASS: TestFormatByteRate/format_gigabytes_over (0.00s) --- PASS: TestFormatByteRate/format_megabytes_exact (0.00s) --- PASS: TestFormatByteRate/format_bytes (0.00s) --- PASS: TestFormatByteRate/format_megabytes_over (0.00s) --- PASS: TestFormatByteRate/format_terabytes_exact (0.00s) --- PASS: TestFormatByteRate/format_bytes_(non-second) (0.00s) --- PASS: TestFormatByteRate/format_kilobytes_over (0.00s) --- PASS: TestFormatByteRate/format_exabytes_over (0.00s) --- PASS: TestFormatByteRate/format_exabytes (0.00s) --- PASS: TestFormatByteRate/format_kilobytes_under (0.00s) --- PASS: TestFormatByteRate/format_megabytes_under (0.00s) --- PASS: TestFormatByteRate/format_kilobytes (0.00s) --- PASS: TestFormatByteRate/format_gigabytes (0.00s) --- PASS: TestFormatByteRate/format_petabytes_exact (0.00s) --- PASS: TestFormatByteRate/format_petabytes_over (0.00s) --- PASS: TestFormatByteRate/format_gigabytes_exact (0.00s) --- PASS: TestFormatByteRate/format_gigabytes_under (0.00s) --- PASS: TestFormatByteRate/format_petabytes_under (0.00s) --- PASS: TestFormatByteRate/format_exabytes_under (0.00s) --- PASS: TestFormatByteRate/format_bytes_(zero-second) (0.00s) --- PASS: TestFormatByteRate/format_terabytes (0.00s) --- PASS: TestFormatByteRate/format_petabytes (0.00s) === RUN TestFormatBytes === RUN TestFormatBytes/format_bytes === RUN TestFormatBytes/format_kilobytes === 
RUN TestFormatBytes/format_megabytes === RUN TestFormatBytes/format_gigabytes === RUN TestFormatBytes/format_terabytes === RUN TestFormatBytes/format_petabytes === RUN TestFormatBytes/format_exabytes --- PASS: TestFormatBytes (0.00s) --- PASS: TestFormatBytes/format_bytes (0.00s) --- PASS: TestFormatBytes/format_kilobytes (0.00s) --- PASS: TestFormatBytes/format_megabytes (0.00s) --- PASS: TestFormatBytes/format_gigabytes (0.00s) --- PASS: TestFormatBytes/format_terabytes (0.00s) --- PASS: TestFormatBytes/format_petabytes (0.00s) --- PASS: TestFormatBytes/format_exabytes (0.00s) === RUN TestLabelledRateFormat === RUN TestLabelledRateFormat/unknown_total_size_undone === RUN TestLabelledRateFormat/unknown_total_size_done === RUN TestLabelledRateFormat/known_total_size_undone === RUN TestLabelledRateFormat/known_total_size_done --- PASS: TestLabelledRateFormat (0.00s) --- PASS: TestLabelledRateFormat/unknown_total_size_undone (0.00s) --- PASS: TestLabelledRateFormat/unknown_total_size_done (0.00s) --- PASS: TestLabelledRateFormat/known_total_size_undone (0.00s) --- PASS: TestLabelledRateFormat/known_total_size_done (0.00s) === RUN TestReader_New_NoUpdateFrequency --- PASS: TestReader_New_NoUpdateFrequency (0.00s) === RUN TestReader_New --- PASS: TestReader_New (0.00s) === RUN TestWriter_New_NoUpdateFrequency --- PASS: TestWriter_New_NoUpdateFrequency (0.00s) === RUN TestWriter_New --- PASS: TestWriter_New (0.00s) PASS coverage: 100.0% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/commands/helpers/meter 0.005s coverage: 100.0% of statements in gitlab.com/gitlab-org/gitlab-runner/... section_end:1621524688:step_script section_start:1621524688:archive_cache Saving cache for successful job Creating cache unit test 1/8-v13-12-0-2... 
WARNING: /builds/gitlab-org/gitlab-runner/.gocache-false/: no matching files Uploading cache.zip to https://storage.googleapis.com/gitlab-org-ci-runners-cache/project/250833/unit%20test%201/8-v13-12-0-2 Created cache section_end:1621524689:archive_cache section_start:1621524689:upload_artifacts_on_success Uploading artifacts for successful job Uploading artifacts... .cover/*: found 15 matching files and directories  .testoutput/*: found 15 matching files and directories Uploading artifacts as "archive" to coordinator... ok id=1280281226 status=201 token=LzAr9-as section_end:1621524691:upload_artifacts_on_success section_start:1621524691:cleanup_file_variables Cleaning up file based variables section_end:1621524692:cleanup_file_variables Job succeeded  ================================================ FILE: common/buildlogger/internal/testdata/corpus/log-2 ================================================ Running with gitlab-runner 13.12.0-rc1 (b21d5c5b)  on gitlab-org-docker pVR9XBDq  feature flags: FF_GITLAB_REGISTRY_HELPER_IMAGE:true, FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE:true section_start:1621524382:resolve_secrets Resolving secrets section_end:1621524382:resolve_secrets section_start:1621524382:prepare_executor Preparing the "docker+machine" executor Using Docker executor with image registry.gitlab.com/gitlab-org/gitlab-runner/ci:1.13.8-12 ... Starting service docker:20.10.2-dind ... Pulling docker image docker:20.10.2-dind ... Using docker image sha256:7569a61fe0d5af655280b516bb2654a1ef03f7a3d67549543b65d81dbeea372e for docker:20.10.2-dind with digest docker@sha256:8f4e9ddda1049e6935f9fc7f5cad0bd1001fbf59188616f19b620fd7b6e95ba2 ... Waiting for services to be up and running... Authenticating with credentials from job payload (GitLab Registry) Pulling docker image registry.gitlab.com/gitlab-org/gitlab-runner/ci:1.13.8-12 ... 
Using docker image sha256:ae3c432ccac98231f52393c158c545eb689584defed228600b87e2fe4e4fa1e9 for registry.gitlab.com/gitlab-org/gitlab-runner/ci:1.13.8-12 with digest registry.gitlab.com/gitlab-org/gitlab-runner/ci@sha256:0436a4d75851db641f3c704688e0e27a3e208f4bc948503c1b35b7e1691b5cf6 ... section_end:1621524429:prepare_executor section_start:1621524429:prepare_script Preparing environment Running on runner-pvr9xbdq-project-250833-concurrent-0 via runner-pvr9xbdq-org-ci-1621524328-43d1c5e0... section_end:1621524456:prepare_script section_start:1621524456:get_sources Getting source from Git repository $ eval "$CI_PRE_CLONE_SCRIPT" Fetching changes... Initialized empty Git repository in /builds/gitlab-org/gitlab-runner/.git/ Created fresh repository. Checking out 7a6612da as v13.12.0... Skipping Git submodules setup section_end:1621524466:get_sources section_start:1621524466:restore_cache Restoring cache Checking cache for unit test 2/8-v13-12-0-2... FATAL: file does not exist  Failed to extract cache section_end:1621524467:restore_cache section_start:1621524467:download_artifacts Downloading artifacts Downloading artifacts for helper images (1280281190)... Downloading artifacts from coordinator... ok  id=1280281190 status=200 token=zaM3ywFV Downloading artifacts for clone test repo (1280281192)... Downloading artifacts from coordinator... ok  id=1280281192 status=200 token=xzA1hsVL Downloading artifacts for tests definitions (1280281194)... Downloading artifacts from coordinator... ok  id=1280281194 status=200 token=kQK1ELdZ section_end:1621524483:download_artifacts section_start:1621524483:step_script Executing "step_script" stage of the job script Using docker image sha256:ae3c432ccac98231f52393c158c545eb689584defed228600b87e2fe4e4fa1e9 for registry.gitlab.com/gitlab-org/gitlab-runner/ci:1.13.8-12 with digest registry.gitlab.com/gitlab-org/gitlab-runner/ci@sha256:0436a4d75851db641f3c704688e0e27a3e208f4bc948503c1b35b7e1691b5cf6 ... 
$ mkdir -p "$GOCACHE" $ source ci/touch_make_dependencies touching out/binaries/gitlab-runner-helper/gitlab-runner-helper.x86_64-windows.exe touching out/binaries/gitlab-runner-helper/gitlab-runner-helper.x86_64 touching out/binaries/gitlab-runner-helper/gitlab-runner-helper.s390x touching out/binaries/gitlab-runner-helper/gitlab-runner-helper.arm touching out/binaries/gitlab-runner-helper/gitlab-runner-helper.arm64 touching out/helper-images/prebuilt-arm64.tar.xz touching out/helper-images/prebuilt-arm.tar.xz touching out/helper-images/prebuilt-s390x.tar.xz touching out/helper-images/prebuilt-x86_64.tar.xz touching out/helper-images/prebuilt-x86_64-pwsh.tar.xz $ make parallel_test_execute # Pulling images required for some tests go: downloading k8s.io/api v0.0.0-20191004102349-159aefb8556b go: downloading github.com/prometheus/common v0.6.0 go: downloading github.com/docker/docker v20.10.2+incompatible go: downloading github.com/prometheus/client_golang v1.1.0 go: downloading gitlab.com/ayufan/golang-cli-helpers v0.0.0-20171103152739-a7cf72d604cd go: downloading gopkg.in/yaml.v2 v2.3.0 go: downloading github.com/tevino/abool v0.0.0-20160628101133-3c25f2fe7cd0 go: downloading github.com/stretchr/testify v1.6.2-0.20200720104044-95a9d909e987 go: extracting gitlab.com/ayufan/golang-cli-helpers v0.0.0-20171103152739-a7cf72d604cd go: extracting github.com/prometheus/common v0.6.0 go: extracting github.com/tevino/abool v0.0.0-20160628101133-3c25f2fe7cd0 go: extracting k8s.io/api v0.0.0-20191004102349-159aefb8556b go: extracting github.com/prometheus/client_golang v1.1.0 go: extracting gopkg.in/yaml.v2 v2.3.0 go: downloading github.com/docker/go-connections v0.3.0 go: extracting github.com/stretchr/testify v1.6.2-0.20200720104044-95a9d909e987 go: downloading github.com/sirupsen/logrus v1.7.0 go: extracting github.com/sirupsen/logrus v1.7.0 go: extracting github.com/docker/go-connections v0.3.0 go: downloading github.com/stretchr/objx v0.3.0 go: downloading 
github.com/gorilla/websocket v1.4.2 go: extracting github.com/stretchr/objx v0.3.0 go: downloading github.com/docker/machine v0.7.1-0.20170120224952-7b7a141da844 go: extracting github.com/gorilla/websocket v1.4.2 go: downloading github.com/pmezard/go-difflib v1.0.0 go: downloading k8s.io/apimachinery v0.0.0-20191004074956-c5d2f014d689 go: extracting github.com/pmezard/go-difflib v1.0.0 go: extracting github.com/docker/machine v0.7.1-0.20170120224952-7b7a141da844 go: downloading github.com/matttproud/golang_protobuf_extensions v1.0.1 go: extracting k8s.io/apimachinery v0.0.0-20191004074956-c5d2f014d689 go: extracting github.com/matttproud/golang_protobuf_extensions v1.0.1 go: downloading github.com/beorn7/perks v1.0.1 go: extracting github.com/beorn7/perks v1.0.1 go: downloading github.com/gorilla/mux v1.3.1-0.20170228224354-599cba5e7b61 go: extracting github.com/gorilla/mux v1.3.1-0.20170228224354-599cba5e7b61 go: downloading golang.org/x/net v0.0.0-20201224014010-6772e930b67b go: downloading github.com/prometheus/procfs v0.0.5 go: downloading github.com/davecgh/go-spew v1.1.1 go: extracting github.com/davecgh/go-spew v1.1.1 go: downloading golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad go: extracting github.com/prometheus/procfs v0.0.5 go: downloading github.com/golang/protobuf v1.4.3 go: extracting github.com/golang/protobuf v1.4.3 go: extracting github.com/docker/docker v20.10.2+incompatible go: downloading google.golang.org/protobuf v1.25.0 go: extracting golang.org/x/net v0.0.0-20201224014010-6772e930b67b go: extracting golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad go: downloading github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 go: extracting github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 go: downloading github.com/pkg/errors v0.9.1 go: extracting google.golang.org/protobuf v1.25.0 go: extracting github.com/pkg/errors v0.9.1 go: downloading k8s.io/klog v1.0.0 go: extracting k8s.io/klog v1.0.0 go: 
downloading github.com/gorhill/cronexpr v0.0.0-20160318121724-f0984319b442 go: extracting github.com/gorhill/cronexpr v0.0.0-20160318121724-f0984319b442 go: downloading github.com/gogo/protobuf v1.1.1 go: downloading gopkg.in/inf.v0 v0.9.0 go: downloading github.com/docker/go-units v0.3.2-0.20160802145505-eb879ae3e2b8 go: extracting gopkg.in/inf.v0 v0.9.0 go: downloading gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 go: extracting github.com/docker/go-units v0.3.2-0.20160802145505-eb879ae3e2b8 go: downloading github.com/google/gofuzz v1.0.0 go: extracting gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 go: extracting github.com/google/gofuzz v1.0.0 go: downloading github.com/BurntSushi/toml v0.3.1 go: extracting github.com/BurntSushi/toml v0.3.1 go: downloading github.com/json-iterator/go v1.1.10 go: downloading golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6 go: downloading github.com/urfave/cli v1.20.0 go: downloading github.com/opencontainers/image-spec v1.0.1 go: downloading github.com/moby/term v0.0.0-20201216013528-df9cb8a40635 go: downloading github.com/morikuni/aec v1.0.0 go: downloading golang.org/x/text v0.3.6 go: extracting github.com/json-iterator/go v1.1.10 go: extracting github.com/gogo/protobuf v1.1.1 go: downloading github.com/hashicorp/vault/api v1.0.4 go: extracting github.com/moby/term v0.0.0-20201216013528-df9cb8a40635 go: extracting github.com/urfave/cli v1.20.0 go: downloading github.com/containerd/containerd v1.4.3 go: extracting github.com/opencontainers/image-spec v1.0.1 go: extracting github.com/morikuni/aec v1.0.0 go: extracting github.com/hashicorp/vault/api v1.0.4 go: downloading github.com/docker/distribution v2.7.0+incompatible go: downloading google.golang.org/grpc v1.34.0 go: extracting golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6 go: extracting github.com/docker/distribution v2.7.0+incompatible go: extracting github.com/containerd/containerd v1.4.3 go: downloading github.com/hashicorp/errwrap v1.0.0 go: 
downloading github.com/hashicorp/go-retryablehttp v0.5.4 go: downloading golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e go: extracting google.golang.org/grpc v1.34.0 go: extracting github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/hashicorp/go-rootcerts v1.0.1 go: extracting github.com/hashicorp/go-retryablehttp v0.5.4 go: downloading github.com/mitchellh/mapstructure v1.4.0 go: extracting golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e go: downloading github.com/modern-go/reflect2 v1.0.1 go: extracting github.com/hashicorp/go-rootcerts v1.0.1 go: downloading gopkg.in/square/go-jose.v2 v2.3.1 go: extracting github.com/mitchellh/mapstructure v1.4.0 go: downloading github.com/hashicorp/go-multierror v1.0.0 go: downloading github.com/hashicorp/hcl v1.0.0 go: extracting gopkg.in/square/go-jose.v2 v2.3.1 go: extracting github.com/hashicorp/go-multierror v1.0.0 go: downloading github.com/hashicorp/vault/sdk v0.1.13 go: extracting github.com/modern-go/reflect2 v1.0.1 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: extracting github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: extracting github.com/hashicorp/hcl v1.0.0 go: extracting github.com/hashicorp/vault/sdk v0.1.13 go: downloading google.golang.org/genproto v0.0.0-20201203001206-6486ece9c497 go: downloading github.com/opencontainers/go-digest v1.0.0-rc1 go: downloading github.com/hashicorp/go-sockaddr v1.0.2 go: downloading github.com/ryanuber/go-glob v1.0.0 go: downloading github.com/hashicorp/go-cleanhttp v0.5.1 go: extracting github.com/opencontainers/go-digest v1.0.0-rc1 go: downloading github.com/pierrec/lz4 v2.0.5+incompatible go: downloading github.com/golang/snappy v0.0.1 go: extracting github.com/ryanuber/go-glob v1.0.0 go: extracting github.com/hashicorp/go-sockaddr v1.0.2 go: extracting github.com/golang/snappy v0.0.1 go: extracting github.com/hashicorp/go-cleanhttp v0.5.1 go: extracting golang.org/x/text v0.3.6 go: 
extracting github.com/pierrec/lz4 v2.0.5+incompatible go: extracting google.golang.org/genproto v0.0.0-20201203001206-6486ece9c497 go: finding github.com/BurntSushi/toml v0.3.1 go: finding github.com/docker/go-units v0.3.2-0.20160802145505-eb879ae3e2b8 go: finding github.com/prometheus/client_golang v1.1.0 go: finding github.com/beorn7/perks v1.0.1 go: finding github.com/golang/protobuf v1.4.3 go: finding google.golang.org/protobuf v1.25.0 go: finding github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 go: finding github.com/prometheus/common v0.6.0 go: finding github.com/matttproud/golang_protobuf_extensions v1.0.1 go: finding github.com/prometheus/procfs v0.0.5 go: finding github.com/sirupsen/logrus v1.7.0 go: finding golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6 go: finding github.com/stretchr/testify v1.6.2-0.20200720104044-95a9d909e987 go: finding github.com/davecgh/go-spew v1.1.1 go: finding github.com/pmezard/go-difflib v1.0.0 go: finding github.com/stretchr/objx v0.3.0 go: finding gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776 go: finding github.com/tevino/abool v0.0.0-20160628101133-3c25f2fe7cd0 go: finding github.com/urfave/cli v1.20.0 go: finding gitlab.com/ayufan/golang-cli-helpers v0.0.0-20171103152739-a7cf72d604cd go: finding github.com/docker/docker v20.10.2+incompatible go: finding gopkg.in/yaml.v2 v2.3.0 go: finding k8s.io/apimachinery v0.0.0-20191004074956-c5d2f014d689 go: finding github.com/docker/go-connections v0.3.0 go: finding github.com/opencontainers/image-spec v1.0.1 go: finding github.com/opencontainers/go-digest v1.0.0-rc1 go: finding github.com/gogo/protobuf v1.1.1 go: finding github.com/containerd/containerd v1.4.3 go: finding github.com/pkg/errors v0.9.1 go: finding google.golang.org/grpc v1.34.0 go: finding google.golang.org/genproto v0.0.0-20201203001206-6486ece9c497 go: finding github.com/docker/distribution v2.7.0+incompatible go: finding golang.org/x/net v0.0.0-20201224014010-6772e930b67b go: finding 
github.com/moby/term v0.0.0-20201216013528-df9cb8a40635 go: finding github.com/morikuni/aec v1.0.0 go: finding github.com/docker/machine v0.7.1-0.20170120224952-7b7a141da844 go: finding golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad go: finding github.com/gorhill/cronexpr v0.0.0-20160318121724-f0984319b442 go: finding github.com/hashicorp/vault/api v1.0.4 go: finding github.com/hashicorp/errwrap v1.0.0 go: finding github.com/hashicorp/go-cleanhttp v0.5.1 go: finding github.com/hashicorp/go-multierror v1.0.0 go: finding github.com/hashicorp/go-retryablehttp v0.5.4 go: finding github.com/hashicorp/go-rootcerts v1.0.1 go: finding github.com/hashicorp/hcl v1.0.0 go: finding github.com/hashicorp/vault/sdk v0.1.13 go: finding github.com/golang/snappy v0.0.1 go: finding github.com/pierrec/lz4 v2.0.5+incompatible go: finding github.com/hashicorp/go-sockaddr v1.0.2 go: finding github.com/ryanuber/go-glob v1.0.0 go: finding github.com/mitchellh/mapstructure v1.4.0 go: finding golang.org/x/text v0.3.6 go: finding golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e go: finding gopkg.in/square/go-jose.v2 v2.3.1 go: finding github.com/json-iterator/go v1.1.10 go: finding github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: finding github.com/modern-go/reflect2 v1.0.1 go: finding github.com/gorilla/mux v1.3.1-0.20170228224354-599cba5e7b61 go: finding github.com/gorilla/websocket v1.4.2 go: finding k8s.io/api v0.0.0-20191004102349-159aefb8556b go: finding gopkg.in/inf.v0 v0.9.0 go: finding github.com/google/gofuzz v1.0.0 go: finding k8s.io/klog v1.0.0 [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] latest: Pulling from gitlab-org/gitlab-runner/alpine-no-root [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] df20fa9351a1: Pulling fs layer [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] c7e9d654d1d6: Pulling fs layer [docker:18-git] 18-git: Pulling from library/docker 
[registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] c7e9d654d1d6: Verifying Checksum [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] c7e9d654d1d6: Download complete [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] df20fa9351a1: Download complete [alpine:3.12.0] 3.12.0: Pulling from library/alpine [docker:18-dind] 18-dind: Pulling from library/docker [docker:18-git] 9d48c3bd43c5: Pulling fs layer [docker:18-git] 7f94eaf8af20: Pulling fs layer [docker:18-git] 9fe9984849c1: Pulling fs layer [docker:18-git] 3091f1b4f1aa: Pulling fs layer [docker:18-git] 6ef266ac0949: Pulling fs layer [docker:18-git] b2c2c13f4c08: Pulling fs layer [docker:18-git] f354b3ae6d74: Pulling fs layer [docker:18-git] 6ab2580d9dce: Pulling fs layer [docker:18-git] 3091f1b4f1aa: Waiting [docker:18-git] 6ef266ac0949: Waiting [docker:18-git] b2c2c13f4c08: Waiting [docker:18-git] f354b3ae6d74: Waiting [docker:18-git] 6ab2580d9dce: Waiting [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] df20fa9351a1: Pull complete [alpine:3.12.0] df20fa9351a1: Already exists [docker:18-dind] 9d48c3bd43c5: Pulling fs layer [docker:18-dind] 7f94eaf8af20: Pulling fs layer [docker:18-dind] 9fe9984849c1: Pulling fs layer [docker:18-dind] 3091f1b4f1aa: Pulling fs layer [docker:18-dind] 6ef266ac0949: Pulling fs layer [docker:18-dind] b2c2c13f4c08: Pulling fs layer [docker:18-dind] f354b3ae6d74: Pulling fs layer [docker:18-dind] 8f4a6170836f: Pulling fs layer [docker:18-dind] 853fedec02a1: Pulling fs layer [docker:18-dind] a57a377d7e5d: Pulling fs layer [docker:18-dind] ac4bc61da695: Pulling fs layer [docker:18-dind] 3091f1b4f1aa: Waiting [docker:18-dind] 6ef266ac0949: Waiting [docker:18-dind] b2c2c13f4c08: Waiting [docker:18-dind] f354b3ae6d74: Waiting [docker:18-dind] 8f4a6170836f: Waiting [docker:18-dind] 853fedec02a1: Waiting [docker:18-dind] a57a377d7e5d: Waiting [docker:18-dind] ac4bc61da695: Waiting 
[registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] c7e9d654d1d6: Pull complete [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] Digest: sha256:034971042d77defbcd01dbc1c163b5cf03397bc3ab5228b0943e019eb9f5f824 [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] Status: Downloaded newer image for registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest [registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest] registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest [docker:18-git] 9fe9984849c1: Verifying Checksum [docker:18-dind] 9fe9984849c1: Download complete [docker:18-git] 9fe9984849c1: Download complete [alpine:3.12.0] Digest: sha256:185518070891758909c9f839cf4ca393ee977ac378609f700f60a771a2dfe321 [docker:18-git] 7f94eaf8af20: Download complete [docker:18-dind] 7f94eaf8af20: Download complete [alpine:3.12.0] Status: Downloaded newer image for alpine:3.12.0 [alpine:3.12.0] docker.io/library/alpine:3.12.0 [docker:18-dind] 9d48c3bd43c5: Download complete [docker:18-git] 9d48c3bd43c5: Verifying Checksum [docker:18-git] 9d48c3bd43c5: Download complete [docker:18-dind] 6ef266ac0949: Verifying Checksum [docker:18-dind] 6ef266ac0949: Download complete [docker:18-git] 6ef266ac0949: Verifying Checksum [docker:18-git] 6ef266ac0949: Download complete [docker:18-git] 9d48c3bd43c5: Pull complete [docker:18-dind] 9d48c3bd43c5: Pull complete [docker:18-dind] b2c2c13f4c08: Download complete [docker:18-git] b2c2c13f4c08: Download complete [docker:18-dind] 7f94eaf8af20: Pull complete [docker:18-git] 7f94eaf8af20: Pull complete [docker:18-dind] f354b3ae6d74: Verifying Checksum [docker:18-git] f354b3ae6d74: Verifying Checksum [docker:18-dind] f354b3ae6d74: Download complete [docker:18-git] f354b3ae6d74: Download complete [docker:18-git] 9fe9984849c1: Pull complete [docker:18-dind] 9fe9984849c1: Pull complete [docker:18-git] 6ab2580d9dce: Verifying Checksum [docker:18-git] 6ab2580d9dce: Download 
complete [docker:18-dind] 8f4a6170836f: Verifying Checksum [docker:18-dind] 8f4a6170836f: Download complete [docker:18-dind] 853fedec02a1: Verifying Checksum [docker:18-dind] 853fedec02a1: Download complete [docker:18-dind] a57a377d7e5d: Verifying Checksum [docker:18-dind] a57a377d7e5d: Download complete [docker:18-git] 3091f1b4f1aa: Download complete [docker:18-dind] 3091f1b4f1aa: Download complete [docker:18-dind] ac4bc61da695: Verifying Checksum [docker:18-dind] ac4bc61da695: Download complete [docker:18-git] 3091f1b4f1aa: Pull complete [docker:18-dind] 3091f1b4f1aa: Pull complete [docker:18-dind] 6ef266ac0949: Pull complete [docker:18-git] 6ef266ac0949: Pull complete [docker:18-dind] b2c2c13f4c08: Pull complete [docker:18-git] b2c2c13f4c08: Pull complete [docker:18-dind] f354b3ae6d74: Pull complete [docker:18-git] f354b3ae6d74: Pull complete [docker:18-dind] 8f4a6170836f: Pull complete [docker:18-dind] 853fedec02a1: Pull complete [docker:18-dind] a57a377d7e5d: Pull complete [docker:18-dind] ac4bc61da695: Pull complete [docker:18-dind] Digest: sha256:86df3c3573065f2c6f24cd925fd5bc3a0aff899bdf664ff4d2e3ebab26d96bed [docker:18-dind] Status: Downloaded newer image for docker:18-dind [docker:18-dind] docker.io/library/docker:18-dind [docker:18-git] 6ab2580d9dce: Pull complete [docker:18-git] Digest: sha256:5fafa7fc518da8990feb9983a6f0d5069b8e4717e3f922e23e445a50e6c731ec [docker:18-git] Status: Downloaded newer image for docker:18-git [docker:18-git] docker.io/library/docker:18-git # Executing tests Number of definitions: 112 Suite size: 8 Suite index: 2 Execution size: 15 Execution offset: 16  --- Starting part 0 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/common' package with coverprofile in 'count' mode:  === RUN TestLogLineWithoutSecret --- PASS: TestLogLineWithoutSecret (0.00s) build_logger_test.go:63: PASS: IsStdout() build_logger_test.go:63: PASS: Write(string) === RUN TestLogLineWithSecret --- PASS: TestLogLineWithSecret (0.00s) 
build_logger_test.go:84: PASS: IsStdout() build_logger_test.go:84: PASS: Write(string) === RUN TestLogPrinters === RUN TestLogPrinters/with_entry time="2021-05-20T15:28:51Z" level=info msg=info printer=test time="2021-05-20T15:28:51Z" level=warning msg=warning printer=test time="2021-05-20T15:28:51Z" level=warning msg=softerror printer=test time="2021-05-20T15:28:51Z" level=error msg=error printer=test === RUN TestLogPrinters/null_writer --- PASS: TestLogPrinters (0.00s) --- PASS: TestLogPrinters/with_entry (0.00s) build_logger_test.go:124: PASS: IsStdout() build_logger_test.go:124: PASS: Write(string) --- PASS: TestLogPrinters/null_writer (0.00s) build_logger_test.go:124: PASS: IsStdout() build_logger_test.go:124: PASS: Write(string) === RUN TestBuildPredefinedVariables === RUN TestBuildPredefinedVariables//root/dir1 Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestBuildPredefinedVariables//root/dir1" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for successful job Uploading artifacts for successful job Cleaning up file based variables Job succeeded === RUN TestBuildPredefinedVariables//root/dir2 Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestBuildPredefinedVariables//root/dir2" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for successful job Uploading artifacts for successful job Cleaning up file based variables Job succeeded --- PASS: TestBuildPredefinedVariables (0.01s) --- PASS: TestBuildPredefinedVariables//root/dir1 (0.01s) build_test.go:1705: PASS: Prepare(string) build_test.go:1705: PASS: Finish() build_test.go:1705: PASS: Cleanup() build_test.go:1705: PASS: Shell() build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: 
Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1706: PASS: CanCreate() build_test.go:1706: PASS: GetDefaultShell() build_test.go:1706: PASS: GetFeatures(string) build_test.go:1706: PASS: Create() --- PASS: TestBuildPredefinedVariables//root/dir2 (0.00s) build_test.go:1705: PASS: Prepare(string) build_test.go:1705: PASS: Finish() build_test.go:1705: PASS: Cleanup() build_test.go:1705: PASS: Shell() build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1706: PASS: CanCreate() build_test.go:1706: PASS: GetDefaultShell() build_test.go:1706: PASS: GetFeatures(string) build_test.go:1706: PASS: Create() === RUN TestBuildRun Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestBuildRun" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for successful job Uploading artifacts for successful job Cleaning up file based variables Job succeeded --- PASS: TestBuildRun (0.00s) build_test.go:1705: PASS: Prepare(string) build_test.go:1705: PASS: Finish() build_test.go:1705: PASS: Cleanup() build_test.go:1705: PASS: Shell() build_test.go:1705: PASS: 
Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1706: PASS: CanCreate() build_test.go:1706: PASS: GetDefaultShell() build_test.go:1706: PASS: GetFeatures(string) build_test.go:1706: PASS: Create() === RUN TestBuildPanic === RUN TestBuildPanic/shell === RUN TestBuildPanic/run+cleanup === RUN TestBuildPanic/finish === RUN TestBuildPanic/finish+cleanup+shell === RUN TestBuildPanic/run+finish+cleanup === RUN TestBuildPanic/prepare === RUN TestBuildPanic/run === RUN TestBuildPanic/cleanup --- PASS: TestBuildPanic (0.01s) --- PASS: TestBuildPanic/shell (0.00s) build_test.go:164: PASS: CanCreate() build_test.go:164: PASS: GetDefaultShell() build_test.go:164: PASS: GetFeatures(string) build_test.go:164: PASS: Create() build_test.go:164: PASS: Prepare(string,string,string) build_test.go:164: PASS: Finish(string) build_test.go:164: PASS: Shell() build_test.go:164: PASS: Cleanup() --- PASS: TestBuildPanic/run+cleanup (0.00s) build_test.go:164: PASS: CanCreate() build_test.go:164: PASS: GetDefaultShell() build_test.go:164: PASS: GetFeatures(string) build_test.go:164: PASS: Create() build_test.go:164: PASS: Prepare(string,string,string) build_test.go:164: PASS: Finish(string) build_test.go:164: PASS: Shell() build_test.go:164: PASS: Run(string) build_test.go:164: PASS: Cleanup() --- PASS: TestBuildPanic/finish (0.00s) build_test.go:164: PASS: CanCreate() build_test.go:164: PASS: GetDefaultShell() build_test.go:164: PASS: GetFeatures(string) build_test.go:164: PASS: Create() build_test.go:164: PASS: Prepare(string,string,string) build_test.go:164: PASS: Finish(string) 
build_test.go:164: PASS: Shell() build_test.go:164: PASS: Run(string) build_test.go:164: PASS: Cleanup() --- PASS: TestBuildPanic/finish+cleanup+shell (0.00s) build_test.go:164: PASS: CanCreate() build_test.go:164: PASS: GetDefaultShell() build_test.go:164: PASS: GetFeatures(string) build_test.go:164: PASS: Create() build_test.go:164: PASS: Prepare(string,string,string) build_test.go:164: PASS: Finish(string) build_test.go:164: PASS: Shell() build_test.go:164: PASS: Cleanup() --- PASS: TestBuildPanic/run+finish+cleanup (0.00s) build_test.go:164: PASS: CanCreate() build_test.go:164: PASS: GetDefaultShell() build_test.go:164: PASS: GetFeatures(string) build_test.go:164: PASS: Create() build_test.go:164: PASS: Prepare(string,string,string) build_test.go:164: PASS: Finish(string) build_test.go:164: PASS: Shell() build_test.go:164: PASS: Run(string) build_test.go:164: PASS: Cleanup() --- PASS: TestBuildPanic/prepare (0.00s) build_test.go:164: PASS: CanCreate() build_test.go:164: PASS: GetDefaultShell() build_test.go:164: PASS: GetFeatures(string) build_test.go:164: PASS: Create() build_test.go:164: PASS: Prepare(string,string,string) --- PASS: TestBuildPanic/run (0.00s) build_test.go:164: PASS: CanCreate() build_test.go:164: PASS: GetDefaultShell() build_test.go:164: PASS: GetFeatures(string) build_test.go:164: PASS: Create() build_test.go:164: PASS: Prepare(string,string,string) build_test.go:164: PASS: Finish(string) build_test.go:164: PASS: Shell() build_test.go:164: PASS: Run(string) build_test.go:164: PASS: Cleanup() --- PASS: TestBuildPanic/cleanup (0.00s) build_test.go:164: PASS: CanCreate() build_test.go:164: PASS: GetDefaultShell() build_test.go:164: PASS: GetFeatures(string) build_test.go:164: PASS: Create() build_test.go:164: PASS: Prepare(string,string,string) build_test.go:164: PASS: Finish(string) build_test.go:164: PASS: Shell() build_test.go:164: PASS: Run(string) build_test.go:164: PASS: Cleanup() === RUN TestJobImageExposed === RUN 
TestJobImageExposed/normal_image_exposed Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestJobImageExposed/normal_image_exposed" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for successful job Uploading artifacts for successful job Cleaning up file based variables Job succeeded === RUN TestJobImageExposed/no_image_specified Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestJobImageExposed/no_image_specified" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for successful job Uploading artifacts for successful job Cleaning up file based variables Job succeeded === RUN TestJobImageExposed/image_with_variable_expansion Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestJobImageExposed/image_with_variable_expansion" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for successful job Uploading artifacts for successful job Cleaning up file based variables Job succeeded --- PASS: TestJobImageExposed (0.01s) --- PASS: TestJobImageExposed/normal_image_exposed (0.01s) build_test.go:1705: PASS: Prepare(string) build_test.go:1705: PASS: Finish() build_test.go:1705: PASS: Cleanup() build_test.go:1705: PASS: Shell() build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) 
build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1706: PASS: CanCreate() build_test.go:1706: PASS: GetDefaultShell() build_test.go:1706: PASS: GetFeatures(string) build_test.go:1706: PASS: Create() --- PASS: TestJobImageExposed/no_image_specified (0.00s) build_test.go:1705: PASS: Prepare(string) build_test.go:1705: PASS: Finish() build_test.go:1705: PASS: Cleanup() build_test.go:1705: PASS: Shell() build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1706: PASS: CanCreate() build_test.go:1706: PASS: GetDefaultShell() build_test.go:1706: PASS: GetFeatures(string) build_test.go:1706: PASS: Create() --- PASS: TestJobImageExposed/image_with_variable_expansion (0.00s) build_test.go:1705: PASS: Prepare(string) build_test.go:1705: PASS: Finish() build_test.go:1705: PASS: Cleanup() build_test.go:1705: PASS: Shell() build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1706: PASS: CanCreate() build_test.go:1706: PASS: GetDefaultShell() build_test.go:1706: PASS: GetFeatures(string) build_test.go:1706: PASS: Create() === RUN TestBuildRunNoModifyConfig Running with gitlab-runner 13.12.0 (7a6612da) Preparing the 
"TestBuildRunNoModifyConfig" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for successful job Uploading artifacts for successful job Cleaning up file based variables Job succeeded --- PASS: TestBuildRunNoModifyConfig (0.00s) build_test.go:1705: PASS: Prepare(string) build_test.go:1705: PASS: Finish() build_test.go:1705: PASS: Cleanup() build_test.go:1705: PASS: Shell() build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1706: PASS: CanCreate() build_test.go:1706: PASS: GetDefaultShell() build_test.go:1706: PASS: GetFeatures(string) build_test.go:1706: PASS: Create() === RUN TestRetryPrepare Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestRetryPrepare" executor ERROR: Preparation failed: prepare failed Will be retried in 0s ... ERROR: Preparation failed: prepare failed Will be retried in 0s ... 
Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for successful job Uploading artifacts for successful job Cleaning up file based variables Job succeeded --- PASS: TestRetryPrepare (0.00s) build_test.go:272: PASS: CanCreate() build_test.go:272: PASS: GetDefaultShell() build_test.go:272: PASS: GetFeatures(string) build_test.go:272: PASS: Create() build_test.go:272: PASS: Prepare(string,string,string) build_test.go:272: PASS: Prepare(string,string,string) build_test.go:272: PASS: Cleanup() build_test.go:272: PASS: Shell() build_test.go:272: PASS: Run(string) build_test.go:272: PASS: Finish() === RUN TestPrepareFailure Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestPrepareFailure" executor ERROR: Preparation failed: prepare failed Will be retried in 0s ... ERROR: Preparation failed: prepare failed Will be retried in 0s ... ERROR: Preparation failed: prepare failed Will be retried in 0s ... ERROR: Job failed (system failure): prepare failed --- PASS: TestPrepareFailure (0.00s) build_test.go:298: PASS: CanCreate() build_test.go:298: PASS: GetDefaultShell() build_test.go:298: PASS: GetFeatures(string) build_test.go:298: PASS: Create() build_test.go:298: PASS: Prepare(string,string,string) build_test.go:298: PASS: Cleanup() === RUN TestPrepareFailureOnBuildError Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestPrepareFailureOnBuildError" executor ERROR: Job failed: error --- PASS: TestPrepareFailureOnBuildError (0.00s) build_test.go:313: PASS: CanCreate() build_test.go:313: PASS: GetDefaultShell() build_test.go:313: PASS: GetFeatures(string) build_test.go:313: PASS: Create() build_test.go:313: PASS: Prepare(string,string,string) build_test.go:313: PASS: Cleanup() PASS coverage: 15.5% of statements in gitlab.com/gitlab-org/gitlab-runner/... 
ok gitlab.com/gitlab-org/gitlab-runner/common 0.068s coverage: 15.5% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 1 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/common' package with coverprofile in 'count' mode:  === RUN TestPrepareFailureOnBuildError Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestPrepareFailureOnBuildError" executor ERROR: Job failed: error --- PASS: TestPrepareFailureOnBuildError (0.00s) build_test.go:313: PASS: CanCreate() build_test.go:313: PASS: GetDefaultShell() build_test.go:313: PASS: GetFeatures(string) build_test.go:313: PASS: Create() build_test.go:313: PASS: Prepare(string,string,string) build_test.go:313: PASS: Cleanup() === RUN TestPrepareEnvironmentFailure Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "build-run-prepare-environment-failure-on-build-error" executor Preparing environment ERROR: Job failed (system failure): prepare environment: test-err. Check https://docs.gitlab.com/runner/shells/index.html#shell-profile-loading for more information --- PASS: TestPrepareEnvironmentFailure (0.00s) build_test.go:350: PASS: CanCreate() build_test.go:350: PASS: GetDefaultShell() build_test.go:350: PASS: GetFeatures(string) build_test.go:350: PASS: Create() build_test.go:350: PASS: Prepare(string,string,string) build_test.go:350: PASS: Cleanup() build_test.go:350: PASS: Shell() build_test.go:350: PASS: Run(mock.argumentMatcher) build_test.go:350: PASS: Finish(string) === RUN TestJobFailure --- PASS: TestJobFailure (0.00s) build_test.go:394: PASS: Write(string) build_test.go:394: PASS: IsStdout() build_test.go:394: PASS: SetCancelFunc(string) build_test.go:394: PASS: SetAbortFunc(string) build_test.go:394: PASS: SetMasked(string) build_test.go:394: PASS: Fail(*common.BuildError,common.JobFailureData) build_test.go:394: PASS: CanCreate() build_test.go:394: PASS: GetDefaultShell() build_test.go:394: PASS: GetFeatures(string) build_test.go:394: PASS: Create() 
build_test.go:394: PASS: Prepare(string,string,string) build_test.go:394: PASS: Cleanup() build_test.go:394: PASS: Shell() build_test.go:394: PASS: Run(mock.argumentMatcher) build_test.go:394: PASS: Run(string) build_test.go:394: PASS: Run(mock.argumentMatcher) build_test.go:394: PASS: Finish(*common.BuildError) === RUN TestJobFailureOnExecutionTimeout time="2021-05-20T15:28:55Z" level=warning msg="Error while executing file based variables removal script" error="context canceled" job=0 project=0 --- PASS: TestJobFailureOnExecutionTimeout (2.00s) build_test.go:431: PASS: Write(string) build_test.go:431: PASS: IsStdout() build_test.go:431: PASS: SetCancelFunc(string) build_test.go:431: PASS: SetAbortFunc(string) build_test.go:431: PASS: SetMasked(string) build_test.go:431: PASS: Fail(string,common.JobFailureData) build_test.go:431: PASS: CanCreate() build_test.go:431: PASS: GetDefaultShell() build_test.go:431: PASS: GetFeatures(string) build_test.go:431: PASS: Create() build_test.go:431: PASS: Prepare(string,string,string) build_test.go:431: PASS: Cleanup() build_test.go:431: PASS: Shell() build_test.go:431: PASS: Run(mock.argumentMatcher) build_test.go:431: PASS: Run(string) build_test.go:431: PASS: Finish(string) === RUN TestRunFailureRunsAfterScriptAndArtifactsOnFailure Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "build-run-run-failure" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for failed job Uploading artifacts for failed job Cleaning up file based variables ERROR: Job failed (system failure): build fail --- PASS: TestRunFailureRunsAfterScriptAndArtifactsOnFailure (0.00s) build_test.go:468: PASS: CanCreate() build_test.go:468: PASS: GetDefaultShell() build_test.go:468: PASS: GetFeatures(string) build_test.go:468: PASS: Create() build_test.go:468: PASS: Prepare(string,string,string) build_test.go:468: 
PASS: Cleanup() build_test.go:468: PASS: Shell() build_test.go:468: PASS: Run(mock.argumentMatcher) build_test.go:468: PASS: Run(mock.argumentMatcher) build_test.go:468: PASS: Run(mock.argumentMatcher) build_test.go:468: PASS: Run(mock.argumentMatcher) build_test.go:468: PASS: Run(mock.argumentMatcher) build_test.go:468: PASS: Run(mock.argumentMatcher) build_test.go:468: PASS: Run(mock.argumentMatcher) build_test.go:468: PASS: Run(mock.argumentMatcher) build_test.go:468: PASS: Run(mock.argumentMatcher) build_test.go:468: PASS: Finish(*errors.errorString) === RUN TestGetSourcesRunFailure Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestGetSourcesRunFailure" executor Preparing environment Getting source from Git repository Getting source from Git repository Getting source from Git repository Saving cache for failed job Uploading artifacts for failed job Cleaning up file based variables ERROR: Job failed (system failure): build fail --- PASS: TestGetSourcesRunFailure (0.00s) build_test.go:491: PASS: CanCreate() build_test.go:491: PASS: GetDefaultShell() build_test.go:491: PASS: GetFeatures(string) build_test.go:491: PASS: Create() build_test.go:491: PASS: Prepare(string,string,string) build_test.go:491: PASS: Cleanup() build_test.go:491: PASS: Shell() build_test.go:491: PASS: Run(mock.argumentMatcher) build_test.go:491: PASS: Run(mock.argumentMatcher) build_test.go:491: PASS: Run(mock.argumentMatcher) build_test.go:491: PASS: Run(mock.argumentMatcher) build_test.go:491: PASS: Run(mock.argumentMatcher) build_test.go:491: PASS: Finish(*errors.errorString) === RUN TestArtifactDownloadRunFailure Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestArtifactDownloadRunFailure" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Downloading artifacts Downloading artifacts Saving cache for failed job Uploading artifacts for failed job Cleaning up file based variables ERROR: Job failed (system 
failure): build fail --- PASS: TestArtifactDownloadRunFailure (0.00s) build_test.go:516: PASS: CanCreate() build_test.go:516: PASS: GetDefaultShell() build_test.go:516: PASS: GetFeatures(string) build_test.go:516: PASS: Create() build_test.go:516: PASS: Prepare(string,string,string) build_test.go:516: PASS: Cleanup() build_test.go:516: PASS: Shell() build_test.go:516: PASS: Run(mock.argumentMatcher) build_test.go:516: PASS: Run(mock.argumentMatcher) build_test.go:516: PASS: Run(mock.argumentMatcher) build_test.go:516: PASS: Run(mock.argumentMatcher) build_test.go:516: PASS: Run(mock.argumentMatcher) build_test.go:516: PASS: Run(mock.argumentMatcher) build_test.go:516: PASS: Run(mock.argumentMatcher) build_test.go:516: PASS: Finish(*errors.errorString) === RUN TestArtifactUploadRunFailure Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestArtifactUploadRunFailure" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for successful job Uploading artifacts for successful job Cleaning up file based variables ERROR: Job failed (system failure): upload fail --- PASS: TestArtifactUploadRunFailure (0.00s) build_test.go:550: PASS: CanCreate() build_test.go:550: PASS: GetDefaultShell() build_test.go:550: PASS: GetFeatures(string) build_test.go:550: PASS: Create() build_test.go:550: PASS: Prepare(string,string,string) build_test.go:550: PASS: Cleanup() build_test.go:550: PASS: Shell() build_test.go:550: PASS: Run(mock.argumentMatcher) build_test.go:550: PASS: Run(mock.argumentMatcher) build_test.go:550: PASS: Run(mock.argumentMatcher) build_test.go:550: PASS: Run(mock.argumentMatcher) build_test.go:550: PASS: Run(mock.argumentMatcher) build_test.go:550: PASS: Run(mock.argumentMatcher) build_test.go:550: PASS: Run(mock.argumentMatcher) build_test.go:550: PASS: Run(mock.argumentMatcher) build_test.go:550: PASS: 
Run(mock.argumentMatcher) build_test.go:550: PASS: Finish(*errors.errorString) === RUN TestArchiveCacheOnScriptFailure Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestArchiveCacheOnScriptFailure" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for failed job Uploading artifacts for failed job Cleaning up file based variables ERROR: Job failed (system failure): script failure --- PASS: TestArchiveCacheOnScriptFailure (0.00s) build_test.go:576: PASS: CanCreate() build_test.go:576: PASS: GetDefaultShell() build_test.go:576: PASS: GetFeatures(string) build_test.go:576: PASS: Create() build_test.go:576: PASS: Prepare(string,string,string) build_test.go:576: PASS: Cleanup() build_test.go:576: PASS: Shell() build_test.go:576: PASS: Run(mock.argumentMatcher) build_test.go:576: PASS: Run(mock.argumentMatcher) build_test.go:576: PASS: Run(mock.argumentMatcher) build_test.go:576: PASS: Run(mock.argumentMatcher) build_test.go:576: PASS: Run(mock.argumentMatcher) build_test.go:576: PASS: Run(mock.argumentMatcher) build_test.go:576: PASS: Run(mock.argumentMatcher) build_test.go:576: PASS: Run(mock.argumentMatcher) build_test.go:576: PASS: Run(mock.argumentMatcher) build_test.go:576: PASS: Finish(*errors.errorString) === RUN TestUploadArtifactsOnArchiveCacheFailure Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestUploadArtifactsOnArchiveCacheFailure" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for successful job Uploading artifacts for successful job Cleaning up file based variables ERROR: Job failed (system failure): cache failure --- PASS: TestUploadArtifactsOnArchiveCacheFailure (0.00s) build_test.go:602: PASS: CanCreate() build_test.go:602: PASS: GetDefaultShell() 
build_test.go:602: PASS: GetFeatures(string) build_test.go:602: PASS: Create() build_test.go:602: PASS: Prepare(string,string,string) build_test.go:602: PASS: Cleanup() build_test.go:602: PASS: Shell() build_test.go:602: PASS: Run(mock.argumentMatcher) build_test.go:602: PASS: Run(mock.argumentMatcher) build_test.go:602: PASS: Run(mock.argumentMatcher) build_test.go:602: PASS: Run(mock.argumentMatcher) build_test.go:602: PASS: Run(mock.argumentMatcher) build_test.go:602: PASS: Run(mock.argumentMatcher) build_test.go:602: PASS: Run(mock.argumentMatcher) build_test.go:602: PASS: Run(mock.argumentMatcher) build_test.go:602: PASS: Run(mock.argumentMatcher) build_test.go:602: PASS: Finish(*errors.errorString) PASS coverage: 14.9% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/common 2.052s coverage: 14.9% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 2 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/common' package with coverprofile in 'count' mode:  === RUN TestRestoreCacheRunFailure Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestRestoreCacheRunFailure" executor Preparing environment Getting source from Git repository Restoring cache Restoring cache Restoring cache Saving cache for failed job Uploading artifacts for failed job Cleaning up file based variables ERROR: Job failed (system failure): build fail --- PASS: TestRestoreCacheRunFailure (0.01s) build_test.go:626: PASS: CanCreate() build_test.go:626: PASS: GetDefaultShell() build_test.go:626: PASS: GetFeatures(string) build_test.go:626: PASS: Create() build_test.go:626: PASS: Prepare(string,string,string) build_test.go:626: PASS: Cleanup() build_test.go:626: PASS: Shell() build_test.go:626: PASS: Run(mock.argumentMatcher) build_test.go:626: PASS: Run(mock.argumentMatcher) build_test.go:626: PASS: Run(mock.argumentMatcher) build_test.go:626: PASS: Run(mock.argumentMatcher) build_test.go:626: PASS: 
Run(mock.argumentMatcher) build_test.go:626: PASS: Run(mock.argumentMatcher) build_test.go:626: PASS: Finish(*errors.errorString) === RUN TestRunWrongAttempts Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestRunWrongAttempts" executor Preparing environment Saving cache for failed job Uploading artifacts for failed job Cleaning up file based variables time="2021-05-20T15:28:57Z" level=warning msg="Error while executing file based variables removal script" error="number of attempts out of the range [1, 10] for stage: get_sources" job=0 project=0 ERROR: Job failed (system failure): number of attempts out of the range [1, 10] for stage: get_sources --- PASS: TestRunWrongAttempts (0.00s) build_test.go:651: PASS: Prepare(string,string,string) build_test.go:651: PASS: Cleanup() build_test.go:651: PASS: Shell() build_test.go:651: PASS: Run(string) build_test.go:651: PASS: Run(string) build_test.go:651: PASS: Finish(*errors.errorString) build_test.go:651: PASS: CanCreate() build_test.go:651: PASS: GetDefaultShell() build_test.go:651: PASS: GetFeatures(string) build_test.go:651: PASS: Create() === RUN TestRunSuccessOnSecondAttempt Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestRunSuccessOnSecondAttempt" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for successful job Uploading artifacts for successful job Cleaning up file based variables Job succeeded --- PASS: TestRunSuccessOnSecondAttempt (0.00s) build_test.go:673: PASS: CanCreate() build_test.go:673: PASS: GetDefaultShell() build_test.go:673: PASS: GetFeatures(string) build_test.go:673: PASS: Create() === RUN TestDebugTrace === RUN TestDebugTrace/variable_set_to_true === RUN TestDebugTrace/variable_set_to_a_non-bool_value === RUN TestDebugTrace/variable_set_to_true_and_feature_disabled_from_configuration === RUN TestDebugTrace/variable_not_set === RUN 
TestDebugTrace/variable_set_to_false --- PASS: TestDebugTrace (0.00s) --- PASS: TestDebugTrace/variable_set_to_true (0.00s) --- PASS: TestDebugTrace/variable_set_to_a_non-bool_value (0.00s) --- PASS: TestDebugTrace/variable_set_to_true_and_feature_disabled_from_configuration (0.00s) --- PASS: TestDebugTrace/variable_not_set (0.00s) --- PASS: TestDebugTrace/variable_set_to_false (0.00s) === RUN TestDefaultEnvVariables === RUN TestDefaultEnvVariables/Windows_UNC-style_BuildDir_(extended-length_path_support) === RUN TestDefaultEnvVariables/Windows_UNC-style_BuildDir === RUN TestDefaultEnvVariables/Windows-style_BuildDir_(CMD_or_PS) === RUN TestDefaultEnvVariables/Windows-style_BuildDir_with_forward_slashes_and_drive_letter === RUN TestDefaultEnvVariables/UNIX-style_BuildDir === RUN TestDefaultEnvVariables/Windows-style_BuildDir_in_MSYS_bash_executor_and_drive_letter) --- PASS: TestDefaultEnvVariables (0.00s) --- PASS: TestDefaultEnvVariables/Windows_UNC-style_BuildDir_(extended-length_path_support) (0.00s) --- PASS: TestDefaultEnvVariables/Windows_UNC-style_BuildDir (0.00s) --- PASS: TestDefaultEnvVariables/Windows-style_BuildDir_(CMD_or_PS) (0.00s) --- PASS: TestDefaultEnvVariables/Windows-style_BuildDir_with_forward_slashes_and_drive_letter (0.00s) --- PASS: TestDefaultEnvVariables/UNIX-style_BuildDir (0.00s) --- PASS: TestDefaultEnvVariables/Windows-style_BuildDir_in_MSYS_bash_executor_and_drive_letter) (0.00s) === RUN TestSharedEnvVariables === RUN TestSharedEnvVariables/Value:true === RUN TestSharedEnvVariables/Value:false --- PASS: TestSharedEnvVariables (0.00s) --- PASS: TestSharedEnvVariables/Value:true (0.00s) --- PASS: TestSharedEnvVariables/Value:false (0.00s) === RUN TestGetRemoteURL --- PASS: TestGetRemoteURL (0.00s) === RUN TestIsFeatureFlagOn === RUN TestIsFeatureFlagOn/invalid_value time="2021-05-20T15:28:57Z" level=error msg="Error while parsing the value of feature flag" error="strconv.ParseBool: parsing \"test\": invalid syntax" job=0 
name=FF_TEST_FEATURE project=0 value=test === RUN TestIsFeatureFlagOn/feature_flag_set_inside_config.toml_take_precedence === RUN TestIsFeatureFlagOn/no_value === RUN TestIsFeatureFlagOn/true === RUN TestIsFeatureFlagOn/1 === RUN TestIsFeatureFlagOn/false === RUN TestIsFeatureFlagOn/0 --- PASS: TestIsFeatureFlagOn (0.00s) --- PASS: TestIsFeatureFlagOn/invalid_value (0.00s) --- PASS: TestIsFeatureFlagOn/feature_flag_set_inside_config.toml_take_precedence (0.00s) --- PASS: TestIsFeatureFlagOn/no_value (0.00s) --- PASS: TestIsFeatureFlagOn/true (0.00s) --- PASS: TestIsFeatureFlagOn/1 (0.00s) --- PASS: TestIsFeatureFlagOn/false (0.00s) --- PASS: TestIsFeatureFlagOn/0 (0.00s) === RUN TestIsFeatureFlagOn_SetWithRunnerVariables === RUN TestIsFeatureFlagOn_SetWithRunnerVariables/it_has_default_value_of_FF === RUN TestIsFeatureFlagOn_SetWithRunnerVariables/it_enables_FF === RUN TestIsFeatureFlagOn_SetWithRunnerVariables/it_disable_FF --- PASS: TestIsFeatureFlagOn_SetWithRunnerVariables (0.00s) --- PASS: TestIsFeatureFlagOn_SetWithRunnerVariables/it_has_default_value_of_FF (0.00s) --- PASS: TestIsFeatureFlagOn_SetWithRunnerVariables/it_enables_FF (0.00s) --- PASS: TestIsFeatureFlagOn_SetWithRunnerVariables/it_disable_FF (0.00s) === RUN TestIsFeatureFlagOn_Precedence === RUN TestIsFeatureFlagOn_Precedence/config_takes_precedence_over_job_variable === RUN TestIsFeatureFlagOn_Precedence/config_takes_precedence_over_configured_environments === RUN TestIsFeatureFlagOn_Precedence/variable_defined_at_job_take_precedence_over_configured_environments --- PASS: TestIsFeatureFlagOn_Precedence (0.00s) --- PASS: TestIsFeatureFlagOn_Precedence/config_takes_precedence_over_job_variable (0.00s) --- PASS: TestIsFeatureFlagOn_Precedence/config_takes_precedence_over_configured_environments (0.00s) --- PASS: TestIsFeatureFlagOn_Precedence/variable_defined_at_job_take_precedence_over_configured_environments (0.00s) PASS coverage: 14.7% of statements in gitlab.com/gitlab-org/gitlab-runner/... 
ok gitlab.com/gitlab-org/gitlab-runner/common 0.032s coverage: 14.7% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 3 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/common' package with coverprofile in 'count' mode:  === RUN TestStartBuild === RUN TestStartBuild/invalid_GIT_CLONE_PATH_was_specified === RUN TestStartBuild/no_job_specific_build_dir_with_no_shared_dir === RUN TestStartBuild/no_job_specified_build_dir_with_shared_dir === RUN TestStartBuild/valid_GIT_CLONE_PATH_was_specified === RUN TestStartBuild/valid_GIT_CLONE_PATH_using_CI_BUILDS_DIR_was_specified === RUN TestStartBuild/out-of-bounds_GIT_CLONE_PATH_was_specified === RUN TestStartBuild/custom_build_disabled --- PASS: TestStartBuild (0.00s) --- PASS: TestStartBuild/invalid_GIT_CLONE_PATH_was_specified (0.00s) --- PASS: TestStartBuild/no_job_specific_build_dir_with_no_shared_dir (0.00s) --- PASS: TestStartBuild/no_job_specified_build_dir_with_shared_dir (0.00s) --- PASS: TestStartBuild/valid_GIT_CLONE_PATH_was_specified (0.00s) --- PASS: TestStartBuild/valid_GIT_CLONE_PATH_using_CI_BUILDS_DIR_was_specified (0.00s) --- PASS: TestStartBuild/out-of-bounds_GIT_CLONE_PATH_was_specified (0.00s) --- PASS: TestStartBuild/custom_build_disabled (0.00s) === RUN TestSkipBuildStageFeatureFlag === RUN TestSkipBuildStageFeatureFlag/true === RUN TestSkipBuildStageFeatureFlag/false --- PASS: TestSkipBuildStageFeatureFlag (0.00s) --- PASS: TestSkipBuildStageFeatureFlag/true (0.00s) build_test.go:1188: PASS: Shell() build_test.go:1188: PASS: Run(mock.argumentMatcher) --- PASS: TestSkipBuildStageFeatureFlag/false (0.00s) build_test.go:1188: PASS: Shell() build_test.go:1188: PASS: Run(mock.argumentMatcher) build_test.go:1190: PASS: GetName() build_test.go:1190: PASS: GenerateScript(string,string) build_test.go:1190: PASS: GenerateScript(string,string) === RUN TestWaitForTerminal === RUN TestWaitForTerminal/Cancel_build Terminal is connected, will time out in 1h0m0s... 
=== RUN TestWaitForTerminal/Terminal_Timeout Terminal is connected, will time out in 1s... terminal session timed out (maximum time allowed - 1s) time="2021-05-20T15:29:00Z" level=warning msg="Closed active terminal connection" uri=/session/774b71d624019a957cb7c22445414e8820c421ffb00b7e8e8d708ff6bf5ca3f1 === RUN TestWaitForTerminal/System_Interrupt Terminal is connected, will time out in 1h0m0s... Terminal disconnected === RUN TestWaitForTerminal/Terminal_Disconnect Terminal is connected, will time out in 1h0m0s... Terminal disconnected --- PASS: TestWaitForTerminal (1.01s) --- PASS: TestWaitForTerminal/Cancel_build (0.00s) build_test.go:1313: PASS: Connect() build_test.go:1313: PASS: Close() build_test.go:1313: PASS: Start(string,string,string,string) --- PASS: TestWaitForTerminal/Terminal_Timeout (1.00s) build_test.go:1313: PASS: Connect() build_test.go:1313: PASS: Close() build_test.go:1313: PASS: Start(string,string,string,string) --- PASS: TestWaitForTerminal/System_Interrupt (0.00s) build_test.go:1313: PASS: Connect() build_test.go:1313: PASS: Close() build_test.go:1313: PASS: Start(string,string,string,string) --- PASS: TestWaitForTerminal/Terminal_Disconnect (0.00s) build_test.go:1313: PASS: Connect() build_test.go:1313: PASS: Close() build_test.go:1313: PASS: Start(string,string,string,string) === RUN TestBuild_IsLFSSmudgeDisabled === RUN TestBuild_IsLFSSmudgeDisabled/variable_set_to_1 === RUN TestBuild_IsLFSSmudgeDisabled/variable_set_to_0 === RUN TestBuild_IsLFSSmudgeDisabled/variable_not_set === RUN TestBuild_IsLFSSmudgeDisabled/variable_empty === RUN TestBuild_IsLFSSmudgeDisabled/variable_set_to_true === RUN TestBuild_IsLFSSmudgeDisabled/variable_set_to_false --- PASS: TestBuild_IsLFSSmudgeDisabled (0.00s) --- PASS: TestBuild_IsLFSSmudgeDisabled/variable_set_to_1 (0.00s) --- PASS: TestBuild_IsLFSSmudgeDisabled/variable_set_to_0 (0.00s) --- PASS: TestBuild_IsLFSSmudgeDisabled/variable_not_set (0.00s) --- PASS: 
TestBuild_IsLFSSmudgeDisabled/variable_empty (0.00s) --- PASS: TestBuild_IsLFSSmudgeDisabled/variable_set_to_true (0.00s) --- PASS: TestBuild_IsLFSSmudgeDisabled/variable_set_to_false (0.00s) === RUN TestGitCleanFlags === RUN TestGitCleanFlags/empty_clean_flags === RUN TestGitCleanFlags/use_custom_flags === RUN TestGitCleanFlags/use_custom_flags_with_multiple_arguments === RUN TestGitCleanFlags/disabled --- PASS: TestGitCleanFlags (0.00s) --- PASS: TestGitCleanFlags/empty_clean_flags (0.00s) --- PASS: TestGitCleanFlags/use_custom_flags (0.00s) --- PASS: TestGitCleanFlags/use_custom_flags_with_multiple_arguments (0.00s) --- PASS: TestGitCleanFlags/disabled (0.00s) === RUN TestGitFetchFlags === RUN TestGitFetchFlags/use_custom_flags === RUN TestGitFetchFlags/use_custom_flags_with_multiple_arguments === RUN TestGitFetchFlags/disabled === RUN TestGitFetchFlags/empty_fetch_flags --- PASS: TestGitFetchFlags (0.00s) --- PASS: TestGitFetchFlags/use_custom_flags (0.00s) --- PASS: TestGitFetchFlags/use_custom_flags_with_multiple_arguments (0.00s) --- PASS: TestGitFetchFlags/disabled (0.00s) --- PASS: TestGitFetchFlags/empty_fetch_flags (0.00s) === RUN TestDefaultVariables === RUN TestDefaultVariables/get_default_CI_SERVER_value === RUN TestDefaultVariables/get_default_CI_PROJECT_DIR_value === RUN TestDefaultVariables/get_overwritten_CI_PROJECT_DIR_value --- PASS: TestDefaultVariables (0.00s) --- PASS: TestDefaultVariables/get_default_CI_SERVER_value (0.00s) --- PASS: TestDefaultVariables/get_default_CI_PROJECT_DIR_value (0.00s) --- PASS: TestDefaultVariables/get_overwritten_CI_PROJECT_DIR_value (0.00s) === RUN TestBuildFinishTimeout === RUN TestBuildFinishTimeout/channel_returns_first === RUN TestBuildFinishTimeout/timeout_returns_first --- PASS: TestBuildFinishTimeout (0.01s) --- PASS: TestBuildFinishTimeout/channel_returns_first (0.00s) --- PASS: TestBuildFinishTimeout/timeout_returns_first (0.01s) === RUN TestProjectUniqueName === RUN 
TestProjectUniqueName/project_non_rfc1132_unique_name === RUN TestProjectUniqueName/project_normal_unique_name --- PASS: TestProjectUniqueName (0.00s) --- PASS: TestProjectUniqueName/project_non_rfc1132_unique_name (0.00s) --- PASS: TestProjectUniqueName/project_normal_unique_name (0.00s) === RUN TestBuildStages === RUN TestBuildStages/script_only_build === RUN TestBuildStages/multistep_build --- PASS: TestBuildStages (0.00s) --- PASS: TestBuildStages/script_only_build (0.00s) --- PASS: TestBuildStages/multistep_build (0.00s) PASS coverage: 11.7% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/common 1.041s coverage: 11.7% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 4 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/common' package with coverprofile in 'count' mode:  === RUN TestBuild_GetExecutorJobSectionAttempts === RUN TestBuild_GetExecutorJobSectionAttempts/#00 === RUN TestBuild_GetExecutorJobSectionAttempts/3 === RUN TestBuild_GetExecutorJobSectionAttempts/0 === RUN TestBuild_GetExecutorJobSectionAttempts/99 --- PASS: TestBuild_GetExecutorJobSectionAttempts (0.00s) --- PASS: TestBuild_GetExecutorJobSectionAttempts/#00 (0.00s) --- PASS: TestBuild_GetExecutorJobSectionAttempts/3 (0.00s) --- PASS: TestBuild_GetExecutorJobSectionAttempts/0 (0.00s) --- PASS: TestBuild_GetExecutorJobSectionAttempts/99 (0.00s) === RUN TestBuild_getFeatureFlagInfo === RUN TestBuild_getFeatureFlagInfo/true === RUN TestBuild_getFeatureFlagInfo/1 === RUN TestBuild_getFeatureFlagInfo/invalid time="2021-05-20T15:29:02Z" level=error msg="Error while parsing the value of feature flag" error="strconv.ParseBool: parsing \"invalid\": invalid syntax" job=0 name=FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION project=0 value=invalid --- PASS: TestBuild_getFeatureFlagInfo (0.00s) --- PASS: TestBuild_getFeatureFlagInfo/true (0.00s) --- PASS: TestBuild_getFeatureFlagInfo/1 (0.00s) --- PASS: 
TestBuild_getFeatureFlagInfo/invalid (0.00s) === RUN TestSecretsResolving === RUN TestSecretsResolving/error_on_creating_resolver Running with gitlab-runner 13.12.0 (7a6612da) ERROR: Job failed (system failure): creating secrets resolver: assert.AnError general error for testing === RUN TestSecretsResolving/error_on_secrets_resolving Running with gitlab-runner 13.12.0 (7a6612da) ERROR: Job failed (system failure): resolving secrets: assert.AnError general error for testing === RUN TestSecretsResolving/secrets_resolved Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestSecretsResolving/secrets_resolved" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for successful job Uploading artifacts for successful job Cleaning up file based variables Job succeeded === RUN TestSecretsResolving/secrets_not_present Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "TestSecretsResolving/secrets_not_present" executor Preparing environment Getting source from Git repository Restoring cache Downloading artifacts Executing "step_script" stage of the job script Running after_script Saving cache for successful job Uploading artifacts for successful job Cleaning up file based variables Job succeeded --- PASS: TestSecretsResolving (0.01s) --- PASS: TestSecretsResolving/error_on_creating_resolver (0.00s) build_test.go:1787: PASS: CanCreate() build_test.go:1787: PASS: GetDefaultShell() build_test.go:1787: PASS: GetFeatures(string) --- PASS: TestSecretsResolving/error_on_secrets_resolving (0.00s) build_test.go:1787: PASS: CanCreate() build_test.go:1787: PASS: GetDefaultShell() build_test.go:1787: PASS: GetFeatures(string) build_test.go:1877: PASS: Resolve(common.Secrets) --- PASS: TestSecretsResolving/secrets_resolved (0.00s) build_test.go:1705: PASS: Prepare(string) build_test.go:1705: PASS: Finish() build_test.go:1705: PASS: 
Cleanup() build_test.go:1705: PASS: Shell() build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1706: PASS: CanCreate() build_test.go:1706: PASS: GetDefaultShell() build_test.go:1706: PASS: GetFeatures(string) build_test.go:1706: PASS: Create() build_test.go:1880: PASS: Resolve(common.Secrets) --- PASS: TestSecretsResolving/secrets_not_present (0.00s) build_test.go:1705: PASS: Prepare(string) build_test.go:1705: PASS: Finish() build_test.go:1705: PASS: Cleanup() build_test.go:1705: PASS: Shell() build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1705: PASS: Run(mock.argumentMatcher) build_test.go:1706: PASS: CanCreate() build_test.go:1706: PASS: GetDefaultShell() build_test.go:1706: PASS: GetFeatures(string) build_test.go:1706: PASS: Create() === RUN TestCacheS3Config_ShouldUseIAMCredentials === RUN TestCacheS3Config_ShouldUseIAMCredentials/SecretKey_is_empty === RUN TestCacheS3Config_ShouldUseIAMCredentials/AccessKey_is_empty === RUN TestCacheS3Config_ShouldUseIAMCredentials/ServerAddress_is_empty === RUN TestCacheS3Config_ShouldUseIAMCredentials/ServerAddress_&_AccessKey_are_empty === RUN TestCacheS3Config_ShouldUseIAMCredentials/ServerAddress_&_SecretKey_are_empty === 
RUN TestCacheS3Config_ShouldUseIAMCredentials/Nothing_is_empty === RUN TestCacheS3Config_ShouldUseIAMCredentials/Everything_is_empty === RUN TestCacheS3Config_ShouldUseIAMCredentials/Both_AccessKey_&_SecretKey_are_empty --- PASS: TestCacheS3Config_ShouldUseIAMCredentials (0.00s) --- PASS: TestCacheS3Config_ShouldUseIAMCredentials/SecretKey_is_empty (0.00s) --- PASS: TestCacheS3Config_ShouldUseIAMCredentials/AccessKey_is_empty (0.00s) --- PASS: TestCacheS3Config_ShouldUseIAMCredentials/ServerAddress_is_empty (0.00s) --- PASS: TestCacheS3Config_ShouldUseIAMCredentials/ServerAddress_&_AccessKey_are_empty (0.00s) --- PASS: TestCacheS3Config_ShouldUseIAMCredentials/ServerAddress_&_SecretKey_are_empty (0.00s) --- PASS: TestCacheS3Config_ShouldUseIAMCredentials/Nothing_is_empty (0.00s) --- PASS: TestCacheS3Config_ShouldUseIAMCredentials/Everything_is_empty (0.00s) --- PASS: TestCacheS3Config_ShouldUseIAMCredentials/Both_AccessKey_&_SecretKey_are_empty (0.00s) === RUN TestConfigParse === RUN TestConfigParse/parse_Service_as_table_int_value_name === RUN TestConfigParse/parse_Service_as_table_with_only_alias === RUN TestConfigParse/parse_Service_runners.docker_and_runners.docker.services === RUN TestConfigParse/check_that_GracefulKillTimeout_and_ForceKillTimeout_can't_be_set === RUN TestConfigParse/setting_DNS_policy_to_cluster-first-with-host-net === RUN TestConfigParse/parse_Service_as_table === RUN TestConfigParse/check_node_affinities === RUN TestConfigParse/setting_DNS_policy_to_default === RUN TestConfigParse/setting_DNS_policy_to_cluster-first === RUN TestConfigParse/fail_setting_DNS_policy_to_invalid_value === RUN TestConfigParse/fail_setting_DNS_policy_to_empty_value_returns_default_value === RUN TestConfigParse/parse_Service_as_table_int_value_alias === RUN TestConfigParse/parse_Service_as_table_with_only_name === RUN TestConfigParse/setting_DNS_policy_to_none --- PASS: TestConfigParse (0.00s) --- PASS: TestConfigParse/parse_Service_as_table_int_value_name (0.00s) 
--- PASS: TestConfigParse/parse_Service_as_table_with_only_alias (0.00s) --- PASS: TestConfigParse/parse_Service_runners.docker_and_runners.docker.services (0.00s) --- PASS: TestConfigParse/check_that_GracefulKillTimeout_and_ForceKillTimeout_can't_be_set (0.00s) --- PASS: TestConfigParse/setting_DNS_policy_to_cluster-first-with-host-net (0.00s) --- PASS: TestConfigParse/parse_Service_as_table (0.00s) --- PASS: TestConfigParse/check_node_affinities (0.00s) --- PASS: TestConfigParse/setting_DNS_policy_to_default (0.00s) --- PASS: TestConfigParse/setting_DNS_policy_to_cluster-first (0.00s) --- PASS: TestConfigParse/fail_setting_DNS_policy_to_invalid_value (0.00s) --- PASS: TestConfigParse/fail_setting_DNS_policy_to_empty_value_returns_default_value (0.00s) --- PASS: TestConfigParse/parse_Service_as_table_int_value_alias (0.00s) --- PASS: TestConfigParse/parse_Service_as_table_with_only_name (0.00s) --- PASS: TestConfigParse/setting_DNS_policy_to_none (0.00s) === RUN TestKubernetesHostAliases === RUN TestKubernetesHostAliases/parse_Kubernetes_HostAliases_with_empty_list === RUN TestKubernetesHostAliases/parse_Kubernetes_HostAliases_with_unique_ips === RUN TestKubernetesHostAliases/parse_Kubernetes_HostAliases_with_duplicated_ip === RUN TestKubernetesHostAliases/parse_Kubernetes_HostAliases_with_duplicated_hostname --- PASS: TestKubernetesHostAliases (0.00s) --- PASS: TestKubernetesHostAliases/parse_Kubernetes_HostAliases_with_empty_list (0.00s) --- PASS: TestKubernetesHostAliases/parse_Kubernetes_HostAliases_with_unique_ips (0.00s) --- PASS: TestKubernetesHostAliases/parse_Kubernetes_HostAliases_with_duplicated_ip (0.00s) --- PASS: TestKubernetesHostAliases/parse_Kubernetes_HostAliases_with_duplicated_hostname (0.00s) === RUN TestService_ToImageDefinition === RUN TestService_ToImageDefinition/name_and_alias === RUN TestService_ToImageDefinition/command_specified === RUN TestService_ToImageDefinition/entrypoint_specified === RUN 
TestService_ToImageDefinition/command_and_entrypoint_specified === RUN TestService_ToImageDefinition/empty_service === RUN TestService_ToImageDefinition/only_name === RUN TestService_ToImageDefinition/only_alias --- PASS: TestService_ToImageDefinition (0.00s) --- PASS: TestService_ToImageDefinition/name_and_alias (0.00s) --- PASS: TestService_ToImageDefinition/command_specified (0.00s) --- PASS: TestService_ToImageDefinition/entrypoint_specified (0.00s) --- PASS: TestService_ToImageDefinition/command_and_entrypoint_specified (0.00s) --- PASS: TestService_ToImageDefinition/empty_service (0.00s) --- PASS: TestService_ToImageDefinition/only_name (0.00s) --- PASS: TestService_ToImageDefinition/only_alias (0.00s) === RUN TestDockerMachine === RUN TestDockerMachine/autoscaling_config_active === RUN TestDockerMachine/autoscaling_overrides_offpeak_config === RUN TestDockerMachine/global_config_only === RUN TestDockerMachine/offpeak_active === RUN TestDockerMachine/offpeak_inactive === RUN TestDockerMachine/offpeak_invalid_format === RUN TestDockerMachine/autoscaling_config_inactive === RUN TestDockerMachine/last_matching_autoscaling_config_is_selected === RUN TestDockerMachine/autoscaling_invalid_period_config --- PASS: TestDockerMachine (0.00s) --- PASS: TestDockerMachine/autoscaling_config_active (0.00s) --- PASS: TestDockerMachine/autoscaling_overrides_offpeak_config (0.00s) --- PASS: TestDockerMachine/global_config_only (0.00s) --- PASS: TestDockerMachine/offpeak_active (0.00s) --- PASS: TestDockerMachine/offpeak_inactive (0.00s) --- PASS: TestDockerMachine/offpeak_invalid_format (0.00s) --- PASS: TestDockerMachine/autoscaling_config_inactive (0.00s) --- PASS: TestDockerMachine/last_matching_autoscaling_config_is_selected (0.00s) --- PASS: TestDockerMachine/autoscaling_invalid_period_config (0.00s) === RUN TestRunnerSettings_GetGracefulKillTimeout_GetForceKillTimeout === RUN TestRunnerSettings_GetGracefulKillTimeout_GetForceKillTimeout/undefined === RUN 
TestRunnerSettings_GetGracefulKillTimeout_GetForceKillTimeout/timeouts_lower_than_0 === RUN TestRunnerSettings_GetGracefulKillTimeout_GetForceKillTimeout/timeouts_greater_than_0 --- PASS: TestRunnerSettings_GetGracefulKillTimeout_GetForceKillTimeout (0.00s) --- PASS: TestRunnerSettings_GetGracefulKillTimeout_GetForceKillTimeout/undefined (0.00s) --- PASS: TestRunnerSettings_GetGracefulKillTimeout_GetForceKillTimeout/timeouts_lower_than_0 (0.00s) --- PASS: TestRunnerSettings_GetGracefulKillTimeout_GetForceKillTimeout/timeouts_greater_than_0 (0.00s) === RUN TestDockerConfig_GetPullPolicies === RUN TestDockerConfig_GetPullPolicies/empty_pull_policy === RUN TestDockerConfig_GetPullPolicies/empty_string_pull_policy === RUN TestDockerConfig_GetPullPolicies/known_elements_in_pull_policy === RUN TestDockerConfig_GetPullPolicies/invalid_pull_policy === RUN TestDockerConfig_GetPullPolicies/nil_pull_policy --- PASS: TestDockerConfig_GetPullPolicies (0.00s) --- PASS: TestDockerConfig_GetPullPolicies/empty_pull_policy (0.00s) --- PASS: TestDockerConfig_GetPullPolicies/empty_string_pull_policy (0.00s) --- PASS: TestDockerConfig_GetPullPolicies/known_elements_in_pull_policy (0.00s) --- PASS: TestDockerConfig_GetPullPolicies/invalid_pull_policy (0.00s) --- PASS: TestDockerConfig_GetPullPolicies/nil_pull_policy (0.00s) PASS coverage: 17.8% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/common 0.033s coverage: 17.8% of statements in gitlab.com/gitlab-org/gitlab-runner/...  
--- Starting part 5 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/common' package with coverprofile in 'count' mode:  === RUN TestKubernetesConfig_GetPullPolicies === RUN TestKubernetesConfig_GetPullPolicies/nil_pull_policy === RUN TestKubernetesConfig_GetPullPolicies/empty_pull_policy === RUN TestKubernetesConfig_GetPullPolicies/empty_string_pull_policy === RUN TestKubernetesConfig_GetPullPolicies/known_elements_in_pull_policy === RUN TestKubernetesConfig_GetPullPolicies/invalid_pull_policy --- PASS: TestKubernetesConfig_GetPullPolicies (0.00s) --- PASS: TestKubernetesConfig_GetPullPolicies/nil_pull_policy (0.00s) --- PASS: TestKubernetesConfig_GetPullPolicies/empty_pull_policy (0.00s) --- PASS: TestKubernetesConfig_GetPullPolicies/empty_string_pull_policy (0.00s) --- PASS: TestKubernetesConfig_GetPullPolicies/known_elements_in_pull_policy (0.00s) --- PASS: TestKubernetesConfig_GetPullPolicies/invalid_pull_policy (0.00s) === RUN TestStringOrArray_UnmarshalTOML === RUN TestStringOrArray_UnmarshalTOML/no_fields === RUN TestStringOrArray_UnmarshalTOML/slice_with_invalid_single_value === RUN TestStringOrArray_UnmarshalTOML/slice_with_mixed_values === RUN TestStringOrArray_UnmarshalTOML/slice_with_invalid_values === RUN TestStringOrArray_UnmarshalTOML/empty_string_or_array === RUN TestStringOrArray_UnmarshalTOML/string === RUN TestStringOrArray_UnmarshalTOML/valid_slice_with_multiple_values --- PASS: TestStringOrArray_UnmarshalTOML (0.00s) --- PASS: TestStringOrArray_UnmarshalTOML/no_fields (0.00s) --- PASS: TestStringOrArray_UnmarshalTOML/slice_with_invalid_single_value (0.00s) --- PASS: TestStringOrArray_UnmarshalTOML/slice_with_mixed_values (0.00s) --- PASS: TestStringOrArray_UnmarshalTOML/slice_with_invalid_values (0.00s) --- PASS: TestStringOrArray_UnmarshalTOML/empty_string_or_array (0.00s) --- PASS: TestStringOrArray_UnmarshalTOML/string (0.00s) --- PASS: TestStringOrArray_UnmarshalTOML/valid_slice_with_multiple_values (0.00s) === RUN 
TestRunnerSettings_IsFeatureFlagOn === RUN TestRunnerSettings_IsFeatureFlagOn/feature_flag_not_configured === RUN TestRunnerSettings_IsFeatureFlagOn/feature_flag_not_configured_but_feature_flag_default_is_true === RUN TestRunnerSettings_IsFeatureFlagOn/feature_flag_on === RUN TestRunnerSettings_IsFeatureFlagOn/feature_flag_off --- PASS: TestRunnerSettings_IsFeatureFlagOn (0.00s) --- PASS: TestRunnerSettings_IsFeatureFlagOn/feature_flag_not_configured (0.00s) --- PASS: TestRunnerSettings_IsFeatureFlagOn/feature_flag_not_configured_but_feature_flag_default_is_true (0.00s) --- PASS: TestRunnerSettings_IsFeatureFlagOn/feature_flag_on (0.00s) --- PASS: TestRunnerSettings_IsFeatureFlagOn/feature_flag_off (0.00s) === RUN TestBuildErrorIs === RUN TestBuildErrorIs/two_build_errors_with_the_same_failure_reason === RUN TestBuildErrorIs/different_failure_reasons === RUN TestBuildErrorIs/not_matching_errors --- PASS: TestBuildErrorIs (0.00s) --- PASS: TestBuildErrorIs/two_build_errors_with_the_same_failure_reason (0.00s) --- PASS: TestBuildErrorIs/different_failure_reasons (0.00s) --- PASS: TestBuildErrorIs/not_matching_errors (0.00s) === RUN TestUnwrapBuildError --- PASS: TestUnwrapBuildError (0.00s) === RUN TestCacheCheckPolicy --- PASS: TestCacheCheckPolicy (0.00s) === RUN TestShouldCache === RUN TestShouldCache/jobSuccess=true,when=on_success === RUN TestShouldCache/jobSuccess=true,when=always === RUN TestShouldCache/jobSuccess=true,when=on_failure === RUN TestShouldCache/jobSuccess=false,when=on_success === RUN TestShouldCache/jobSuccess=false,when=always === RUN TestShouldCache/jobSuccess=false,when=on_failure --- PASS: TestShouldCache (0.00s) --- PASS: TestShouldCache/jobSuccess=true,when=on_success (0.00s) --- PASS: TestShouldCache/jobSuccess=true,when=always (0.00s) --- PASS: TestShouldCache/jobSuccess=true,when=on_failure (0.00s) --- PASS: TestShouldCache/jobSuccess=false,when=on_success (0.00s) --- PASS: TestShouldCache/jobSuccess=false,when=always (0.00s) --- PASS: 
TestShouldCache/jobSuccess=false,when=on_failure (0.00s) === RUN TestSecrets_expandVariables === RUN TestSecrets_expandVariables/no_secrets_defined === RUN TestSecrets_expandVariables/nil_vault_secret === RUN TestSecrets_expandVariables/vault_missing_data === RUN TestSecrets_expandVariables/vault_missing_jwt_data === RUN TestSecrets_expandVariables/vault_secret_defined --- PASS: TestSecrets_expandVariables (0.00s) --- PASS: TestSecrets_expandVariables/no_secrets_defined (0.00s) --- PASS: TestSecrets_expandVariables/nil_vault_secret (0.00s) --- PASS: TestSecrets_expandVariables/vault_missing_data (0.00s) --- PASS: TestSecrets_expandVariables/vault_missing_jwt_data (0.00s) --- PASS: TestSecrets_expandVariables/vault_secret_defined (0.00s) === RUN TestJobResponse_JobURL --- PASS: TestJobResponse_JobURL (0.00s) === RUN TestDefaultResolver_Resolve === RUN TestDefaultResolver_Resolve/secret_resolved_properly === RUN TestDefaultResolver_Resolve/no_supported_resolvers_present === RUN TestDefaultResolver_Resolve/resolver_creation_error === RUN TestDefaultResolver_Resolve/no_secrets_to_resolve === RUN TestDefaultResolver_Resolve/error_on_secret_resolving --- PASS: TestDefaultResolver_Resolve (0.00s) --- PASS: TestDefaultResolver_Resolve/secret_resolved_properly (0.00s) secrets_test.go:39: PASS: Println(string) secrets_test.go:149: PASS: IsSupported() secrets_test.go:149: PASS: Name() secrets_test.go:149: PASS: Resolve() secrets_test.go:149: PASS: IsSupported() --- PASS: TestDefaultResolver_Resolve/no_supported_resolvers_present (0.00s) secrets_test.go:92: PASS: Println(string) secrets_test.go:92: PASS: Warningln(string) secrets_test.go:149: PASS: IsSupported() secrets_test.go:149: PASS: Name() secrets_test.go:149: PASS: IsSupported() --- PASS: TestDefaultResolver_Resolve/resolver_creation_error (0.00s) --- PASS: TestDefaultResolver_Resolve/no_secrets_to_resolve (0.00s) secrets_test.go:39: PASS: Println(string) --- PASS: TestDefaultResolver_Resolve/error_on_secret_resolving 
(0.00s) secrets_test.go:39: PASS: Println(string) secrets_test.go:144: PASS: IsSupported() secrets_test.go:144: PASS: Name() secrets_test.go:144: PASS: Resolve() secrets_test.go:144: PASS: IsSupported() PASS coverage: 4.7% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/common 0.019s coverage: 4.7% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 6 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/common' package with coverprofile in 'count' mode:  === RUN TestVariablesJSON --- PASS: TestVariablesJSON (0.00s) === RUN TestVariableString --- PASS: TestVariableString (0.00s) === RUN TestPublicAndInternalVariables --- PASS: TestPublicAndInternalVariables (0.00s) === RUN TestMaskedVariables --- PASS: TestMaskedVariables (0.00s) === RUN TestListVariables --- PASS: TestListVariables (0.00s) === RUN TestGetVariable --- PASS: TestGetVariable (0.00s) === RUN TestParseVariable --- PASS: TestParseVariable (0.00s) === RUN TestInvalidParseVariable --- PASS: TestInvalidParseVariable (0.00s) === RUN TestVariablesExpansion --- PASS: TestVariablesExpansion (0.00s) === RUN TestSpecialVariablesExpansion --- PASS: TestSpecialVariablesExpansion (0.00s) PASS coverage: 1.5% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/common 0.020s coverage: 1.5% of statements in gitlab.com/gitlab-org/gitlab-runner/...  
--- Starting part 7 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/common' package with coverprofile in 'count' mode:  === RUN TestOverwriteKey --- PASS: TestOverwriteKey (0.00s) === RUN TestMultipleUsageOfAKey === RUN TestMultipleUsageOfAKey/defined_at_job_level === RUN TestMultipleUsageOfAKey/defined_at_default_and_job_level === RUN TestMultipleUsageOfAKey/defined_at_config,_default_and_job_level === RUN TestMultipleUsageOfAKey/defined_at_config_and_default_level === RUN TestMultipleUsageOfAKey/defined_at_config_level --- PASS: TestMultipleUsageOfAKey (0.00s) --- PASS: TestMultipleUsageOfAKey/defined_at_job_level (0.00s) --- PASS: TestMultipleUsageOfAKey/defined_at_default_and_job_level (0.00s) --- PASS: TestMultipleUsageOfAKey/defined_at_config,_default_and_job_level (0.00s) --- PASS: TestMultipleUsageOfAKey/defined_at_config_and_default_level (0.00s) --- PASS: TestMultipleUsageOfAKey/defined_at_config_level (0.00s) === RUN TestRawVariableExpansion === RUN TestRawVariableExpansion/raw-true === RUN TestRawVariableExpansion/raw-false --- PASS: TestRawVariableExpansion (0.00s) --- PASS: TestRawVariableExpansion/raw-true (0.00s) --- PASS: TestRawVariableExpansion/raw-false (0.00s) === RUN TestPredefinedServerVariables --- PASS: TestPredefinedServerVariables (0.00s) PASS coverage: 2.1% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/common 0.020s coverage: 2.1% of statements in gitlab.com/gitlab-org/gitlab-runner/...  
--- Starting part 0 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/executors/custom' package with coverprofile in 'count' mode:  Compiling test executor Executing: /usr/local/go/bin/go build -o /tmp/test_executor415184490/main testdata/test_executor/main.go === RUN TestConfig_GetConfigExecTimeout === RUN TestConfig_GetConfigExecTimeout/source_value_greater_than_zero === RUN TestConfig_GetConfigExecTimeout/source_undefined === RUN TestConfig_GetConfigExecTimeout/source_value_lower_than_zero --- PASS: TestConfig_GetConfigExecTimeout (0.00s) --- PASS: TestConfig_GetConfigExecTimeout/source_value_greater_than_zero (0.00s) --- PASS: TestConfig_GetConfigExecTimeout/source_undefined (0.00s) --- PASS: TestConfig_GetConfigExecTimeout/source_value_lower_than_zero (0.00s) === RUN TestConfig_GetPrepareExecTimeout === RUN TestConfig_GetPrepareExecTimeout/source_undefined === RUN TestConfig_GetPrepareExecTimeout/source_value_lower_than_zero === RUN TestConfig_GetPrepareExecTimeout/source_value_greater_than_zero --- PASS: TestConfig_GetPrepareExecTimeout (0.00s) --- PASS: TestConfig_GetPrepareExecTimeout/source_undefined (0.00s) --- PASS: TestConfig_GetPrepareExecTimeout/source_value_lower_than_zero (0.00s) --- PASS: TestConfig_GetPrepareExecTimeout/source_value_greater_than_zero (0.00s) === RUN TestConfig_GetCleanupExecTimeout === RUN TestConfig_GetCleanupExecTimeout/source_undefined === RUN TestConfig_GetCleanupExecTimeout/source_value_lower_than_zero === RUN TestConfig_GetCleanupExecTimeout/source_value_greater_than_zero --- PASS: TestConfig_GetCleanupExecTimeout (0.00s) --- PASS: TestConfig_GetCleanupExecTimeout/source_undefined (0.00s) --- PASS: TestConfig_GetCleanupExecTimeout/source_value_lower_than_zero (0.00s) --- PASS: TestConfig_GetCleanupExecTimeout/source_value_greater_than_zero (0.00s) === RUN TestConfig_GetTerminateTimeout === RUN TestConfig_GetTerminateTimeout/source_undefined === RUN TestConfig_GetTerminateTimeout/source_value_lower_than_zero === RUN 
TestConfig_GetTerminateTimeout/source_value_greater_than_zero --- PASS: TestConfig_GetTerminateTimeout (0.00s) --- PASS: TestConfig_GetTerminateTimeout/source_undefined (0.00s) --- PASS: TestConfig_GetTerminateTimeout/source_value_lower_than_zero (0.00s) --- PASS: TestConfig_GetTerminateTimeout/source_value_greater_than_zero (0.00s) === RUN TestConfig_GetForceKillTimeout === RUN TestConfig_GetForceKillTimeout/source_undefined === RUN TestConfig_GetForceKillTimeout/source_value_lower_than_zero === RUN TestConfig_GetForceKillTimeout/source_value_greater_than_zero --- PASS: TestConfig_GetForceKillTimeout (0.00s) --- PASS: TestConfig_GetForceKillTimeout/source_undefined (0.00s) --- PASS: TestConfig_GetForceKillTimeout/source_value_lower_than_zero (0.00s) --- PASS: TestConfig_GetForceKillTimeout/source_value_greater_than_zero (0.00s) === RUN TestExecutor_Prepare === RUN TestExecutor_Prepare/custom_executor_set_with_ConfigExec_with_invalid_JSON === RUN TestExecutor_Prepare/custom_executor_set_with_ConfigExec_with_empty_JSON === RUN TestExecutor_Prepare/custom_executor_set_with_ConfigExec_and_driver_info_missing_version === RUN TestExecutor_Prepare/custom_executor_set_with_PrepareExec_with_error === RUN TestExecutor_Prepare/custom_executor_set_with_valid_job_env === RUN TestExecutor_Prepare/custom_executor_set_with_valid_job_env,_verify_variable_order_and_prefix === RUN TestExecutor_Prepare/custom_executor_not_set === RUN TestExecutor_Prepare/custom_executor_set_with_ConfigExec_with_error === RUN TestExecutor_Prepare/custom_executor_set_with_ConfigExec_with_undefined_builds_dir === RUN TestExecutor_Prepare/custom_executor_set_without_RunExec === RUN TestExecutor_Prepare/custom_executor_set === RUN TestExecutor_Prepare/AbstractExecutor.Prepare_failure === RUN TestExecutor_Prepare/custom_executor_set_with_ConfigExec === RUN TestExecutor_Prepare/custom_executor_set_with_ConfigExec_and_driver_info_missing_name === RUN TestExecutor_Prepare/custom_executor_set_with_PrepareExec 
--- PASS: TestExecutor_Prepare (0.01s) --- PASS: TestExecutor_Prepare/custom_executor_set_with_ConfigExec_with_invalid_JSON (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Prepare/custom_executor_set_with_ConfigExec_with_empty_JSON (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Prepare/custom_executor_set_with_ConfigExec_and_driver_info_missing_version (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Prepare/custom_executor_set_with_PrepareExec_with_error (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Prepare/custom_executor_set_with_valid_job_env (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Prepare/custom_executor_set_with_valid_job_env,_verify_variable_order_and_prefix (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Prepare/custom_executor_not_set (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() --- PASS: TestExecutor_Prepare/custom_executor_set_with_ConfigExec_with_error (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Prepare/custom_executor_set_with_ConfigExec_with_undefined_builds_dir (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Prepare/custom_executor_set_without_RunExec (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() --- PASS: 
TestExecutor_Prepare/custom_executor_set (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() --- PASS: TestExecutor_Prepare/AbstractExecutor.Prepare_failure (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() --- PASS: TestExecutor_Prepare/custom_executor_set_with_ConfigExec (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Prepare/custom_executor_set_with_ConfigExec_and_driver_info_missing_name (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Prepare/custom_executor_set_with_PrepareExec (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() === RUN TestExecutor_Cleanup === RUN TestExecutor_Cleanup/custom_executor_set_with_CleanupExec_with_error time="2021-05-20T15:29:12Z" level=warning msg="some error message in commands output" cleanup_std=err job=15 project=0 runner=RuNnErTo time="2021-05-20T15:29:12Z" level=warning msg="Cleanup script failed: test-error" job=15 project=0 runner=RuNnErTo === RUN TestExecutor_Cleanup/custom_executor_set_with_valid_job_env,_verify_variable_order_and_prefix === RUN TestExecutor_Cleanup/custom_executor_not_set time="2021-05-20T15:29:12Z" level=warning msg="custom executor not configured" job=17 project=0 runner=RuNnErTo === RUN TestExecutor_Cleanup/custom_executor_set_without_RunExec time="2021-05-20T15:29:12Z" level=warning msg="custom executor is missing RunExec" job=18 project=0 runner=RuNnErTo === RUN TestExecutor_Cleanup/custom_executor_set === RUN TestExecutor_Cleanup/custom_executor_set_with_CleanupExec --- PASS: TestExecutor_Cleanup (0.00s) --- PASS: TestExecutor_Cleanup/custom_executor_set_with_CleanupExec_with_error (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: 
PASS: Run() --- PASS: TestExecutor_Cleanup/custom_executor_set_with_valid_job_env,_verify_variable_order_and_prefix (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Cleanup/custom_executor_not_set (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() --- PASS: TestExecutor_Cleanup/custom_executor_set_without_RunExec (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() --- PASS: TestExecutor_Cleanup/custom_executor_set (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() --- PASS: TestExecutor_Cleanup/custom_executor_set_with_CleanupExec (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() === RUN TestExecutor_Run === RUN TestExecutor_Run/Run_fails_on_tempdir_operations === RUN TestExecutor_Run/Run_executes_job time="2021-05-20T15:29:12Z" level=warning msg="Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426" job=22 project=0 runner=RuNnErTo === RUN TestExecutor_Run/Run_executes_job_with_error time="2021-05-20T15:29:12Z" level=warning msg="Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426" job=23 project=0 runner=RuNnErTo === RUN TestExecutor_Run/custom_executor_set_with_valid_job_env,_verify_variable_order_and_prefix time="2021-05-20T15:29:12Z" level=warning msg="Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426" job=24 project=0 runner=RuNnErTo --- PASS: TestExecutor_Run (0.00s) --- PASS: TestExecutor_Run/Run_fails_on_tempdir_operations (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() --- PASS: 
TestExecutor_Run/Run_executes_job (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Run/Run_executes_job_with_error (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Run/custom_executor_set_with_valid_job_env,_verify_variable_order_and_prefix (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() === RUN TestExecutor_Env === RUN TestExecutor_Env/custom_executor_set_no_variable_to_expand_CUSTOM_ENV_CI_JOB_IMAGE === RUN TestExecutor_Env/custom_executor_set_CUSTOM_ENV_CI_JOB_IMAGE === RUN TestExecutor_Env/custom_executor_set_empty_CUSTOM_ENV_CI_JOB_IMAGE === RUN TestExecutor_Env/custom_executor_set_expanded_CUSTOM_ENV_CI_JOB_IMAGE --- PASS: TestExecutor_Env (0.00s) --- PASS: TestExecutor_Env/custom_executor_set_no_variable_to_expand_CUSTOM_ENV_CI_JOB_IMAGE (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Env/custom_executor_set_CUSTOM_ENV_CI_JOB_IMAGE (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Env/custom_executor_set_empty_CUSTOM_ENV_CI_JOB_IMAGE (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_Env/custom_executor_set_expanded_CUSTOM_ENV_CI_JOB_IMAGE (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() === RUN TestExecutor_ServicesEnv === RUN TestExecutor_ServicesEnv/returns_only_name_when_service_name_is_the_only_definition === RUN TestExecutor_ServicesEnv/returns_full_service_definition === RUN TestExecutor_ServicesEnv/returns_both_simple_and_full_service_definitions === RUN 
TestExecutor_ServicesEnv/does_not_create_env_CI_JOB_SERVICES --- PASS: TestExecutor_ServicesEnv (0.00s) --- PASS: TestExecutor_ServicesEnv/returns_only_name_when_service_name_is_the_only_definition (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_ServicesEnv/returns_full_service_definition (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_ServicesEnv/returns_both_simple_and_full_service_definitions (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() --- PASS: TestExecutor_ServicesEnv/does_not_create_env_CI_JOB_SERVICES (0.00s) custom_test.go:114: PASS: Write(string) custom_test.go:114: PASS: IsStdout() custom_test.go:173: PASS: Run() PASS coverage: 8.5% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/executors/custom 0.571s coverage: 8.5% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 1 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/executors/custom' package with coverprofile in 'count' mode:  Compiling test executor Executing: /usr/local/go/bin/go build -o /tmp/test_executor600878577/main testdata/test_executor/main.go === RUN TestExecutor_Connect --- PASS: TestExecutor_Connect (0.00s) === RUN TestBuildSuccess === RUN TestBuildSuccess/bash Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor655885643/script700882478/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor655885643/script700882478/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00007c1b0), Stderr:(*bytes.Buffer)(0xc00007c1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor655885643/script043919285/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor655885643/script043919285/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test803131036/builds/project-0/.git/ Created fresh repository. Checking out 91956efe as master... 
Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor655885643/script166458768/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor655885643/script166458768/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< time="2021-05-20T15:29:15Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:15Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:15Z" level=warning cleanup_std=err job=0 project=0 Job succeeded === RUN TestBuildSuccess/cmd === RUN TestBuildSuccess/powershell === RUN TestBuildSuccess/pwsh Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor008865090/script713242553/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor008865090/script713242553/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor008865090/script178444740/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor008865090/script178444740/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test907545007/builds/project-0/.git/ Created fresh repository. Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor008865090/script734498387/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor008865090/script734498387/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< time="2021-05-20T15:29:17Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:17Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:17Z" level=warning cleanup_std=err job=0 project=0 Job succeeded --- PASS: TestBuildSuccess (2.26s) --- PASS: TestBuildSuccess/bash (0.09s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test803131036 --- SKIP: TestBuildSuccess/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildSuccess/powershell (0.00s) integration_tests.go:14: powershell failed exec: 
"powershell": executable file not found in $PATH --- PASS: TestBuildSuccess/pwsh (2.17s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test907545007 === RUN TestBuildSuccessRawVariable === RUN TestBuildSuccessRawVariable/bash time="2021-05-20T15:29:17Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:17Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:17Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildSuccessRawVariable/cmd === RUN TestBuildSuccessRawVariable/powershell === RUN TestBuildSuccessRawVariable/pwsh time="2021-05-20T15:29:20Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:20Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:20Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildSuccessRawVariable (3.06s) --- PASS: TestBuildSuccessRawVariable/bash (0.49s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test104949142 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor488196093/script421801784/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor488196093/script421801784/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000aa060), Stderr:(*bytes.Buffer)(0xc0000aa060), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor488196093/script324539703/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor488196093/script324539703/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test104949142/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... 
Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor488196093/script030452522/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor488196093/script030452522/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo $TEST $VARIABLE$WITH$DOLLARS$$ <<<<<<<<<< Job succeeded  --- SKIP: TestBuildSuccessRawVariable/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildSuccessRawVariable/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildSuccessRawVariable/pwsh (2.58s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test628559489 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor633027564/script806710875/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor633027564/script806710875/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor633027564/script181324798/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor633027564/script181324798/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test628559489/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor633027564/script271003973/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor633027564/script271003973/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo $env:TEST $VARIABLE$WITH$DOLLARS$$ <<<<<<<<<< Job succeeded  === RUN TestBuildBuildFailure === RUN TestBuildBuildFailure/bash Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor418407871/script787998226/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job setting build failure mocked build failure Exitting with code 1 time="2021-05-20T15:29:20Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:20Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:20Z" level=warning cleanup_std=err job=0 project=0 ERROR: Job failed (system failure): prepare environment: exit status 1. Check https://docs.gitlab.com/runner/shells/index.html#shell-profile-loading for more information === RUN TestBuildBuildFailure/cmd === RUN TestBuildBuildFailure/powershell === RUN TestBuildBuildFailure/pwsh Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor770614036/script848358755/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job setting build failure mocked build failure Exitting with code 1 time="2021-05-20T15:29:20Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:20Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:20Z" level=warning cleanup_std=err job=0 project=0 ERROR: Job failed (system failure): prepare environment: exit status 1. 
Check https://docs.gitlab.com/runner/shells/index.html#shell-profile-loading for more information --- PASS: TestBuildBuildFailure (0.53s) --- PASS: TestBuildBuildFailure/bash (0.01s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test450931680 --- SKIP: TestBuildBuildFailure/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildBuildFailure/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildBuildFailure/pwsh (0.52s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test072545353 === RUN TestBuildSystemFailure === RUN TestBuildSystemFailure/bash Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor132870029/script661519752/script. prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job setting system failure mocked system failure Exitting with code 2 time="2021-05-20T15:29:21Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:21Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:21Z" level=warning cleanup_std=err job=0 project=0 ERROR: Job failed (system failure): prepare environment: exit status 2. 
Check https://docs.gitlab.com/runner/shells/index.html#shell-profile-loading for more information === RUN TestBuildSystemFailure/cmd === RUN TestBuildSystemFailure/powershell === RUN TestBuildSystemFailure/pwsh Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor612875770/script041417489/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job setting system failure mocked system failure Exitting with code 2 time="2021-05-20T15:29:21Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:21Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:21Z" level=warning cleanup_std=err job=0 project=0 ERROR: Job failed (system failure): prepare environment: exit status 2. Check https://docs.gitlab.com/runner/shells/index.html#shell-profile-loading for more information --- PASS: TestBuildSystemFailure (0.55s) --- PASS: TestBuildSystemFailure/bash (0.01s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test677623142 integration_test.go:187: prepare environment: exit status 2. 
Check https://docs.gitlab.com/runner/shells/index.html#shell-profile-loading for more information --- SKIP: TestBuildSystemFailure/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildSystemFailure/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildSystemFailure/pwsh (0.54s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test902558535 integration_test.go:187: prepare environment: exit status 2. Check https://docs.gitlab.com/runner/shells/index.html#shell-profile-loading for more information === RUN TestBuildUnknownFailure === RUN TestBuildUnknownFailure/bash Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor037842795/script675922382/script. prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job mocked system failure time="2021-05-20T15:29:21Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:21Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:21Z" level=warning cleanup_std=err job=0 project=0 ERROR: Job failed (system failure): prepare environment: unknown Custom executor executable exit code 255; executable execution terminated with: exit status 255. 
Check https://docs.gitlab.com/runner/shells/index.html#shell-profile-loading for more information === RUN TestBuildUnknownFailure/cmd === RUN TestBuildUnknownFailure/powershell === RUN TestBuildUnknownFailure/pwsh Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor219961392/script007341519/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job mocked system failure time="2021-05-20T15:29:22Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:22Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:22Z" level=warning cleanup_std=err job=0 project=0 ERROR: Job failed (system failure): prepare environment: unknown Custom executor executable exit code 255; executable execution terminated with: exit status 255. 
Check https://docs.gitlab.com/runner/shells/index.html#shell-profile-loading for more information --- PASS: TestBuildUnknownFailure (0.53s) --- PASS: TestBuildUnknownFailure/bash (0.01s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test939713852 --- SKIP: TestBuildUnknownFailure/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildUnknownFailure/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildUnknownFailure/pwsh (0.52s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test558779093 === RUN TestBuildCancel === RUN TestBuildCancel/bash === RUN TestBuildCancel/bash/system_interrupt time="2021-05-20T15:29:22Z" level=warning msg="Error while executing file based variables removal script" error="context canceled" job=0 project=0 time="2021-05-20T15:29:22Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:22Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:22Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildCancel/bash/job_is_aborted time="2021-05-20T15:29:23Z" level=warning msg="Error while executing file based variables removal script" error="context canceled" job=0 project=0 time="2021-05-20T15:29:23Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:23Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:23Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildCancel/bash/job_is_canceling time="2021-05-20T15:29:23Z" level=warning msg="Error while executing file based variables removal script" error="context canceled" job=0 project=0 time="2021-05-20T15:29:23Z" level=warning msg="Custom Executor binary 
- \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:23Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:23Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildCancel/cmd === RUN TestBuildCancel/powershell === RUN TestBuildCancel/pwsh === RUN TestBuildCancel/pwsh/system_interrupt time="2021-05-20T15:29:26Z" level=warning msg="Error while executing file based variables removal script" error="context canceled" job=0 project=0 time="2021-05-20T15:29:26Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:26Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:26Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildCancel/pwsh/job_is_aborted time="2021-05-20T15:29:27Z" level=warning msg="Error while executing file based variables removal script" error="context canceled" job=0 project=0 time="2021-05-20T15:29:27Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:27Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:27Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildCancel/pwsh/job_is_canceling time="2021-05-20T15:29:29Z" level=warning msg="Error while executing file based variables removal script" error="context canceled" job=0 project=0 time="2021-05-20T15:29:29Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:29Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:29Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildCancel (7.16s) --- PASS: TestBuildCancel/bash (1.82s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test808361186 --- PASS: 
TestBuildCancel/bash/system_interrupt (0.60s) abort.go:85: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor681139929/script080861284/script. prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor681139929/script080861284/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000ac060), Stderr:(*bytes.Buffer)(0xc0000ac060), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor681139929/script000567411/script. 
get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor681139929/script000567411/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test808361186/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor681139929/script633929014/script. 
build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor681139929/script633929014/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00007e1b0), Stderr:(*bytes.Buffer)(0xc00007e1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} ERROR: Job failed: aborted: interrupt  --- PASS: TestBuildCancel/bash/job_is_aborted (0.60s) abort.go:85: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor054218525/script790939608/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor054218525/script790939608/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor054218525/script439667543/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor054218525/script439667543/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test808361186/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... 
Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor054218525/script182016202/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor054218525/script182016202/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} ERROR: Job failed: canceled  --- PASS: TestBuildCancel/bash/job_is_canceling (0.60s) abort.go:85: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor453045665/script621074572/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor453045665/script621074572/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor453045665/script206716539/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor453045665/script206716539/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test808361186/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... 
Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor453045665/script145998750/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor453045665/script145998750/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} ERROR: Job failed: canceled  --- SKIP: TestBuildCancel/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildCancel/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildCancel/pwsh (5.34s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test952240229 --- PASS: TestBuildCancel/pwsh/system_interrupt (1.61s) abort.go:85: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor439296640/script071456223/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor439296640/script071456223/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor439296640/script362699698/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor439296640/script362699698/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000921b0), Stderr:(*bytes.Buffer)(0xc0000921b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test952240229/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor439296640/script024789353/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor439296640/script024789353/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} ERROR: Job failed: aborted: interrupt  --- PASS: TestBuildCancel/pwsh/job_is_aborted (1.61s) abort.go:85: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor595394484/script407268739/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor595394484/script407268739/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor595394484/script963622150/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor595394484/script963622150/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00007e1b0), Stderr:(*bytes.Buffer)(0xc00007e1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test952240229/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor595394484/script456572589/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor595394484/script456572589/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} ERROR: Job failed: canceled  --- PASS: TestBuildCancel/pwsh/job_is_canceling (1.61s) abort.go:85: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor053952040/script436127079/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor053952040/script436127079/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor053952040/script312272794/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor053952040/script312272794/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test952240229/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor053952040/script562775089/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor053952040/script562775089/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} ERROR: Job failed: canceled  === RUN TestBuildMasking === RUN TestBuildMasking/bash time="2021-05-20T15:29:29Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:29Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:29Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildMasking/cmd === RUN TestBuildMasking/powershell === RUN TestBuildMasking/pwsh time="2021-05-20T15:29:32Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:32Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:32Z" level=warning 
cleanup_std=err job=0 project=0 --- PASS: TestBuildMasking (3.09s) --- PASS: TestBuildMasking/bash (0.49s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test045829084 --- SKIP: TestBuildMasking/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildMasking/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildMasking/pwsh (2.60s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test442062978 === RUN TestBuildWithGitStrategyCloneWithoutLFS === RUN TestBuildWithGitStrategyCloneWithoutLFS/bash time="2021-05-20T15:29:32Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:32Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:32Z" level=warning cleanup_std=err job=0 project=0 time="2021-05-20T15:29:32Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:32Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:32Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildWithGitStrategyCloneWithoutLFS/cmd === RUN TestBuildWithGitStrategyCloneWithoutLFS/powershell === RUN TestBuildWithGitStrategyCloneWithoutLFS/pwsh time="2021-05-20T15:29:34Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:34Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:34Z" level=warning cleanup_std=err job=0 project=0 time="2021-05-20T15:29:36Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:36Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 
time="2021-05-20T15:29:36Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildWithGitStrategyCloneWithoutLFS (3.89s) --- PASS: TestBuildWithGitStrategyCloneWithoutLFS/bash (0.16s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test897626232 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor979693431/script988792938/script. prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor979693431/script988792938/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor979693431/script456495809/script. 
get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor979693431/script456495809/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo pre-clone-script pre-clone-script Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test897626232/builds/project-0/.git/ Created fresh repository. Checking out 91956efe as master... Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor979693431/script100741420/script. 
build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor979693431/script100741420/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00007c1b0), Stderr:(*bytes.Buffer)(0xc00007c1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor426426523/script736829246/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor426426523/script736829246/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor426426523/script062091141/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor426426523/script062091141/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo pre-clone-script pre-clone-script Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test897626232/builds/project-0/.git/ Created fresh repository. Checking out 91956efe as master... 
Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor426426523/script846367520/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor426426523/script846367520/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  --- SKIP: TestBuildWithGitStrategyCloneWithoutLFS/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildWithGitStrategyCloneWithoutLFS/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildWithGitStrategyCloneWithoutLFS/pwsh (3.73s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test766437887 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor635530066/script791541385/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor635530066/script791541385/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor635530066/script298715732/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor635530066/script298715732/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00007c1b0), Stderr:(*bytes.Buffer)(0xc00007c1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo pre-clone-script pre-clone-script Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test766437887/builds/project-0/.git/ Created fresh repository. Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor635530066/script878288803/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor635530066/script878288803/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor204637350/script027569101/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor204637350/script027569101/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor204637350/script789223624/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor204637350/script789223624/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo pre-clone-script pre-clone-script Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test766437887/builds/project-0/.git/ Created fresh repository. Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor204637350/script834185095/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor204637350/script834185095/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  === RUN TestBuildWithGitStrategyCloneNoCheckoutWithoutLFS === RUN TestBuildWithGitStrategyCloneNoCheckoutWithoutLFS/bash time="2021-05-20T15:29:36Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:36Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:36Z" level=warning cleanup_std=err job=0 project=0 time="2021-05-20T15:29:36Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:36Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:36Z" level=warning 
cleanup_std=err job=0 project=0 === RUN TestBuildWithGitStrategyCloneNoCheckoutWithoutLFS/cmd === RUN TestBuildWithGitStrategyCloneNoCheckoutWithoutLFS/powershell === RUN TestBuildWithGitStrategyCloneNoCheckoutWithoutLFS/pwsh time="2021-05-20T15:29:38Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:38Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:38Z" level=warning cleanup_std=err job=0 project=0 time="2021-05-20T15:29:39Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:39Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:39Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildWithGitStrategyCloneNoCheckoutWithoutLFS (3.74s) --- PASS: TestBuildWithGitStrategyCloneNoCheckoutWithoutLFS/bash (0.11s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test982035258 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor022575953/script122732156/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor022575953/script122732156/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00007c1b0), Stderr:(*bytes.Buffer)(0xc00007c1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor022575953/script636435883/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor022575953/script636435883/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo pre-clone-script pre-clone-script Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test982035258/builds/project-0/.git/ Created fresh repository. 
Skipping Git checkout Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor022575953/script013062414/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor022575953/script013062414/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00007c1b0), Stderr:(*bytes.Buffer)(0xc00007c1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor352183061/script271784816/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor352183061/script271784816/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor352183061/script434501647/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor352183061/script434501647/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo pre-clone-script pre-clone-script Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test982035258/builds/project-0/.git/ Created fresh repository. 
Skipping Git checkout Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor352183061/script734968866/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor352183061/script734968866/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  --- SKIP: TestBuildWithGitStrategyCloneNoCheckoutWithoutLFS/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildWithGitStrategyCloneNoCheckoutWithoutLFS/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildWithGitStrategyCloneNoCheckoutWithoutLFS/pwsh (3.63s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test710458137 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor718114724/script670156979/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor718114724/script670156979/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor718114724/script232323702/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor718114724/script232323702/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00007c1b0), Stderr:(*bytes.Buffer)(0xc00007c1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo pre-clone-script pre-clone-script Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test710458137/builds/project-0/.git/ Created fresh repository. 
Skipping Git checkout Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor718114724/script653545821/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor718114724/script653545821/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor378506008/script887787415/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor378506008/script887787415/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor378506008/script567626762/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor378506008/script567626762/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo pre-clone-script pre-clone-script Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test710458137/builds/project-0/.git/ Created fresh repository. 
Skipping Git checkout Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor378506008/script777846241/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor378506008/script777846241/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  PASS coverage: 26.9% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/executors/custom 25.343s coverage: 26.9% of statements in gitlab.com/gitlab-org/gitlab-runner/...  
--- Starting part 2 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/executors/custom' package with coverprofile in 'count' mode:  Compiling test executor Executing: /usr/local/go/bin/go build -o /tmp/test_executor534484322/main testdata/test_executor/main.go === RUN TestBuildWithGitSubmoduleStrategyRecursiveAndGitStrategyNone === RUN TestBuildWithGitSubmoduleStrategyRecursiveAndGitStrategyNone/bash time="2021-05-20T15:29:42Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:42Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:42Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildWithGitSubmoduleStrategyRecursiveAndGitStrategyNone/cmd === RUN TestBuildWithGitSubmoduleStrategyRecursiveAndGitStrategyNone/powershell === RUN TestBuildWithGitSubmoduleStrategyRecursiveAndGitStrategyNone/pwsh time="2021-05-20T15:29:44Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:44Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:44Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildWithGitSubmoduleStrategyRecursiveAndGitStrategyNone (2.00s) --- PASS: TestBuildWithGitSubmoduleStrategyRecursiveAndGitStrategyNone/bash (0.03s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test004287833 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor026517732/script271395059/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor026517732/script271395059/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor026517732/script883294134/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor026517732/script883294134/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Skipping Git repository setup Skipping Git checkout Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 
Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor026517732/script301166493/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor026517732/script301166493/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  --- SKIP: TestBuildWithGitSubmoduleStrategyRecursiveAndGitStrategyNone/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildWithGitSubmoduleStrategyRecursiveAndGitStrategyNone/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildWithGitSubmoduleStrategyRecursiveAndGitStrategyNone/pwsh (1.97s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test012311128 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor279303127/script150918986/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor279303127/script150918986/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00007c1b0), Stderr:(*bytes.Buffer)(0xc00007c1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor279303127/script931400737/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor279303127/script931400737/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Skipping Git repository setup Skipping Git checkout Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor279303127/script034725132/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor279303127/script034725132/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), 
lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  === RUN TestBuildWithoutDebugTrace === RUN TestBuildWithoutDebugTrace/bash time="2021-05-20T15:29:44Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:44Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:44Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildWithoutDebugTrace/cmd === RUN TestBuildWithoutDebugTrace/powershell === RUN TestBuildWithoutDebugTrace/pwsh time="2021-05-20T15:29:46Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:46Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:46Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildWithoutDebugTrace (2.21s) --- PASS: TestBuildWithoutDebugTrace/bash (0.08s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test402911483 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor236342302/script197698789/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor236342302/script197698789/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor236342302/script534447360/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor236342302/script534447360/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test402911483/builds/project-0/.git/ Created fresh repository. Checking out 91956efe as master... 
Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor236342302/script983690847/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor236342302/script983690847/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  --- SKIP: TestBuildWithoutDebugTrace/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildWithoutDebugTrace/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildWithoutDebugTrace/pwsh (2.12s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test883318834 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor091227113/script255420468/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor091227113/script255420468/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor091227113/script919739395/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor091227113/script919739395/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000aa060), Stderr:(*bytes.Buffer)(0xc0000aa060), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test883318834/builds/project-0/.git/ Created fresh repository. Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor091227113/script231143814/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor091227113/script231143814/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  === RUN TestBuildWithDebugTrace === RUN TestBuildWithDebugTrace/bash time="2021-05-20T15:29:46Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:46Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:46Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildWithDebugTrace/cmd === RUN TestBuildWithDebugTrace/powershell === RUN TestBuildWithDebugTrace/pwsh time="2021-05-20T15:29:48Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:48Z" level=warning msg="Mocking execution of: []" 
cleanup_std=err job=0 project=0 time="2021-05-20T15:29:48Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildWithDebugTrace (2.21s) --- PASS: TestBuildWithDebugTrace/bash (0.09s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test709061421 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor261222568/script652827111/script. prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor261222568/script652827111/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> + set -eo pipefail + set +o noclobber + : + eval 'echo "Running on $(hostname)..." ' +++ hostname ++ echo 'Running on runner-pvr9xbdq-project-250833-concurrent-0...' Running on runner-pvr9xbdq-project-250833-concurrent-0... + exit 0 <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor261222568/script694617626/script. 
get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor261222568/script694617626/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> + set -eo pipefail + set +o noclobber + : + eval 'export FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION=$'\''false'\'' export FF_NETWORK_PER_BUILD=$'\''false'\'' export FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=$'\''true'\'' export FF_USE_DIRECT_DOWNLOAD=$'\''true'\'' export FF_SKIP_NOOP_BUILD_STAGES=$'\''true'\'' export FF_SHELL_EXECUTOR_USE_LEGACY_PROCESS_KILL=$'\''false'\'' export FF_RESET_HELPER_IMAGE_ENTRYPOINT=$'\''true'\'' export FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER=$'\''true'\'' export FF_USE_FASTZIP=$'\''false'\'' export FF_GITLAB_REGISTRY_HELPER_IMAGE=$'\''false'\'' export FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR=$'\''false'\'' export FF_ENABLE_BASH_EXIT_CODE_CHECK=$'\''false'\'' export FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY=$'\''true'\'' export FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE=$'\''false'\'' export FF_USE_NEW_BASH_EVAL_STRATEGY=$'\''false'\'' export FF_USE_POWERSHELL_PATH_RESOLVER=$'\''false'\'' export CI_RUNNER_SHORT_TOKEN='\'''\'' export CI_BUILDS_DIR=$'\''/tmp/gitlab-runner-custom-executor-test709061421/builds'\'' export CI_PROJECT_DIR=$'\''/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0'\'' export CI_CONCURRENT_ID=0 export CI_CONCURRENT_PROJECT_ID=0 export CI_SERVER=$'\''yes'\'' export 
CI_JOB_STATUS=$'\''running'\'' export CI_DEBUG_TRACE=$'\''true'\'' export CI_SHARED_ENVIRONMENT=$'\''true'\'' export CI_RUNNER_VERSION=13.12.0 export CI_RUNNER_REVISION=$'\''7a6612da'\'' export CI_RUNNER_EXECUTABLE_ARCH=$'\''linux/amd64'\'' export GIT_LFS_SKIP_SMUDGE=1 $'\''rm'\'' "-r" "-f" "/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0" echo $'\''\x1b[32;1mFetching changes...\x1b[0;m'\'' $'\''mkdir'\'' "-p" "/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0.tmp/git-template" $'\''git'\'' "config" "-f" "/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0.tmp/git-template/config" "fetch.recurseSubmodules" "false" $'\''rm'\'' "-f" "/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0/.git/index.lock" $'\''rm'\'' "-f" "/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0/.git/shallow.lock" $'\''rm'\'' "-f" "/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0/.git/HEAD.lock" $'\''rm'\'' "-f" "/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0/.git/hooks/post-checkout" $'\''rm'\'' "-f" "/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0/.git/config.lock" $'\''git'\'' "init" "/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0" "--template" "/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0.tmp/git-template" $'\''cd'\'' "/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0" if $'\''git'\'' "remote" "add" "origin" "/builds/gitlab-org/gitlab-runner/tmp/gitlab-test/.git" >/dev/null 2>/dev/null; then echo $'\''\x1b[32;1mCreated fresh repository.\x1b[0;m'\'' else $'\''git'\'' "remote" "set-url" "origin" "/builds/gitlab-org/gitlab-runner/tmp/gitlab-test/.git" fi $'\''git'\'' "-c" "http.userAgent=gitlab-runner 13.12.0 linux/amd64" "fetch" "origin" "+refs/heads/*:refs/origin/heads/*" "+refs/tags/*:refs/tags/*" "--prune" "--quiet" echo $'\''\x1b[32;1mChecking out 91956efe as master...\x1b[0;m'\'' $'\''git'\'' "checkout" 
"-f" "-q" "91956efe32fb7bef54f378d90c9bd74c19025872" $'\''git'\'' "clean" "-ffdx" if $'\''git'\'' "lfs" "version" >/dev/null 2>/dev/null; then $'\''git'\'' "lfs" "pull" echo fi echo $'\''\x1b[32;1mSkipping Git submodules setup\x1b[0;m'\'' ' ++ export FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION=false ++ FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION=false ++ export FF_NETWORK_PER_BUILD=false ++ FF_NETWORK_PER_BUILD=false ++ export FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=true ++ FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=true ++ export FF_USE_DIRECT_DOWNLOAD=true ++ FF_USE_DIRECT_DOWNLOAD=true ++ export FF_SKIP_NOOP_BUILD_STAGES=true ++ FF_SKIP_NOOP_BUILD_STAGES=true ++ export FF_SHELL_EXECUTOR_USE_LEGACY_PROCESS_KILL=false ++ FF_SHELL_EXECUTOR_USE_LEGACY_PROCESS_KILL=false ++ export FF_RESET_HELPER_IMAGE_ENTRYPOINT=true ++ FF_RESET_HELPER_IMAGE_ENTRYPOINT=true ++ export FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER=true ++ FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER=true ++ export FF_USE_FASTZIP=false ++ FF_USE_FASTZIP=false ++ export FF_GITLAB_REGISTRY_HELPER_IMAGE=false ++ FF_GITLAB_REGISTRY_HELPER_IMAGE=false ++ export FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR=false ++ FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR=false ++ export FF_ENABLE_BASH_EXIT_CODE_CHECK=false ++ FF_ENABLE_BASH_EXIT_CODE_CHECK=false ++ export FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY=true ++ FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY=true ++ export FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE=false ++ FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE=false ++ export FF_USE_NEW_BASH_EVAL_STRATEGY=false ++ FF_USE_NEW_BASH_EVAL_STRATEGY=false ++ export FF_USE_POWERSHELL_PATH_RESOLVER=false ++ FF_USE_POWERSHELL_PATH_RESOLVER=false ++ export CI_RUNNER_SHORT_TOKEN= ++ CI_RUNNER_SHORT_TOKEN= ++ export CI_BUILDS_DIR=/tmp/gitlab-runner-custom-executor-test709061421/builds ++ CI_BUILDS_DIR=/tmp/gitlab-runner-custom-executor-test709061421/builds ++ export 
CI_PROJECT_DIR=/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0 ++ CI_PROJECT_DIR=/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0 ++ export CI_CONCURRENT_ID=0 ++ CI_CONCURRENT_ID=0 ++ export CI_CONCURRENT_PROJECT_ID=0 ++ CI_CONCURRENT_PROJECT_ID=0 ++ export CI_SERVER=yes ++ CI_SERVER=yes ++ export CI_JOB_STATUS=running ++ CI_JOB_STATUS=running ++ export CI_DEBUG_TRACE=true ++ CI_DEBUG_TRACE=true ++ export CI_SHARED_ENVIRONMENT=true ++ CI_SHARED_ENVIRONMENT=true ++ export CI_RUNNER_VERSION=13.12.0 ++ CI_RUNNER_VERSION=13.12.0 ++ export CI_RUNNER_REVISION=7a6612da ++ CI_RUNNER_REVISION=7a6612da ++ export CI_RUNNER_EXECUTABLE_ARCH=linux/amd64 ++ CI_RUNNER_EXECUTABLE_ARCH=linux/amd64 ++ export GIT_LFS_SKIP_SMUDGE=1 ++ GIT_LFS_SKIP_SMUDGE=1 ++ rm -r -f /tmp/gitlab-runner-custom-executor-test709061421/builds/project-0 ++ echo 'Fetching changes...' Fetching changes... ++ mkdir -p /tmp/gitlab-runner-custom-executor-test709061421/builds/project-0.tmp/git-template ++ git config -f /tmp/gitlab-runner-custom-executor-test709061421/builds/project-0.tmp/git-template/config fetch.recurseSubmodules false ++ rm -f /tmp/gitlab-runner-custom-executor-test709061421/builds/project-0/.git/index.lock ++ rm -f /tmp/gitlab-runner-custom-executor-test709061421/builds/project-0/.git/shallow.lock ++ rm -f /tmp/gitlab-runner-custom-executor-test709061421/builds/project-0/.git/HEAD.lock ++ rm -f /tmp/gitlab-runner-custom-executor-test709061421/builds/project-0/.git/hooks/post-checkout ++ rm -f /tmp/gitlab-runner-custom-executor-test709061421/builds/project-0/.git/config.lock ++ git init /tmp/gitlab-runner-custom-executor-test709061421/builds/project-0 --template /tmp/gitlab-runner-custom-executor-test709061421/builds/project-0.tmp/git-template Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test709061421/builds/project-0/.git/ ++ cd /tmp/gitlab-runner-custom-executor-test709061421/builds/project-0 ++ git remote add origin 
/builds/gitlab-org/gitlab-runner/tmp/gitlab-test/.git ++ echo 'Created fresh repository.' Created fresh repository. ++ git -c 'http.userAgent=gitlab-runner 13.12.0 linux/amd64' fetch origin '+refs/heads/*:refs/origin/heads/*' '+refs/tags/*:refs/tags/*' --prune --quiet ++ echo 'Checking out 91956efe as master...' Checking out 91956efe as master... ++ git checkout -f -q 91956efe32fb7bef54f378d90c9bd74c19025872 ++ git clean -ffdx ++ git lfs version ++ git lfs pull ++ echo ++ echo 'Skipping Git submodules setup' Skipping Git submodules setup + exit 0 <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor261222568/script761271985/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor261222568/script761271985/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00007c1b0), Stderr:(*bytes.Buffer)(0xc00007c1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> + set -eo pipefail + set +o noclobber + : + eval 'export FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION=$'\''false'\'' export FF_NETWORK_PER_BUILD=$'\''false'\'' export FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=$'\''true'\'' export FF_USE_DIRECT_DOWNLOAD=$'\''true'\'' export FF_SKIP_NOOP_BUILD_STAGES=$'\''true'\'' export 
FF_SHELL_EXECUTOR_USE_LEGACY_PROCESS_KILL=$'\''false'\'' export FF_RESET_HELPER_IMAGE_ENTRYPOINT=$'\''true'\'' export FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER=$'\''true'\'' export FF_USE_FASTZIP=$'\''false'\'' export FF_GITLAB_REGISTRY_HELPER_IMAGE=$'\''false'\'' export FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR=$'\''false'\'' export FF_ENABLE_BASH_EXIT_CODE_CHECK=$'\''false'\'' export FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY=$'\''true'\'' export FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE=$'\''false'\'' export FF_USE_NEW_BASH_EVAL_STRATEGY=$'\''false'\'' export FF_USE_POWERSHELL_PATH_RESOLVER=$'\''false'\'' export CI_RUNNER_SHORT_TOKEN='\'''\'' export CI_BUILDS_DIR=$'\''/tmp/gitlab-runner-custom-executor-test709061421/builds'\'' export CI_PROJECT_DIR=$'\''/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0'\'' export CI_CONCURRENT_ID=0 export CI_CONCURRENT_PROJECT_ID=0 export CI_SERVER=$'\''yes'\'' export CI_JOB_STATUS=$'\''running'\'' export CI_DEBUG_TRACE=$'\''true'\'' export CI_SHARED_ENVIRONMENT=$'\''true'\'' export CI_RUNNER_VERSION=13.12.0 export CI_RUNNER_REVISION=$'\''7a6612da'\'' export CI_RUNNER_EXECUTABLE_ARCH=$'\''linux/amd64'\'' $'\''cd'\'' "/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0" echo $'\''\x1b[32;1m$ echo Hello World\x1b[0;m'\'' echo Hello World ' ++ export FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION=false ++ FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION=false ++ export FF_NETWORK_PER_BUILD=false ++ FF_NETWORK_PER_BUILD=false ++ export FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=true ++ FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=true ++ export FF_USE_DIRECT_DOWNLOAD=true ++ FF_USE_DIRECT_DOWNLOAD=true ++ export FF_SKIP_NOOP_BUILD_STAGES=true ++ FF_SKIP_NOOP_BUILD_STAGES=true ++ export FF_SHELL_EXECUTOR_USE_LEGACY_PROCESS_KILL=false ++ FF_SHELL_EXECUTOR_USE_LEGACY_PROCESS_KILL=false ++ export FF_RESET_HELPER_IMAGE_ENTRYPOINT=true ++ FF_RESET_HELPER_IMAGE_ENTRYPOINT=true ++ export 
FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER=true ++ FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER=true ++ export FF_USE_FASTZIP=false ++ FF_USE_FASTZIP=false ++ export FF_GITLAB_REGISTRY_HELPER_IMAGE=false ++ FF_GITLAB_REGISTRY_HELPER_IMAGE=false ++ export FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR=false ++ FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR=false ++ export FF_ENABLE_BASH_EXIT_CODE_CHECK=false ++ FF_ENABLE_BASH_EXIT_CODE_CHECK=false ++ export FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY=true ++ FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY=true ++ export FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE=false ++ FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE=false ++ export FF_USE_NEW_BASH_EVAL_STRATEGY=false ++ FF_USE_NEW_BASH_EVAL_STRATEGY=false ++ export FF_USE_POWERSHELL_PATH_RESOLVER=false ++ FF_USE_POWERSHELL_PATH_RESOLVER=false ++ export CI_RUNNER_SHORT_TOKEN= ++ CI_RUNNER_SHORT_TOKEN= ++ export CI_BUILDS_DIR=/tmp/gitlab-runner-custom-executor-test709061421/builds ++ CI_BUILDS_DIR=/tmp/gitlab-runner-custom-executor-test709061421/builds ++ export CI_PROJECT_DIR=/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0 ++ CI_PROJECT_DIR=/tmp/gitlab-runner-custom-executor-test709061421/builds/project-0 ++ export CI_CONCURRENT_ID=0 ++ CI_CONCURRENT_ID=0 ++ export CI_CONCURRENT_PROJECT_ID=0 ++ CI_CONCURRENT_PROJECT_ID=0 ++ export CI_SERVER=yes ++ CI_SERVER=yes ++ export CI_JOB_STATUS=running ++ CI_JOB_STATUS=running ++ export CI_DEBUG_TRACE=true ++ CI_DEBUG_TRACE=true ++ export CI_SHARED_ENVIRONMENT=true ++ CI_SHARED_ENVIRONMENT=true ++ export CI_RUNNER_VERSION=13.12.0 ++ CI_RUNNER_VERSION=13.12.0 ++ export CI_RUNNER_REVISION=7a6612da ++ CI_RUNNER_REVISION=7a6612da ++ export CI_RUNNER_EXECUTABLE_ARCH=linux/amd64 ++ CI_RUNNER_EXECUTABLE_ARCH=linux/amd64 ++ cd /tmp/gitlab-runner-custom-executor-test709061421/builds/project-0 ++ echo '$ echo Hello World' $ echo Hello World ++ echo Hello World Hello World + exit 0 <<<<<<<<<< Job succeeded  --- SKIP: TestBuildWithDebugTrace/cmd (0.00s) 
integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildWithDebugTrace/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildWithDebugTrace/pwsh (2.13s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test012996188 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor412466187/script390154734/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor412466187/script390154734/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> DEBUG: 2+ >>>> $ErrorActionPreference = "Stop" DEBUG: 4+ >>>> echo "Running on $([Environment]::MachineName)..." DEBUG: 4+ echo "Running on $( >>>> [Environment]::MachineName)..." Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor412466187/script001058421/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor412466187/script001058421/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> DEBUG: 2+ >>>> $ErrorActionPreference = "Stop" DEBUG: ! SET $ErrorActionPreference = 'Stop'. DEBUG: 4+ >>>> $FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION="false" DEBUG: ! SET $FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION = 'false'. DEBUG: 5+ >>>> $env:FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION=$FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION DEBUG: 6+ >>>> $FF_NETWORK_PER_BUILD="false" DEBUG: ! SET $FF_NETWORK_PER_BUILD = 'false'. DEBUG: 7+ >>>> $env:FF_NETWORK_PER_BUILD=$FF_NETWORK_PER_BUILD DEBUG: 8+ >>>> $FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY="true" DEBUG: ! SET $FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY = 'true'. DEBUG: 9+ >>>> $env:FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=$FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY DEBUG: 10+ >>>> $FF_USE_DIRECT_DOWNLOAD="true" DEBUG: ! SET $FF_USE_DIRECT_DOWNLOAD = 'true'. DEBUG: 11+ >>>> $env:FF_USE_DIRECT_DOWNLOAD=$FF_USE_DIRECT_DOWNLOAD DEBUG: 12+ >>>> $FF_SKIP_NOOP_BUILD_STAGES="true" DEBUG: ! SET $FF_SKIP_NOOP_BUILD_STAGES = 'true'. 
DEBUG: 13+ >>>> $env:FF_SKIP_NOOP_BUILD_STAGES=$FF_SKIP_NOOP_BUILD_STAGES DEBUG: 14+ >>>> $FF_SHELL_EXECUTOR_USE_LEGACY_PROCESS_KILL="false" DEBUG: ! SET $FF_SHELL_EXECUTOR_USE_LEGACY_PROCESS_KILL = 'false'. DEBUG: 15+ >>>> $env:FF_SHELL_EXECUTOR_USE_LEGACY_PROCESS_KILL=$FF_SHELL_EXECUTOR_USE_LEGACY_PROCESS_KILL DEBUG: 16+ >>>> $FF_RESET_HELPER_IMAGE_ENTRYPOINT="true" DEBUG: ! SET $FF_RESET_HELPER_IMAGE_ENTRYPOINT = 'true'. DEBUG: 17+ >>>> $env:FF_RESET_HELPER_IMAGE_ENTRYPOINT=$FF_RESET_HELPER_IMAGE_ENTRYPOINT DEBUG: 18+ >>>> $FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER="true" DEBUG: ! SET $FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER = 'true'. DEBUG: 19+ >>>> $env:FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER=$FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER DEBUG: 20+ >>>> $FF_USE_FASTZIP="false" DEBUG: ! SET $FF_USE_FASTZIP = 'false'. DEBUG: 21+ >>>> $env:FF_USE_FASTZIP=$FF_USE_FASTZIP DEBUG: 22+ >>>> $FF_GITLAB_REGISTRY_HELPER_IMAGE="false" DEBUG: ! SET $FF_GITLAB_REGISTRY_HELPER_IMAGE = 'false'. DEBUG: 23+ >>>> $env:FF_GITLAB_REGISTRY_HELPER_IMAGE=$FF_GITLAB_REGISTRY_HELPER_IMAGE DEBUG: 24+ >>>> $FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR="false" DEBUG: ! SET $FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR = 'false'. DEBUG: 25+ >>>> $env:FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR=$FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR DEBUG: 26+ >>>> $FF_ENABLE_BASH_EXIT_CODE_CHECK="false" DEBUG: ! SET $FF_ENABLE_BASH_EXIT_CODE_CHECK = 'false'. DEBUG: 27+ >>>> $env:FF_ENABLE_BASH_EXIT_CODE_CHECK=$FF_ENABLE_BASH_EXIT_CODE_CHECK DEBUG: 28+ >>>> $FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY="true" DEBUG: ! SET $FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY = 'true'. DEBUG: 29+ >>>> $env:FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY=$FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY DEBUG: 30+ >>>> $FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE="false" DEBUG: ! SET $FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE = 'false'. 
DEBUG: 31+ >>>> $env:FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE=$FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE DEBUG: 32+ >>>> $FF_USE_NEW_BASH_EVAL_STRATEGY="false" DEBUG: ! SET $FF_USE_NEW_BASH_EVAL_STRATEGY = 'false'. DEBUG: 33+ >>>> $env:FF_USE_NEW_BASH_EVAL_STRATEGY=$FF_USE_NEW_BASH_EVAL_STRATEGY DEBUG: 34+ >>>> $FF_USE_POWERSHELL_PATH_RESOLVER="false" DEBUG: ! SET $FF_USE_POWERSHELL_PATH_RESOLVER = 'false'. DEBUG: 35+ >>>> $env:FF_USE_POWERSHELL_PATH_RESOLVER=$FF_USE_POWERSHELL_PATH_RESOLVER DEBUG: 36+ >>>> $CI_RUNNER_SHORT_TOKEN="" DEBUG: ! SET $CI_RUNNER_SHORT_TOKEN = ''. DEBUG: 37+ >>>> $env:CI_RUNNER_SHORT_TOKEN=$CI_RUNNER_SHORT_TOKEN DEBUG: 38+ >>>> $CI_BUILDS_DIR="/tmp/gitlab-runner-custom-executor-test012996188/builds" DEBUG: ! SET $CI_BUILDS_DIR = '/tmp/gitlab-runner-custom-executor-test01299618'. DEBUG: 39+ >>>> $env:CI_BUILDS_DIR=$CI_BUILDS_DIR DEBUG: 40+ >>>> $CI_PROJECT_DIR="/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0" DEBUG: ! SET $CI_PROJECT_DIR = '/tmp/gitlab-runner-custom-executor-test0129961'. DEBUG: 41+ >>>> $env:CI_PROJECT_DIR=$CI_PROJECT_DIR DEBUG: 42+ >>>> $CI_CONCURRENT_ID="0" DEBUG: ! SET $CI_CONCURRENT_ID = '0'. DEBUG: 43+ >>>> $env:CI_CONCURRENT_ID=$CI_CONCURRENT_ID DEBUG: 44+ >>>> $CI_CONCURRENT_PROJECT_ID="0" DEBUG: ! SET $CI_CONCURRENT_PROJECT_ID = '0'. DEBUG: 45+ >>>> $env:CI_CONCURRENT_PROJECT_ID=$CI_CONCURRENT_PROJECT_ID DEBUG: 46+ >>>> $CI_SERVER="yes" DEBUG: ! SET $CI_SERVER = 'yes'. DEBUG: 47+ >>>> $env:CI_SERVER=$CI_SERVER DEBUG: 48+ >>>> $CI_JOB_STATUS="running" DEBUG: ! SET $CI_JOB_STATUS = 'running'. DEBUG: 49+ >>>> $env:CI_JOB_STATUS=$CI_JOB_STATUS DEBUG: 50+ >>>> $CI_DEBUG_TRACE="true" DEBUG: ! SET $CI_DEBUG_TRACE = 'true'. DEBUG: 51+ >>>> $env:CI_DEBUG_TRACE=$CI_DEBUG_TRACE DEBUG: 52+ >>>> $CI_SHARED_ENVIRONMENT="true" DEBUG: ! SET $CI_SHARED_ENVIRONMENT = 'true'. 
DEBUG: 53+ >>>> $env:CI_SHARED_ENVIRONMENT=$CI_SHARED_ENVIRONMENT DEBUG: 54+ >>>> $CI_RUNNER_VERSION="13.12.0" DEBUG: ! SET $CI_RUNNER_VERSION = '13.12.0'. DEBUG: 55+ >>>> $env:CI_RUNNER_VERSION=$CI_RUNNER_VERSION DEBUG: 56+ >>>> $CI_RUNNER_REVISION="7a6612da" DEBUG: ! SET $CI_RUNNER_REVISION = '7a6612da'. DEBUG: 57+ >>>> $env:CI_RUNNER_REVISION=$CI_RUNNER_REVISION DEBUG: 58+ >>>> $CI_RUNNER_EXECUTABLE_ARCH="linux/amd64" DEBUG: ! SET $CI_RUNNER_EXECUTABLE_ARCH = 'linux/amd64'. DEBUG: 59+ >>>> $env:CI_RUNNER_EXECUTABLE_ARCH=$CI_RUNNER_EXECUTABLE_ARCH DEBUG: 60+ >>>> $GIT_LFS_SKIP_SMUDGE="1" DEBUG: ! SET $GIT_LFS_SKIP_SMUDGE = '1'. DEBUG: 61+ >>>> $env:GIT_LFS_SKIP_SMUDGE=$GIT_LFS_SKIP_SMUDGE DEBUG: 62+ if( >>>> (Get-Command -Name Remove-Item2 -Module NTFSSecurity -ErrorAction SilentlyContinue) -and (Test-Path "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0" -PathType Container) ) { DEBUG: 64+ } elseif( >>>> Test-Path "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0") { DEBUG: 68+ >>>> echo "Fetching changes..." Fetching changes... DEBUG: 69+ >>>> New-Item -ItemType directory -Force -Path "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0.tmp/git-template" | out-null DEBUG: 70+ >>>> & "git" "config" "-f" "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0.tmp/git-template/config" "fetch.recurseSubmodules" "false" DEBUG: 71+ if( >>>> !$?) 
{ Exit &{if($LASTEXITCODE) {$LASTEXITCODE} else {1}} } DEBUG: 73+ if( >>>> (Get-Command -Name Remove-Item2 -Module NTFSSecurity -ErrorAction SilentlyContinue) -and (Test-Path "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0/.git/index.lock" -PathType Leaf) ) { DEBUG: 75+ } elseif( >>>> Test-Path "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0/.git/index.lock") { DEBUG: 79+ if( >>>> (Get-Command -Name Remove-Item2 -Module NTFSSecurity -ErrorAction SilentlyContinue) -and (Test-Path "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0/.git/shallow.lock" -PathType Leaf) ) { DEBUG: 81+ } elseif( >>>> Test-Path "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0/.git/shallow.lock") { DEBUG: 85+ if( >>>> (Get-Command -Name Remove-Item2 -Module NTFSSecurity -ErrorAction SilentlyContinue) -and (Test-Path "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0/.git/HEAD.lock" -PathType Leaf) ) { DEBUG: 87+ } elseif( >>>> Test-Path "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0/.git/HEAD.lock") { DEBUG: 91+ if( >>>> (Get-Command -Name Remove-Item2 -Module NTFSSecurity -ErrorAction SilentlyContinue) -and (Test-Path "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0/.git/hooks/post-checkout" -PathType Leaf) ) { DEBUG: 93+ } elseif( >>>> Test-Path "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0/.git/hooks/post-checkout") { DEBUG: 97+ if( >>>> (Get-Command -Name Remove-Item2 -Module NTFSSecurity -ErrorAction SilentlyContinue) -and (Test-Path "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0/.git/config.lock" -PathType Leaf) ) { DEBUG: 99+ } elseif( >>>> Test-Path "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0/.git/config.lock") { DEBUG: 103+ >>>> & "git" "init" "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0" "--template" 
"/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0.tmp/git-template" Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test012996188/builds/project-0/.git/ DEBUG: 104+ if( >>>> !$?) { Exit &{if($LASTEXITCODE) {$LASTEXITCODE} else {1}} } DEBUG: 106+ >>>> cd "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0" DEBUG: 107+ if( >>>> !$?) { Exit &{if($LASTEXITCODE) {$LASTEXITCODE} else {1}} } DEBUG: 109+ >>>> Set-Variable -Name cmdErr -Value $false DEBUG: 111+ >>>> & "git" "remote" "add" "origin" "/builds/gitlab-org/gitlab-runner/tmp/gitlab-test/.git" 2>$null DEBUG: 112+ if( >>>> !$?) { throw &{if($LASTEXITCODE) {$LASTEXITCODE} else {1}} } DEBUG: 116+ if( >>>> !$cmdErr) { DEBUG: 117+ >>>> echo "Created fresh repository." Created fresh repository. DEBUG: 123+ >>>> & "git" "-c" "http.userAgent=gitlab-runner 13.12.0 linux/amd64" "fetch" "origin" "+refs/heads/*:refs/origin/heads/*" "+refs/tags/*:refs/tags/*" "--prune" "--quiet" DEBUG: 124+ if( >>>> !$?) { Exit &{if($LASTEXITCODE) {$LASTEXITCODE} else {1}} } DEBUG: 126+ >>>> echo "Checking out 91956efe as master..." Checking out 91956efe as master... DEBUG: 127+ >>>> & "git" "checkout" "-f" "-q" "91956efe32fb7bef54f378d90c9bd74c19025872" DEBUG: 128+ if( >>>> !$?) { Exit &{if($LASTEXITCODE) {$LASTEXITCODE} else {1}} } DEBUG: 130+ >>>> & "git" "clean" "-ffdx" DEBUG: 131+ if( >>>> !$?) { Exit &{if($LASTEXITCODE) {$LASTEXITCODE} else {1}} } DEBUG: 133+ >>>> Set-Variable -Name cmdErr -Value $false DEBUG: 135+ >>>> & "git" "lfs" "version" 2>$null git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) DEBUG: 136+ if( >>>> !$?) { throw &{if($LASTEXITCODE) {$LASTEXITCODE} else {1}} } DEBUG: 140+ if( >>>> !$cmdErr) { DEBUG: 141+ >>>> & "git" "lfs" "pull" DEBUG: 142+ if( >>>> !$?) 
{ Exit &{if($LASTEXITCODE) {$LASTEXITCODE} else {1}} } DEBUG: 144+ >>>> echo "" DEBUG: 146+ >>>> echo "Skipping Git submodules setup" Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor412466187/script593595728/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor412466187/script593595728/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> DEBUG: 2+ >>>> $ErrorActionPreference = "Stop" DEBUG: 4+ >>>> $FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION="false" DEBUG: 5+ >>>> $env:FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION=$FF_CMD_DISABLE_DELAYED_ERROR_LEVEL_EXPANSION DEBUG: 6+ >>>> $FF_NETWORK_PER_BUILD="false" DEBUG: 7+ >>>> $env:FF_NETWORK_PER_BUILD=$FF_NETWORK_PER_BUILD DEBUG: 8+ >>>> $FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY="true" DEBUG: 9+ >>>> $env:FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=$FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY DEBUG: 10+ >>>> $FF_USE_DIRECT_DOWNLOAD="true" DEBUG: 11+ >>>> $env:FF_USE_DIRECT_DOWNLOAD=$FF_USE_DIRECT_DOWNLOAD DEBUG: 12+ >>>> $FF_SKIP_NOOP_BUILD_STAGES="true" 
DEBUG: 13+ >>>> $env:FF_SKIP_NOOP_BUILD_STAGES=$FF_SKIP_NOOP_BUILD_STAGES DEBUG: 14+ >>>> $FF_SHELL_EXECUTOR_USE_LEGACY_PROCESS_KILL="false" DEBUG: 15+ >>>> $env:FF_SHELL_EXECUTOR_USE_LEGACY_PROCESS_KILL=$FF_SHELL_EXECUTOR_USE_LEGACY_PROCESS_KILL DEBUG: 16+ >>>> $FF_RESET_HELPER_IMAGE_ENTRYPOINT="true" DEBUG: 17+ >>>> $env:FF_RESET_HELPER_IMAGE_ENTRYPOINT=$FF_RESET_HELPER_IMAGE_ENTRYPOINT DEBUG: 18+ >>>> $FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER="true" DEBUG: 19+ >>>> $env:FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER=$FF_USE_GO_CLOUD_WITH_CACHE_ARCHIVER DEBUG: 20+ >>>> $FF_USE_FASTZIP="false" DEBUG: 21+ >>>> $env:FF_USE_FASTZIP=$FF_USE_FASTZIP DEBUG: 22+ >>>> $FF_GITLAB_REGISTRY_HELPER_IMAGE="false" DEBUG: 23+ >>>> $env:FF_GITLAB_REGISTRY_HELPER_IMAGE=$FF_GITLAB_REGISTRY_HELPER_IMAGE DEBUG: 24+ >>>> $FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR="false" DEBUG: 25+ >>>> $env:FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR=$FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR DEBUG: 26+ >>>> $FF_ENABLE_BASH_EXIT_CODE_CHECK="false" DEBUG: 27+ >>>> $env:FF_ENABLE_BASH_EXIT_CODE_CHECK=$FF_ENABLE_BASH_EXIT_CODE_CHECK DEBUG: 28+ >>>> $FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY="true" DEBUG: 29+ >>>> $env:FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY=$FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY DEBUG: 30+ >>>> $FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE="false" DEBUG: 31+ >>>> $env:FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE=$FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE DEBUG: 32+ >>>> $FF_USE_NEW_BASH_EVAL_STRATEGY="false" DEBUG: 33+ >>>> $env:FF_USE_NEW_BASH_EVAL_STRATEGY=$FF_USE_NEW_BASH_EVAL_STRATEGY DEBUG: 34+ >>>> $FF_USE_POWERSHELL_PATH_RESOLVER="false" DEBUG: 35+ >>>> $env:FF_USE_POWERSHELL_PATH_RESOLVER=$FF_USE_POWERSHELL_PATH_RESOLVER DEBUG: 36+ >>>> $CI_RUNNER_SHORT_TOKEN="" DEBUG: 37+ >>>> $env:CI_RUNNER_SHORT_TOKEN=$CI_RUNNER_SHORT_TOKEN DEBUG: 38+ >>>> $CI_BUILDS_DIR="/tmp/gitlab-runner-custom-executor-test012996188/builds" DEBUG: 39+ >>>> $env:CI_BUILDS_DIR=$CI_BUILDS_DIR DEBUG: 40+ >>>> 
$CI_PROJECT_DIR="/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0" DEBUG: 41+ >>>> $env:CI_PROJECT_DIR=$CI_PROJECT_DIR DEBUG: 42+ >>>> $CI_CONCURRENT_ID="0" DEBUG: 43+ >>>> $env:CI_CONCURRENT_ID=$CI_CONCURRENT_ID DEBUG: 44+ >>>> $CI_CONCURRENT_PROJECT_ID="0" DEBUG: 45+ >>>> $env:CI_CONCURRENT_PROJECT_ID=$CI_CONCURRENT_PROJECT_ID DEBUG: 46+ >>>> $CI_SERVER="yes" DEBUG: 47+ >>>> $env:CI_SERVER=$CI_SERVER DEBUG: 48+ >>>> $CI_JOB_STATUS="running" DEBUG: 49+ >>>> $env:CI_JOB_STATUS=$CI_JOB_STATUS DEBUG: 50+ >>>> $CI_DEBUG_TRACE="true" DEBUG: 51+ >>>> $env:CI_DEBUG_TRACE=$CI_DEBUG_TRACE DEBUG: 52+ >>>> $CI_SHARED_ENVIRONMENT="true" DEBUG: 53+ >>>> $env:CI_SHARED_ENVIRONMENT=$CI_SHARED_ENVIRONMENT DEBUG: 54+ >>>> $CI_RUNNER_VERSION="13.12.0" DEBUG: 55+ >>>> $env:CI_RUNNER_VERSION=$CI_RUNNER_VERSION DEBUG: 56+ >>>> $CI_RUNNER_REVISION="7a6612da" DEBUG: 57+ >>>> $env:CI_RUNNER_REVISION=$CI_RUNNER_REVISION DEBUG: 58+ >>>> $CI_RUNNER_EXECUTABLE_ARCH="linux/amd64" DEBUG: 59+ >>>> $env:CI_RUNNER_EXECUTABLE_ARCH=$CI_RUNNER_EXECUTABLE_ARCH DEBUG: 60+ >>>> cd "/tmp/gitlab-runner-custom-executor-test012996188/builds/project-0" DEBUG: 61+ if( >>>> !$?) { Exit &{if($LASTEXITCODE) {$LASTEXITCODE} else {1}} } DEBUG: 63+ >>>> echo "`$ echo Hello World" $ echo Hello World DEBUG: 64+ >>>> echo Hello World Hello World DEBUG: 65+ if( >>>> !$?) 
{ Exit &{if($LASTEXITCODE) {$LASTEXITCODE} else {1}} } <<<<<<<<<< Job succeeded  === RUN TestBuildMultilineCommand === RUN TestBuildMultilineCommand/bash time="2021-05-20T15:29:49Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:49Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:49Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildMultilineCommand/cmd === RUN TestBuildMultilineCommand/powershell === RUN TestBuildMultilineCommand/pwsh time="2021-05-20T15:29:51Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:51Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:51Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildMultilineCommand (3.01s) --- PASS: TestBuildMultilineCommand/bash (0.50s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test011575919 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor405722370/script887707257/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor405722370/script887707257/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor405722370/script452117380/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor405722370/script452117380/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test011575919/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... 
Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor405722370/script655711507/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor405722370/script655711507/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ if true; then # collapsed multi-line command Hello World <<<<<<<<<< Job succeeded  --- SKIP: TestBuildMultilineCommand/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildMultilineCommand/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildMultilineCommand/pwsh (2.51s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test444466518 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor348172989/script080834296/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor348172989/script080834296/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00007e1b0), Stderr:(*bytes.Buffer)(0xc00007e1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor348172989/script028039671/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor348172989/script028039671/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test444466518/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor348172989/script006671082/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor348172989/script006671082/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ if (0 -eq 0) { # collapsed multi-line command Hello World <<<<<<<<<< Job succeeded  === RUN TestBuildWithGoodGitSSLCAInfo === RUN TestBuildWithGoodGitSSLCAInfo/bash time="2021-05-20T15:29:53Z" level=warning msg="Error while executing file based variables removal script" error="exit status 2" job=0 project=0 time="2021-05-20T15:29:53Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:53Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:53Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildWithGoodGitSSLCAInfo/cmd === RUN TestBuildWithGoodGitSSLCAInfo/powershell === RUN TestBuildWithGoodGitSSLCAInfo/pwsh 
time="2021-05-20T15:29:56Z" level=warning msg="Error while executing file based variables removal script" error="exit status 2" job=0 project=0 time="2021-05-20T15:29:56Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:56Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:56Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildWithGoodGitSSLCAInfo (4.53s) --- PASS: TestBuildWithGoodGitSSLCAInfo/bash (1.35s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test454277953 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor800890284/script899775771/script. prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor800890284/script899775771/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor800890284/script233339838/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor800890284/script233339838/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00008c1b0), Stderr:(*bytes.Buffer)(0xc00008c1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test454277953/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... Updating/initializing submodules... Submodule 'gitlab-grack' (https://gitlab.com/gitlab-org/gitlab-grack.git) registered for path 'gitlab-grack' Cloning into '/tmp/gitlab-runner-custom-executor-test454277953/builds/gitlab-org/ci-cd/tests/gitlab-test/gitlab-grack'... Submodule path 'gitlab-grack': checked out '645f6c4c82fd3f5e06f67134450a570b795e55a6' Entering 'gitlab-grack' <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor800890284/script194240517/script. 
build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor800890284/script194240517/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Cleaning up file based variables Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor800890284/script036389280/script. cleanup_file_variables] RUN accepts two arguments: the path to the script to execute and the stage of the job setting system failure Unknown build stage "cleanup_file_variables" Exitting with code 2 Job succeeded  --- SKIP: TestBuildWithGoodGitSSLCAInfo/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildWithGoodGitSSLCAInfo/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildWithGoodGitSSLCAInfo/pwsh (3.18s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test625421951 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor342353362/script317454601/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor342353362/script317454601/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00008c1b0), Stderr:(*bytes.Buffer)(0xc00008c1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor342353362/script681240788/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor342353362/script681240788/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test625421951/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Updating/initializing submodules... Submodule 'gitlab-grack' (https://gitlab.com/gitlab-org/gitlab-grack.git) registered for path 'gitlab-grack' Cloning into '/tmp/gitlab-runner-custom-executor-test625421951/builds/gitlab-org/ci-cd/tests/gitlab-test/gitlab-grack'... 
Submodule path 'gitlab-grack': checked out '645f6c4c82fd3f5e06f67134450a570b795e55a6' git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Entering 'gitlab-grack' <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor342353362/script790462499/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor342353362/script790462499/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Cleaning up file based variables Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor342353362/script848759078/script.ps1 cleanup_file_variables] RUN accepts two arguments: the path to the script to execute and the stage of the job setting system failure Unknown build stage "cleanup_file_variables" Exitting with code 2 Job succeeded  === RUN TestBuildWithGitSSLAndStrategyFetch === RUN TestBuildWithGitSSLAndStrategyFetch/bash time="2021-05-20T15:29:57Z" level=warning msg="Error while executing file based variables removal script" error="exit status 2" job=0 project=0 time="2021-05-20T15:29:57Z" 
level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:57Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:57Z" level=warning cleanup_std=err job=0 project=0 time="2021-05-20T15:29:57Z" level=warning msg="Error while executing file based variables removal script" error="exit status 2" job=0 project=0 time="2021-05-20T15:29:57Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:57Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:29:57Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildWithGitSSLAndStrategyFetch/cmd === RUN TestBuildWithGitSSLAndStrategyFetch/powershell === RUN TestBuildWithGitSSLAndStrategyFetch/pwsh time="2021-05-20T15:30:01Z" level=warning msg="Error while executing file based variables removal script" error="exit status 2" job=0 project=0 time="2021-05-20T15:30:01Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:01Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:01Z" level=warning cleanup_std=err job=0 project=0 time="2021-05-20T15:30:03Z" level=warning msg="Error while executing file based variables removal script" error="exit status 2" job=0 project=0 time="2021-05-20T15:30:03Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:03Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:03Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildWithGitSSLAndStrategyFetch (6.93s) --- PASS: TestBuildWithGitSSLAndStrategyFetch/bash (1.55s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test679367757 test.go:24: Running with 
gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor690260808/script302587399/script. prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor690260808/script302587399/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor690260808/script287891898/script. 
get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor690260808/script287891898/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000a01b0), Stderr:(*bytes.Buffer)(0xc0000a01b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo pre-clone-script pre-clone-script Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test679367757/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... Updating/initializing submodules... Submodule 'gitlab-grack' (https://gitlab.com/gitlab-org/gitlab-grack.git) registered for path 'gitlab-grack' Cloning into '/tmp/gitlab-runner-custom-executor-test679367757/builds/gitlab-org/ci-cd/tests/gitlab-test/gitlab-grack'... Submodule path 'gitlab-grack': checked out '645f6c4c82fd3f5e06f67134450a570b795e55a6' Entering 'gitlab-grack' <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor690260808/script637373905/script. 
build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor690260808/script637373905/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Cleaning up file based variables Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor690260808/script171535612/script. cleanup_file_variables] RUN accepts two arguments: the path to the script to execute and the stage of the job setting system failure Unknown build stage "cleanup_file_variables" Exitting with code 2 Job succeeded  test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor197597739/script629386126/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor197597739/script629386126/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor197597739/script532444053/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor197597739/script532444053/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo pre-clone-script pre-clone-script Fetching changes... Reinitialized existing Git repository in /tmp/gitlab-runner-custom-executor-test679367757/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Checking out 91956efe as master... Updating/initializing submodules... 
Synchronizing submodule url for 'gitlab-grack' Entering 'gitlab-grack' Entering 'gitlab-grack' HEAD is now at 645f6c4 CHANGELOG Entering 'gitlab-grack' <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor197597739/script784195056/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor197597739/script784195056/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Cleaning up file based variables Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor197597739/script382795407/script. 
cleanup_file_variables] RUN accepts two arguments: the path to the script to execute and the stage of the job setting system failure Unknown build stage "cleanup_file_variables" Exitting with code 2 Job succeeded  --- SKIP: TestBuildWithGitSSLAndStrategyFetch/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildWithGitSSLAndStrategyFetch/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildWithGitSSLAndStrategyFetch/pwsh (5.38s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test464011938 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor019665817/script686217252/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor019665817/script686217252/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor019665817/script081788723/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor019665817/script081788723/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo pre-clone-script pre-clone-script Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test464011938/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Updating/initializing submodules... Submodule 'gitlab-grack' (https://gitlab.com/gitlab-org/gitlab-grack.git) registered for path 'gitlab-grack' Cloning into '/tmp/gitlab-runner-custom-executor-test464011938/builds/gitlab-org/ci-cd/tests/gitlab-test/gitlab-grack'... 
Submodule path 'gitlab-grack': checked out '645f6c4c82fd3f5e06f67134450a570b795e55a6' git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Entering 'gitlab-grack' <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor019665817/script349207798/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor019665817/script349207798/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Cleaning up file based variables Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor019665817/script429898717/script.ps1 cleanup_file_variables] RUN accepts two arguments: the path to the script to execute and the stage of the job setting system failure Unknown build stage "cleanup_file_variables" Exitting with code 2 Job succeeded  test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... 
Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor638600600/script761065495/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor638600600/script761065495/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor638600600/script276646538/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor638600600/script276646538/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo pre-clone-script pre-clone-script Fetching changes... Reinitialized existing Git repository in /tmp/gitlab-runner-custom-executor-test464011938/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Checking out 91956efe as master... git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Updating/initializing submodules... 
Synchronizing submodule url for 'gitlab-grack' Entering 'gitlab-grack' Entering 'gitlab-grack' HEAD is now at 645f6c4 CHANGELOG git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Entering 'gitlab-grack' <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor638600600/script008428641/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor638600600/script008428641/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Cleaning up file based variables Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor638600600/script247943244/script.ps1 cleanup_file_variables] RUN accepts two arguments: the path to the script to execute and the stage of the job setting system failure Unknown build stage "cleanup_file_variables" Exitting with code 2 Job succeeded  === RUN TestBuildChangesBranchesWhenFetchingRepo === RUN TestBuildChangesBranchesWhenFetchingRepo/bash time="2021-05-20T15:30:03Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 
time="2021-05-20T15:30:03Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:03Z" level=warning cleanup_std=err job=0 project=0 time="2021-05-20T15:30:04Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:04Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:04Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildChangesBranchesWhenFetchingRepo/cmd === RUN TestBuildChangesBranchesWhenFetchingRepo/powershell === RUN TestBuildChangesBranchesWhenFetchingRepo/pwsh time="2021-05-20T15:30:07Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:07Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:07Z" level=warning cleanup_std=err job=0 project=0 time="2021-05-20T15:30:09Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:09Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:09Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildChangesBranchesWhenFetchingRepo (6.19s) --- PASS: TestBuildChangesBranchesWhenFetchingRepo/bash (1.37s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test039683387 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor241138526/script245213989/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor241138526/script245213989/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000901b0), Stderr:(*bytes.Buffer)(0xc0000901b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor241138526/script201903680/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor241138526/script201903680/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test039683387/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... 
Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor241138526/script509912735/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor241138526/script509912735/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000ac060), Stderr:(*bytes.Buffer)(0xc0000ac060), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor496529778/script764927017/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor496529778/script764927017/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor496529778/script231444852/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor496529778/script231444852/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000ac060), Stderr:(*bytes.Buffer)(0xc0000ac060), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Reinitialized existing Git repository in /tmp/gitlab-runner-custom-executor-test039683387/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Checking out 2371dd05 as add-lfs-object... 
Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor496529778/script117665347/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor496529778/script117665347/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  --- SKIP: TestBuildChangesBranchesWhenFetchingRepo/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildChangesBranchesWhenFetchingRepo/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildChangesBranchesWhenFetchingRepo/pwsh (4.81s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test870022342 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor009324397/script737048040/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor009324397/script737048040/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc00007e1b0), Stderr:(*bytes.Buffer)(0xc00007e1b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor009324397/script638420519/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor009324397/script638420519/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test870022342/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor009324397/script033483610/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor009324397/script033483610/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor572274417/script845184412/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor572274417/script845184412/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor572274417/script009082443/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor572274417/script009082443/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Reinitialized existing Git repository in /tmp/gitlab-runner-custom-executor-test870022342/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Checking out 2371dd05 as add-lfs-object... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor572274417/script725492014/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor572274417/script725492014/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  === RUN TestBuildPowerShellCatchesExceptions === RUN TestBuildPowerShellCatchesExceptions/powershell === RUN TestBuildPowerShellCatchesExceptions/pwsh time="2021-05-20T15:30:11Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:11Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:11Z" level=warning cleanup_std=err job=0 project=0 time="2021-05-20T15:30:13Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:13Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 
time="2021-05-20T15:30:13Z" level=warning cleanup_std=err job=0 project=0 time="2021-05-20T15:30:15Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:15Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:15Z" level=warning cleanup_std=err job=0 project=0 time="2021-05-20T15:30:17Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:17Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:17Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildPowerShellCatchesExceptions (7.99s) --- SKIP: TestBuildPowerShellCatchesExceptions/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildPowerShellCatchesExceptions/pwsh (7.99s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test042294965 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor145139344/script465043119/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor145139344/script465043119/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000aa060), Stderr:(*bytes.Buffer)(0xc0000aa060), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor145139344/script085267522/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor145139344/script085267522/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test042294965/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Created fresh repository. Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor145139344/script281198265/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor145139344/script281198265/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor149503172/script584681811/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor149503172/script584681811/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor149503172/script599456406/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor149503172/script599456406/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Reinitialized existing Git repository in /tmp/gitlab-runner-custom-executor-test042294965/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor149503172/script925427965/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor149503172/script925427965/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor221201464/script093418551/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor221201464/script093418551/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor221201464/script071056426/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor221201464/script071056426/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Reinitialized existing Git repository in /tmp/gitlab-runner-custom-executor-test042294965/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor221201464/script236495233/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor221201464/script236495233/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor517600492/script626709339/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor517600492/script626709339/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor517600492/script043183358/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor517600492/script043183358/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000821b0), Stderr:(*bytes.Buffer)(0xc0000821b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Reinitialized existing Git repository in /tmp/gitlab-runner-custom-executor-test042294965/builds/gitlab-org/ci-cd/tests/gitlab-test/.git/ Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor517600492/script054713925/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor517600492/script054713925/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000ac060), Stderr:(*bytes.Buffer)(0xc0000ac060), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ echo Hello World Hello World <<<<<<<<<< Job succeeded  === RUN TestBuildOnCustomDirectory === RUN TestBuildOnCustomDirectory/bash === RUN TestBuildOnCustomDirectory/bash/custom_directory_defined time="2021-05-20T15:30:17Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:17Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:17Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildOnCustomDirectory/bash/custom_directory_not_defined time="2021-05-20T15:30:17Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:17Z" level=warning msg="Mocking 
execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:17Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildOnCustomDirectory/cmd === RUN TestBuildOnCustomDirectory/powershell === RUN TestBuildOnCustomDirectory/pwsh === RUN TestBuildOnCustomDirectory/pwsh/custom_directory_defined time="2021-05-20T15:30:19Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:19Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:19Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildOnCustomDirectory/pwsh/custom_directory_not_defined time="2021-05-20T15:30:21Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:21Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:21Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildOnCustomDirectory (3.88s) --- PASS: TestBuildOnCustomDirectory/bash (0.16s) --- PASS: TestBuildOnCustomDirectory/bash/custom_directory_defined (0.07s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test597404384 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor617009343/script399873810/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor617009343/script399873810/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor617009343/script669732681/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor617009343/script669732681/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000b8060), Stderr:(*bytes.Buffer)(0xc0000b8060), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/custom/directory/0/project-0/.git/ Created fresh repository. Checking out 91956efe as master... 
Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor617009343/script273265172/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor617009343/script273265172/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ pwd /tmp/custom/directory/0/project-0 <<<<<<<<<< Job succeeded  --- PASS: TestBuildOnCustomDirectory/bash/custom_directory_not_defined (0.08s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test421379171 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor249272422/script790070925/script. 
prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor249272422/script790070925/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... <<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor249272422/script273109640/script. get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor249272422/script273109640/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test421379171/builds/project-0/.git/ Created fresh repository. Checking out 91956efe as master... 
Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor249272422/script067015239/script. build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/bin/bash", Args:[]string{"bash", "/tmp/custom-executor249272422/script067015239/script."}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ pwd /tmp/gitlab-runner-custom-executor-test421379171/builds/project-0 <<<<<<<<<< Job succeeded  --- SKIP: TestBuildOnCustomDirectory/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildOnCustomDirectory/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildOnCustomDirectory/pwsh (3.73s) --- PASS: TestBuildOnCustomDirectory/pwsh/custom_directory_defined (1.59s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test498489594 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor047476753/script900980284/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor047476753/script900980284/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor047476753/script799647851/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor047476753/script799647851/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/custom/directory/0/project-0/.git/ Created fresh repository. Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor047476753/script013223118/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor047476753/script013223118/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ pwd Path ---- /tmp/custom/directory/0/project-0 <<<<<<<<<< Job succeeded  --- PASS: TestBuildOnCustomDirectory/pwsh/custom_directory_not_defined (1.63s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test725535701 test.go:24: Running with gitlab-runner 13.12.0 (7a6612da) Preparing the "custom" executor Custom Executor binary - "config" stage Mocking execution of: [] Using Custom executor... Custom Executor binary - "prepare" stage Mocking execution of: [] PREPARE doesn't accept any arguments. 
It just does its job Preparing environment Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor537420080/script513623247/script.ps1 prepare_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor537420080/script513623247/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Running on runner-pvr9xbdq-project-250833-concurrent-0... 
<<<<<<<<<< Getting source from Git repository Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor537420080/script739044322/script.ps1 get_sources] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor537420080/script739044322/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000801b0), Stderr:(*bytes.Buffer)(0xc0000801b0), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> Fetching changes... Initialized empty Git repository in /tmp/gitlab-runner-custom-executor-test725535701/builds/project-0/.git/ Created fresh repository. Checking out 91956efe as master... 
git-lfs/2.11.0 (GitHub; linux amd64; go 1.14.1; git 48b28d97) Skipping Git submodules setup <<<<<<<<<< Executing "step_script" stage of the job script WARNING: Starting with version 14.0 the 'build_script' stage will be replaced with 'step_script': https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26426 Custom Executor binary - "run" stage Mocking execution of: [/tmp/custom-executor537420080/script820995545/script.ps1 build_script] RUN accepts two arguments: the path to the script to execute and the stage of the job Executing: &exec.Cmd{Path:"/usr/bin/pwsh", Args:[]string{"pwsh", "-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command", "/tmp/custom-executor537420080/script820995545/script.ps1"}, Env:[]string(nil), Dir:"", Stdin:io.Reader(nil), Stdout:(*bytes.Buffer)(0xc0000aa060), Stderr:(*bytes.Buffer)(0xc0000aa060), ExtraFiles:[]*os.File(nil), SysProcAttr:(*syscall.SysProcAttr)(nil), Process:(*os.Process)(nil), ProcessState:(*os.ProcessState)(nil), ctx:context.Context(nil), lookPathErr:error(nil), finished:false, childFiles:[]*os.File(nil), closeAfterStart:[]io.Closer(nil), closeAfterWait:[]io.Closer(nil), goroutine:[]func() error(nil), errch:(chan error)(nil), waitDone:(chan struct {})(nil)} >>>>>>>>>> $ pwd Path ---- /tmp/gitlab-runner-custom-executor-test725535701/builds/project-0 <<<<<<<<<< Job succeeded  === RUN TestBuildLogLimitExceeded === RUN TestBuildLogLimitExceeded/bash === RUN TestBuildLogLimitExceeded/bash/canceled_job time="2021-05-20T15:30:21Z" level=warning msg="Error while executing file based variables removal script" error="context canceled" job=0 project=0 time="2021-05-20T15:30:21Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:21Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:21Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildLogLimitExceeded/bash/successful_job 
time="2021-05-20T15:30:21Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:21Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:21Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildLogLimitExceeded/bash/failed_job time="2021-05-20T15:30:21Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:21Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:21Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildLogLimitExceeded/cmd === RUN TestBuildLogLimitExceeded/powershell === RUN TestBuildLogLimitExceeded/pwsh === RUN TestBuildLogLimitExceeded/pwsh/successful_job time="2021-05-20T15:30:23Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:23Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:23Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildLogLimitExceeded/pwsh/failed_job time="2021-05-20T15:30:25Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:25Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:25Z" level=warning cleanup_std=err job=0 project=0 === RUN TestBuildLogLimitExceeded/pwsh/canceled_job time="2021-05-20T15:30:25Z" level=warning msg="Custom Executor binary - \"cleanup\" stage" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:25Z" level=warning msg="Mocking execution of: []" cleanup_std=err job=0 project=0 time="2021-05-20T15:30:25Z" level=warning cleanup_std=err job=0 project=0 --- PASS: TestBuildLogLimitExceeded (3.91s) --- PASS: TestBuildLogLimitExceeded/bash (0.21s) integration_test.go:49: Build directory: 
/tmp/gitlab-runner-custom-executor-test893172580 --- PASS: TestBuildLogLimitExceeded/bash/canceled_job (0.05s) --- PASS: TestBuildLogLimitExceeded/bash/successful_job (0.07s) --- PASS: TestBuildLogLimitExceeded/bash/failed_job (0.08s) --- SKIP: TestBuildLogLimitExceeded/cmd (0.00s) integration_tests.go:14: cmd failed exec: "cmd": executable file not found in $PATH --- SKIP: TestBuildLogLimitExceeded/powershell (0.00s) integration_tests.go:14: powershell failed exec: "powershell": executable file not found in $PATH --- PASS: TestBuildLogLimitExceeded/pwsh (3.70s) integration_test.go:49: Build directory: /tmp/gitlab-runner-custom-executor-test945681513 --- PASS: TestBuildLogLimitExceeded/pwsh/successful_job (1.59s) --- PASS: TestBuildLogLimitExceeded/pwsh/failed_job (1.60s) --- PASS: TestBuildLogLimitExceeded/pwsh/canceled_job (0.00s) PASS coverage: 27.6% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/executors/custom 43.389s coverage: 27.6% of statements in gitlab.com/gitlab-org/gitlab-runner/...  
--- Starting part 0 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/executors/custom/command' package with coverprofile in 'count' mode:  === RUN TestCommand_Run === RUN TestCommand_Run/error_on_cmd_start() === RUN TestCommand_Run/command_ends_with_a_build_failure === RUN TestCommand_Run/command_ends_with_a_system_failure === RUN TestCommand_Run/command_ends_with_a_unknown_failure === RUN TestCommand_Run/command_times_out --- PASS: TestCommand_Run (1.50s) --- PASS: TestCommand_Run/error_on_cmd_start() (0.00s) command_test.go:34: PASS: Start() command_test.go:34: PASS: Wait() --- PASS: TestCommand_Run/command_ends_with_a_build_failure (0.50s) command_test.go:34: PASS: Start() command_test.go:34: PASS: Wait() --- PASS: TestCommand_Run/command_ends_with_a_system_failure (0.50s) command_test.go:34: PASS: Start() command_test.go:34: PASS: Wait() --- PASS: TestCommand_Run/command_ends_with_a_unknown_failure (0.50s) command_test.go:34: PASS: Start() command_test.go:34: PASS: Wait() --- PASS: TestCommand_Run/command_times_out (0.00s) command_test.go:34: PASS: Start() command_test.go:34: PASS: Wait() command_test.go:35: PASS: KillAndWait(*process.MockCommander,string) PASS coverage: 1.4% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/executors/custom/command 1.518s coverage: 1.4% of statements in gitlab.com/gitlab-org/gitlab-runner/...  
--- Starting part 0 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/executors/docker' package with coverprofile in 'count' mode:  go: downloading github.com/docker/cli v20.10.2+incompatible go: downloading gitlab.com/gitlab-org/gitlab-terminal v0.0.0-20210104151801-2a71b03b4462 go: downloading github.com/bmatcuk/doublestar v1.3.0 go: downloading github.com/kardianos/osext v0.0.0-20160811001526-c2c54e542fb7 go: downloading github.com/hashicorp/go-version v1.2.1 go: extracting github.com/hashicorp/go-version v1.2.1 go: extracting github.com/kardianos/osext v0.0.0-20160811001526-c2c54e542fb7 go: extracting github.com/bmatcuk/doublestar v1.3.0 go: extracting gitlab.com/gitlab-org/gitlab-terminal v0.0.0-20210104151801-2a71b03b4462 go: extracting github.com/docker/cli v20.10.2+incompatible go: downloading github.com/docker/docker-credential-helpers v0.4.1 go: extracting github.com/docker/docker-credential-helpers v0.4.1 go: finding github.com/bmatcuk/doublestar v1.3.0 go: finding github.com/docker/cli v20.10.2+incompatible go: finding github.com/kardianos/osext v0.0.0-20160811001526-c2c54e542fb7 go: finding gitlab.com/gitlab-org/gitlab-terminal v0.0.0-20210104151801-2a71b03b4462 go: finding github.com/docker/docker-credential-helpers v0.4.1 go: finding github.com/hashicorp/go-version v1.2.1 === RUN TestParseDeviceStringOne --- PASS: TestParseDeviceStringOne (0.00s) === RUN TestParseDeviceStringTwo --- PASS: TestParseDeviceStringTwo (0.00s) === RUN TestParseDeviceStringThree --- PASS: TestParseDeviceStringThree (0.00s) === RUN TestParseDeviceStringFour --- PASS: TestParseDeviceStringFour (0.00s) === RUN TestBindDeviceRequests === RUN TestBindDeviceRequests/all === RUN TestBindDeviceRequests/#00 === RUN TestBindDeviceRequests/somestring=thatshouldtriggeranerror --- PASS: TestBindDeviceRequests (0.00s) --- PASS: TestBindDeviceRequests/all (0.00s) --- PASS: TestBindDeviceRequests/#00 (0.00s) --- PASS: TestBindDeviceRequests/somestring=thatshouldtriggeranerror (0.00s) === 
RUN TestVerifyAllowedImage --- PASS: TestVerifyAllowedImage (0.00s) === RUN TestServiceFromNamedImage === RUN TestServiceFromNamedImage/service === RUN TestServiceFromNamedImage/service:version === RUN TestServiceFromNamedImage/namespace/service === RUN TestServiceFromNamedImage/namespace/service:version === RUN TestServiceFromNamedImage/domain.tld/service === RUN TestServiceFromNamedImage/domain.tld/service:version === RUN TestServiceFromNamedImage/domain.tld/namespace/service === RUN TestServiceFromNamedImage/domain.tld/namespace/service:version === RUN TestServiceFromNamedImage/domain.tld:8080/service === RUN TestServiceFromNamedImage/domain.tld:8080/service:version === RUN TestServiceFromNamedImage/domain.tld:8080/namespace/service === RUN TestServiceFromNamedImage/domain.tld:8080/namespace/service:version === RUN TestServiceFromNamedImage/subdomain.domain.tld:8080/service === RUN TestServiceFromNamedImage/subdomain.domain.tld:8080/service:version === RUN TestServiceFromNamedImage/subdomain.domain.tld:8080/namespace/service === RUN TestServiceFromNamedImage/subdomain.domain.tld:8080/namespace/service:version --- PASS: TestServiceFromNamedImage (0.01s) --- PASS: TestServiceFromNamedImage/service (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/service:version (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: 
NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/namespace/service (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/namespace/service:version (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/domain.tld/service (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: 
TestServiceFromNamedImage/domain.tld/service:version (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/domain.tld/namespace/service (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/domain.tld/namespace/service:version (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/domain.tld:8080/service (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: 
PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/domain.tld:8080/service:version (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/domain.tld:8080/namespace/service (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/domain.tld:8080/namespace/service:version (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/subdomain.domain.tld:8080/service 
(0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/subdomain.domain.tld:8080/service:version (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/subdomain.domain.tld:8080/namespace/service (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) --- PASS: TestServiceFromNamedImage/subdomain.domain.tld:8080/namespace/service:version (0.00s) docker_test.go:288: PASS: GetDockerImage(string) docker_test.go:288: PASS: ContainerRemove(*context.emptyCtx,mock.argumentMatcher,types.ContainerRemoveOptions) docker_test.go:288: PASS: NetworkList(*context.emptyCtx,network.ListOptions) docker_test.go:288: PASS: 
NetworkDisconnect(*context.emptyCtx,string,mock.argumentMatcher,bool) docker_test.go:288: PASS: ContainerCreate(string,string,string,string,string) docker_test.go:288: PASS: ContainerStart(*context.emptyCtx,string,string) === RUN TestHelperImageWithVariable --- PASS: TestHelperImageWithVariable (0.00s) docker_test.go:330: PASS: GetDockerImage(string) === RUN TestPrepareBuildsDir === RUN TestPrepareBuildsDir/rootDir's_parent_mounted_as_volume === RUN TestPrepareBuildsDir/rootDir_is_not_an_absolute_path === RUN TestPrepareBuildsDir/rootDir_is_/ === RUN TestPrepareBuildsDir/error_on_volume_parsing === RUN TestPrepareBuildsDir/error_on_volume_parser_creation === RUN TestPrepareBuildsDir/rootDir_mounted_as_host_based_volume === RUN TestPrepareBuildsDir/rootDir_mounted_as_container_based_volume === RUN TestPrepareBuildsDir/rootDir_not_mounted_as_volume --- PASS: TestPrepareBuildsDir (0.00s) --- PASS: TestPrepareBuildsDir/rootDir's_parent_mounted_as_volume (0.00s) --- PASS: TestPrepareBuildsDir/rootDir_is_not_an_absolute_path (0.00s) --- PASS: TestPrepareBuildsDir/rootDir_is_/ (0.00s) --- PASS: TestPrepareBuildsDir/error_on_volume_parsing (0.00s) --- PASS: TestPrepareBuildsDir/error_on_volume_parser_creation (0.00s) --- PASS: TestPrepareBuildsDir/rootDir_mounted_as_host_based_volume (0.00s) --- PASS: TestPrepareBuildsDir/rootDir_mounted_as_container_based_volume (0.00s) --- PASS: TestPrepareBuildsDir/rootDir_not_mounted_as_volume (0.00s) === RUN TestCreateVolumes === RUN TestCreateVolumes/volumes_defined,_empty_buildsDir,_clone_strategy,_other_error_on_user_volume === RUN TestCreateVolumes/volumes_manager_not_created === RUN TestCreateVolumes/volumes_defined,_empty_buildsDir,_clone_strategy,_no_errors_on_user_volume === RUN TestCreateVolumes/volumes_defined,_empty_buildsDir,_clone_strategy,_cache_containers_disabled_wrapped_error_on_user_volume === RUN TestCreateVolumes/volumes_defined,_empty_buildsDir,_clone_strategy,_duplicated_error_on_user_volume === RUN 
TestCreateVolumes/no_volumes_defined,_empty_buildsDir,_clone_strategy,_no_errors === RUN TestCreateVolumes/no_volumes_defined,_defined_buildsDir,_clone_strategy,_no_errors === RUN TestCreateVolumes/no_volumes_defined,_defined_buildsDir,_fetch_strategy,_no_errors === RUN TestCreateVolumes/volumes_defined,_empty_buildsDir,_clone_strategy,_cache_containers_disabled_error_on_user_volume --- PASS: TestCreateVolumes (0.00s) --- PASS: TestCreateVolumes/volumes_defined,_empty_buildsDir,_clone_strategy,_other_error_on_user_volume (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateVolumes/volumes_manager_not_created (0.00s) docker_test.go:452: PASS: Close() --- PASS: TestCreateVolumes/volumes_defined,_empty_buildsDir,_clone_strategy,_no_errors_on_user_volume (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateVolumes/volumes_defined,_empty_buildsDir,_clone_strategy,_cache_containers_disabled_wrapped_error_on_user_volume (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateVolumes/volumes_defined,_empty_buildsDir,_clone_strategy,_duplicated_error_on_user_volume (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateVolumes/no_volumes_defined,_empty_buildsDir,_clone_strategy,_no_errors (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:452: PASS: Close() --- PASS: TestCreateVolumes/no_volumes_defined,_defined_buildsDir,_clone_strategy,_no_errors (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:452: PASS: Close() --- PASS: TestCreateVolumes/no_volumes_defined,_defined_buildsDir,_fetch_strategy,_no_errors (0.00s) 
docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:452: PASS: Close() --- PASS: TestCreateVolumes/volumes_defined,_empty_buildsDir,_clone_strategy,_cache_containers_disabled_error_on_user_volume (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:452: PASS: Close() PASS coverage: 8.7% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/executors/docker 0.038s coverage: 8.7% of statements in gitlab.com/gitlab-org/gitlab-runner/...  --- Starting part 1 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/executors/docker' package with coverprofile in 'count' mode:  === RUN TestCreateBuildVolume === RUN TestCreateBuildVolume/volumes_manager_not_created === RUN TestCreateBuildVolume/git_strategy_clone,_empty_buildsDir,_other_error === RUN TestCreateBuildVolume/git_strategy_clone,_empty_buildsDir,_duplicated_error === RUN TestCreateBuildVolume/git_strategy_clone,_non-empty_buildsDir,_other_error === RUN TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_other_error === RUN TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_cache_volumes_disabled_wrapped_error === RUN TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_cache_volumes_disabled,_duplicated_error === RUN TestCreateBuildVolume/git_strategy_clone,_non-empty_buildsDir,_duplicated_error === RUN TestCreateBuildVolume/git_strategy_fetch,_empty_buildsDir,_duplicated_error === RUN TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_wrapped_duplicated_error === RUN TestCreateBuildVolume/git_strategy_fetch,_empty_buildsDir,_other_error === RUN TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_no_error === RUN TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_duplicated_error === RUN TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_cache_volumes_disabled === RUN 
TestCreateBuildVolume/git_strategy_clone,_empty_buildsDir,_no_error === RUN TestCreateBuildVolume/git_strategy_clone,_non-empty_buildsDir,_no_error === RUN TestCreateBuildVolume/git_strategy_fetch,_empty_buildsDir,_no_error --- PASS: TestCreateBuildVolume (0.01s) --- PASS: TestCreateBuildVolume/volumes_manager_not_created (0.00s) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_clone,_empty_buildsDir,_other_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: CreateTemporary(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_clone,_empty_buildsDir,_duplicated_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: CreateTemporary(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_clone,_non-empty_buildsDir,_other_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: CreateTemporary(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_other_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_cache_volumes_disabled_wrapped_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:451: PASS: CreateTemporary(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_cache_volumes_disabled,_duplicated_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:451: PASS: CreateTemporary(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_clone,_non-empty_buildsDir,_duplicated_error 
(0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: CreateTemporary(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_fetch,_empty_buildsDir,_duplicated_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_wrapped_duplicated_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_fetch,_empty_buildsDir,_other_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_no_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_duplicated_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_fetch,_non-empty_buildsDir,_cache_volumes_disabled (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:451: PASS: CreateTemporary(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_clone,_empty_buildsDir,_no_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: CreateTemporary(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_clone,_non-empty_buildsDir,_no_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: 
CreateTemporary(string,string) docker_test.go:452: PASS: Close() --- PASS: TestCreateBuildVolume/git_strategy_fetch,_empty_buildsDir,_no_error (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: Create(string,string) docker_test.go:452: PASS: Close() === RUN TestCreateDependencies --- PASS: TestCreateDependencies (0.00s) docker_test.go:451: PASS: RemoveTemporary(string) docker_test.go:451: PASS: CreateTemporary(string,string) docker_test.go:451: PASS: Create(string,string) docker_test.go:451: PASS: Binds() docker_test.go:452: PASS: Close() docker_test.go:452: PASS: ImageInspectWithRaw(string,string) docker_test.go:452: PASS: NetworkList(string,string) docker_test.go:452: PASS: ContainerRemove(string,mock.argumentMatcher,string) docker_test.go:452: PASS: ContainerRemove(string,string,string) docker_test.go:452: PASS: ContainerCreate(string,string,mock.argumentMatcher,string,mock.argumentMatcher) docker_test.go:452: PASS: ContainerStart(string,string,string) === RUN TestDockerMemorySetting --- PASS: TestDockerMemorySetting (0.00s) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) docker_test.go:967: PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: ContainerInspect(string,string) === RUN TestDockerMemorySwapSetting --- PASS: TestDockerMemorySwapSetting (0.00s) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) docker_test.go:967: PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: ContainerInspect(string,string) === RUN TestDockerMemoryReservationSetting --- PASS: TestDockerMemoryReservationSetting (0.00s) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) 
docker_test.go:967: PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: ContainerInspect(string,string) === RUN TestDockerCPUSSetting === RUN TestDockerCPUSSetting/0.5 === RUN TestDockerCPUSSetting/0.25 === RUN TestDockerCPUSSetting/1/3 === RUN TestDockerCPUSSetting/1/8 === RUN TestDockerCPUSSetting/0.0001 --- PASS: TestDockerCPUSSetting (0.00s) --- PASS: TestDockerCPUSSetting/0.5 (0.00s) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) docker_test.go:967: PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: ContainerInspect(string,string) --- PASS: TestDockerCPUSSetting/0.25 (0.00s) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) docker_test.go:967: PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: ContainerInspect(string,string) --- PASS: TestDockerCPUSSetting/1/3 (0.00s) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) docker_test.go:967: PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: ContainerInspect(string,string) --- PASS: TestDockerCPUSSetting/1/8 (0.00s) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) docker_test.go:967: PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: ContainerInspect(string,string) --- PASS: TestDockerCPUSSetting/0.0001 (0.00s) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) docker_test.go:967: 
PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: ContainerInspect(string,string) === RUN TestDockerCPUSetCPUsSetting --- PASS: TestDockerCPUSetCPUsSetting (0.00s) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) docker_test.go:967: PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: ContainerInspect(string,string) === RUN TestDockerServicesTmpfsSetting --- PASS: TestDockerServicesTmpfsSetting (0.00s) docker_test.go:995: PASS: ImageInspectWithRaw(string,string) docker_test.go:995: PASS: ImagePullBlocking(string,string,string) docker_test.go:995: PASS: NetworkList(string,string) docker_test.go:995: PASS: ContainerRemove(string,string,string) docker_test.go:995: PASS: ContainerStart(string,string,string) === RUN TestDockerTmpfsSetting --- PASS: TestDockerTmpfsSetting (0.00s) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) docker_test.go:967: PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: ContainerInspect(string,string) === RUN TestDockerServicesDNSSetting --- PASS: TestDockerServicesDNSSetting (0.00s) docker_test.go:995: PASS: ImageInspectWithRaw(string,string) docker_test.go:995: PASS: ImagePullBlocking(string,string,string) docker_test.go:995: PASS: NetworkList(string,string) docker_test.go:995: PASS: ContainerRemove(string,string,string) docker_test.go:995: PASS: ContainerStart(string,string,string) PASS coverage: 11.3% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/executors/docker 0.037s coverage: 11.3% of statements in gitlab.com/gitlab-org/gitlab-runner/...  
--- Starting part 2 of go tests of 'gitlab.com/gitlab-org/gitlab-runner/executors/docker' package with coverprofile in 'count' mode:  === RUN TestDockerServicesDNSSearchSetting --- PASS: TestDockerServicesDNSSearchSetting (0.00s) docker_test.go:995: PASS: ImageInspectWithRaw(string,string) docker_test.go:995: PASS: ImagePullBlocking(string,string,string) docker_test.go:995: PASS: NetworkList(string,string) docker_test.go:995: PASS: ContainerRemove(string,string,string) docker_test.go:995: PASS: ContainerStart(string,string,string) === RUN TestDockerServicesExtraHostsSetting --- PASS: TestDockerServicesExtraHostsSetting (0.00s) docker_test.go:995: PASS: ImageInspectWithRaw(string,string) docker_test.go:995: PASS: ImagePullBlocking(string,string,string) docker_test.go:995: PASS: NetworkList(string,string) docker_test.go:995: PASS: ContainerRemove(string,string,string) docker_test.go:995: PASS: ContainerStart(string,string,string) === RUN TestDockerServiceUserNSSetting --- PASS: TestDockerServiceUserNSSetting (0.00s) docker_test.go:995: PASS: ImageInspectWithRaw(string,string) docker_test.go:995: PASS: ImagePullBlocking(string,string,string) docker_test.go:995: PASS: NetworkList(string,string) docker_test.go:995: PASS: ContainerRemove(string,string,string) docker_test.go:995: PASS: ContainerStart(string,string,string) docker_test.go:995: PASS: ImageInspectWithRaw(string,string) docker_test.go:995: PASS: ImagePullBlocking(string,string,string) docker_test.go:995: PASS: NetworkList(string,string) docker_test.go:995: PASS: ContainerRemove(string,string,string) docker_test.go:995: PASS: ContainerStart(string,string,string) === RUN TestDockerUserNSSetting --- PASS: TestDockerUserNSSetting (0.00s) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) docker_test.go:967: PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: 
ContainerInspect(string,string) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) docker_test.go:967: PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: ContainerInspect(string,string) === RUN TestDockerRuntimeSetting --- PASS: TestDockerRuntimeSetting (0.00s) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) docker_test.go:967: PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: ContainerInspect(string,string) === RUN TestDockerSysctlsSetting --- PASS: TestDockerSysctlsSetting (0.00s) docker_test.go:967: PASS: ImageInspectWithRaw(string,string) docker_test.go:967: PASS: ImagePullBlocking(string,string,string) docker_test.go:967: PASS: NetworkList(string,string) docker_test.go:967: PASS: ContainerRemove(string,string,string) docker_test.go:967: PASS: ContainerInspect(string,string) === RUN TestDockerCreateNetwork === RUN TestDockerCreateNetwork/network_cleanup_failed === RUN TestDockerCreateNetwork/networks_manager_not_created === RUN TestDockerCreateNetwork/network_not_created === RUN TestDockerCreateNetwork/network_created === RUN TestDockerCreateNetwork/network_creation_failed === RUN TestDockerCreateNetwork/network_inspect_failed === RUN TestDockerCreateNetwork/removing_container_failed --- PASS: TestDockerCreateNetwork (0.00s) --- PASS: TestDockerCreateNetwork/network_cleanup_failed (0.00s) docker_test.go:1340: PASS: Create(string,string) docker_test.go:1340: PASS: Inspect(string) docker_test.go:1340: PASS: Cleanup(string) --- PASS: TestDockerCreateNetwork/networks_manager_not_created (0.00s) --- PASS: TestDockerCreateNetwork/network_not_created (0.00s) docker_test.go:1340: PASS: Create(string,string) docker_test.go:1340: PASS: Inspect(string) docker_test.go:1340: 
PASS: Cleanup(string) --- PASS: TestDockerCreateNetwork/network_created (0.00s) docker_test.go:1340: PASS: Create(string,string) docker_test.go:1340: PASS: Inspect(string) docker_test.go:1340: PASS: Cleanup(string) --- PASS: TestDockerCreateNetwork/network_creation_failed (0.00s) docker_test.go:1340: PASS: Create(string,string) --- PASS: TestDockerCreateNetwork/network_inspect_failed (0.00s) docker_test.go:1340: PASS: Create(string,string) docker_test.go:1340: PASS: Inspect(string) --- PASS: TestDockerCreateNetwork/removing_container_failed (0.00s) docker_test.go:1340: PASS: Create(string,string) docker_test.go:1340: PASS: Inspect(string) docker_test.go:1340: PASS: Cleanup(string) docker_test.go:1341: PASS: NetworkList(string,string) docker_test.go:1341: PASS: ContainerRemove(string,string,string) === RUN TestCheckOSType === RUN TestCheckOSType/executor_and_docker_info_match === RUN TestCheckOSType/executor_OSType_not_defined === RUN TestCheckOSType/executor_and_docker_info_mismatch --- PASS: TestCheckOSType (0.00s) --- PASS: TestCheckOSType/executor_and_docker_info_match (0.00s) --- PASS: TestCheckOSType/executor_OSType_not_defined (0.00s) --- PASS: TestCheckOSType/executor_and_docker_info_mismatch (0.00s) === RUN TestGetServiceDefinitions === RUN TestGetServiceDefinitions/all_services_with_proper_name_and_alias === RUN TestGetServiceDefinitions/build_service_not_in_internal_images_but_empty_allowed_services === RUN TestGetServiceDefinitions/build_service_not_in_internal_images === RUN TestGetServiceDefinitions/build_service_not_in_allowed_services_but_in_internal_images === RUN TestGetServiceDefinitions/empty_service_name --- PASS: TestGetServiceDefinitions (0.00s) --- PASS: TestGetServiceDefinitions/all_services_with_proper_name_and_alias (0.00s) --- PASS: TestGetServiceDefinitions/build_service_not_in_internal_images_but_empty_allowed_services (0.00s) --- PASS: TestGetServiceDefinitions/build_service_not_in_internal_images (0.00s) --- PASS: 
TestGetServiceDefinitions/build_service_not_in_allowed_services_but_in_internal_images (0.00s) --- PASS: TestGetServiceDefinitions/empty_service_name (0.00s) === RUN TestAddServiceHealthCheck === RUN TestAddServiceHealthCheck/network_mode_not_defined === RUN TestAddServiceHealthCheck/get_ports_via_environment === RUN TestAddServiceHealthCheck/get_port_from_many === RUN TestAddServiceHealthCheck/no_ports_defined === RUN TestAddServiceHealthCheck/container_inspect_error --- PASS: TestAddServiceHealthCheck (0.00s) --- PASS: TestAddServiceHealthCheck/network_mode_not_defined (0.00s) --- PASS: TestAddServiceHealthCheck/get_ports_via_environment (0.00s) docker_test.go:1694: PASS: ContainerInspect(string,string) --- PASS: TestAddServiceHealthCheck/get_port_from_many (0.00s) docker_test.go:1694: PASS: ContainerInspect(string,string) --- PASS: TestAddServiceHealthCheck/no_ports_defined (0.00s) docker_test.go:1694: PASS: ContainerInspect(string,string) --- PASS: TestAddServiceHealthCheck/container_inspect_error (0.00s) docker_test.go:1694: PASS: ContainerInspect(string,string) PASS coverage: 8.8% of statements in gitlab.com/gitlab-org/gitlab-runner/... ok gitlab.com/gitlab-org/gitlab-runner/executors/docker 0.027s coverage: 8.8% of statements in gitlab.com/gitlab-org/gitlab-runner/... section_end:1621524638:step_script section_start:1621524638:archive_cache Saving cache for successful job Creating cache unit test 2/8-v13-12-0-2... WARNING: /builds/gitlab-org/gitlab-runner/.gocache-false/: no matching files Uploading cache.zip to https://storage.googleapis.com/gitlab-org-ci-runners-cache/project/250833/unit%20test%202/8-v13-12-0-2 Created cache section_end:1621524638:archive_cache section_start:1621524638:upload_artifacts_on_success Uploading artifacts for successful job Uploading artifacts... .cover/*: found 15 matching files and directories  .testoutput/*: found 15 matching files and directories Uploading artifacts as "archive" to coordinator... 
ok id=1280281228 status=201 token=wxW5PEaM section_end:1621524640:upload_artifacts_on_success section_start:1621524640:cleanup_file_variables Cleaning up file based variables section_end:1621524641:cleanup_file_variables Job succeeded  ================================================ FILE: common/buildlogger/internal/testdata/corpus/log-3 ================================================ Running with gitlab-runner 11.4.0-rc1 (1ff344e1)  on docker-auto-scale ed2dce3a Using Docker executor with image alpine:3.7 ... Pulling docker image alpine:3.7 ... Using docker image sha256:34ea7509dcad10aa92310f2b41e3afbabed0811ee3a902d6d49cb90f075fe444 for alpine:3.7 ... section_start:1540587289:prepare_script Running on runner-ed2dce3a-project-250833-concurrent-0 via runner-ed2dce3a-srm-1540587233-a2720091... section_end:1540587291:prepare_script section_start:1540587291:get_sources Cloning repository... Cloning into '/builds/gitlab-org/gitlab-runner'... Checking out cf91d5e1 as v11.4.2... Skipping Git submodules setup section_end:1540587303:get_sources section_start:1540587303:restore_cache section_end:1540587305:restore_cache section_start:1540587305:download_artifacts Downloading artifacts for code_quality (113296602)... Downloading artifacts from coordinator... 
ok  id=113296602 status=200 token=9voFyq8b section_end:1540587306:download_artifacts section_start:1540587306:build_script $ unset GPG_KEY $ if [ "$(cat gl-code-quality-report.json)" != "[]" ] ; then # collapsed multi-line command section_end:1540587308:build_script section_start:1540587308:after_script section_end:1540587309:after_script section_start:1540587309:archive_cache section_end:1540587311:archive_cache section_start:1540587311:upload_artifacts_on_success section_end:1540587312:upload_artifacts_on_success Job succeeded  ================================================ FILE: common/buildlogger/internal/testdata/corpus/log-4 ================================================ Running with gitlab-runner 11.4.0-rc1 (1ff344e1)  on prm-com-gitlab-org bd091556 Using Docker executor with image registry.gitlab.com/gitlab-org/gitlab-runner/ci:1.8.7-0 ... Pulling docker image registry.gitlab.com/gitlab-org/gitlab-runner/ci:1.8.7-0 ... Using docker image sha256:e04a22ede88d35b514bbdc50de3a5aad24756703c4df5a6ca8d114eff85b82a5 for registry.gitlab.com/gitlab-org/gitlab-runner/ci:1.8.7-0 ... section_start:1540587170:prepare_script Running on runner-bd091556-project-250833-concurrent-0 via runner-bd091556-prm-1540584797-bdf1c4f9... section_end:1540587171:prepare_script section_start:1540587171:get_sources Cloning repository... Cloning into '/builds/gitlab-org/gitlab-runner'... Checking out cf91d5e1 as v11.4.2... Skipping Git submodules setup section_end:1540587184:get_sources section_start:1540587184:restore_cache section_end:1540587186:restore_cache section_start:1540587186:download_artifacts Downloading artifacts for helper images (113296599)... Downloading artifacts from coordinator... ok  id=113296599 status=200 token=zjyCZm9o Downloading artifacts for clone test repo (113296600)... Downloading artifacts from coordinator... ok  id=113296600 status=200 token=W4uUQz5z Downloading artifacts for tests definitions (113296601)... Downloading artifacts from coordinator... 
ok  id=113296601 status=200 token=PuzvTPai Downloading artifacts for code_quality (113296602)... Downloading artifacts from coordinator... ok  id=113296602 status=200 token=9voFyq8b Downloading artifacts for unit tests 0 5 (113296603)... Downloading artifacts from coordinator... ok  id=113296603 status=200 token=WbgGVQ7y Downloading artifacts for unit tests 1 5 (113296604)... Downloading artifacts from coordinator... ok  id=113296604 status=200 token=3sppSbYF Downloading artifacts for unit tests 2 5 (113296606)... Downloading artifacts from coordinator... ok  id=113296606 status=200 token=ha8-ST6q Downloading artifacts for unit tests 3 5 (113296607)... Downloading artifacts from coordinator... ok  id=113296607 status=200 token=2kD26N4_ Downloading artifacts for unit tests 4 5 (113296608)... Downloading artifacts from coordinator... ok  id=113296608 status=200 token=7zGWebqN Downloading artifacts for unit tests with race 0 5 (113296609)... Downloading artifacts from coordinator... ok  id=113296609 status=200 token=RFKyWzzG Downloading artifacts for unit tests with race 1 5 (113296610)... Downloading artifacts from coordinator... ok  id=113296610 status=200 token=sTpwBPdi Downloading artifacts for unit tests with race 2 5 (113296611)... Downloading artifacts from coordinator... ok  id=113296611 status=200 token=ZCr_6jyj Downloading artifacts for unit tests with race 3 5 (113296612)... Downloading artifacts from coordinator... ok  id=113296612 status=200 token=AoQ_6DGW Downloading artifacts for unit tests with race 4 5 (113296613)... Downloading artifacts from coordinator... ok  id=113296613 status=200 token=AXxXnz1V Downloading artifacts for test coverage report (113296617)... Downloading artifacts from coordinator... ok  id=113296617 status=200 token=apWZwUpg Downloading artifacts for binaries darwin/386 darwin/amd64 (113296619)... Downloading artifacts from coordinator... 
ok  id=113296619 status=200 token=DuzTynpN Downloading artifacts for binaries freebsd/386 freebsd/amd64 freebsd/arm (113296620)... Downloading artifacts from coordinator... ok  id=113296620 status=200 token=8cf5c4mN Downloading artifacts for binaries linux/386 linux/amd64 linux/arm (113296621)... Downloading artifacts from coordinator... ok  id=113296621 status=200 token=HyURPmox Downloading artifacts for binaries windows/386 windows/amd64 (113296623)... Downloading artifacts from coordinator... ok  id=113296623 status=200 token=VAjCaS7j section_end:1540587195:download_artifacts section_start:1540587195:build_script $ # checking GPG signing support # collapsed multi-line command gpg: directory `/root/.gnupg' created gpg: new configuration file `/root/.gnupg/gpg.conf' created gpg: WARNING: options in `/root/.gnupg/gpg.conf' are not yet active during this run gpg: keyring `/root/.gnupg/secring.gpg' created gpg: keyring `/root/.gnupg/pubring.gpg' created gpg: key 880721D4: secret key imported gpg: /root/.gnupg/trustdb.gpg: trustdb created gpg: key 880721D4: public key "GitLab, Inc. " imported gpg: Total number processed: 1 gpg: imported: 1 (RSA: 1) gpg: secret keys read: 1 gpg: secret keys imported: 1 $ source ci/touch_make_dependencies $ make ${CI_JOB_NAME} # Installing packaging dependencies... which fpm 1>/dev/null || gem install rake fpm --no-ri --no-rdoc chmod 755 packaging/root/usr/share/gitlab-runner/ chmod 755 packaging/root/usr/share/gitlab-runner/* # Building Debian compatible packages... make package-deb-fpm ARCH=amd64 PACKAGE_ARCH=amd64 make[1]: Entering directory '/builds/gitlab-org/gitlab-runner' fpm -s dir -t deb -n gitlab-runner -v 11.4.2 \ -p out/deb/gitlab-runner_amd64.deb \ --deb-priority optional --category admin \ --force \ --deb-compression bzip2 \ --after-install packaging/scripts/postinst.deb \ --before-remove packaging/scripts/prerm.deb \ --url https://gitlab.com/gitlab-org/gitlab-runner \ --description "GitLab Runner" \ -m "GitLab Inc. 
" \ --license "MIT" \ --vendor "GitLab Inc." \ --conflicts gitlab-runner-beta \ --conflicts gitlab-ci-multi-runner \ --conflicts gitlab-ci-multi-runner-beta \ --provides gitlab-ci-multi-runner \ --replaces gitlab-ci-multi-runner \ --depends ca-certificates \ --depends git \ --depends curl \ --depends tar \ --deb-suggests docker-engine \ -a amd64 \ packaging/root/=/ \ out/binaries/gitlab-runner-linux-amd64=/usr/lib/gitlab-runner/gitlab-runner \ out/helper-images/=/usr/lib/gitlab-runner/helper-images/ {:timestamp=>"2018-10-26T20:53:23.535698+0000", :message=>"Debian packaging tools generally labels all files in /etc as config files, as mandated by policy, so fpm defaults to this behavior for deb packages. You can disable this default behavior with --deb-no-default-config-files flag", :level=>:warn} /var/lib/gems/2.1.0/gems/fpm-1.9.3/lib/fpm/util.rb:291: warning: Insecure world writable dir /builds/gitlab-org/gitlab-runner/.gopath in PATH, mode 040777 {:timestamp=>"2018-10-26T20:53:31.545098+0000", :message=>"Created package", :path=>"out/deb/gitlab-runner_amd64.deb"} Processing out/deb/gitlab-runner_amd64.deb... Signed deb out/deb/gitlab-runner_amd64.deb make[1]: Leaving directory '/builds/gitlab-org/gitlab-runner' make package-deb-fpm ARCH=386 PACKAGE_ARCH=i386 make[1]: Entering directory '/builds/gitlab-org/gitlab-runner' fpm -s dir -t deb -n gitlab-runner -v 11.4.2 \ -p out/deb/gitlab-runner_i386.deb \ --deb-priority optional --category admin \ --force \ --deb-compression bzip2 \ --after-install packaging/scripts/postinst.deb \ --before-remove packaging/scripts/prerm.deb \ --url https://gitlab.com/gitlab-org/gitlab-runner \ --description "GitLab Runner" \ -m "GitLab Inc. " \ --license "MIT" \ --vendor "GitLab Inc." 
\ --conflicts gitlab-runner-beta \ --conflicts gitlab-ci-multi-runner \ --conflicts gitlab-ci-multi-runner-beta \ --provides gitlab-ci-multi-runner \ --replaces gitlab-ci-multi-runner \ --depends ca-certificates \ --depends git \ --depends curl \ --depends tar \ --deb-suggests docker-engine \ -a i386 \ packaging/root/=/ \ out/binaries/gitlab-runner-linux-386=/usr/lib/gitlab-runner/gitlab-runner \ out/helper-images/=/usr/lib/gitlab-runner/helper-images/ {:timestamp=>"2018-10-26T20:53:32.801667+0000", :message=>"Debian packaging tools generally labels all files in /etc as config files, as mandated by policy, so fpm defaults to this behavior for deb packages. You can disable this default behavior with --deb-no-default-config-files flag", :level=>:warn} /var/lib/gems/2.1.0/gems/fpm-1.9.3/lib/fpm/util.rb:291: warning: Insecure world writable dir /builds/gitlab-org/gitlab-runner/.gopath in PATH, mode 040777 {:timestamp=>"2018-10-26T20:53:40.636303+0000", :message=>"Created package", :path=>"out/deb/gitlab-runner_i386.deb"} Processing out/deb/gitlab-runner_i386.deb... Signed deb out/deb/gitlab-runner_i386.deb make[1]: Leaving directory '/builds/gitlab-org/gitlab-runner' make package-deb-fpm ARCH=arm PACKAGE_ARCH=armel make[1]: Entering directory '/builds/gitlab-org/gitlab-runner' fpm -s dir -t deb -n gitlab-runner -v 11.4.2 \ -p out/deb/gitlab-runner_armel.deb \ --deb-priority optional --category admin \ --force \ --deb-compression bzip2 \ --after-install packaging/scripts/postinst.deb \ --before-remove packaging/scripts/prerm.deb \ --url https://gitlab.com/gitlab-org/gitlab-runner \ --description "GitLab Runner" \ -m "GitLab Inc. " \ --license "MIT" \ --vendor "GitLab Inc." 
\ --conflicts gitlab-runner-beta \ --conflicts gitlab-ci-multi-runner \ --conflicts gitlab-ci-multi-runner-beta \ --provides gitlab-ci-multi-runner \ --replaces gitlab-ci-multi-runner \ --depends ca-certificates \ --depends git \ --depends curl \ --depends tar \ --deb-suggests docker-engine \ -a armel \ packaging/root/=/ \ out/binaries/gitlab-runner-linux-arm=/usr/lib/gitlab-runner/gitlab-runner \ out/helper-images/=/usr/lib/gitlab-runner/helper-images/ {:timestamp=>"2018-10-26T20:53:41.938538+0000", :message=>"Debian packaging tools generally labels all files in /etc as config files, as mandated by policy, so fpm defaults to this behavior for deb packages. You can disable this default behavior with --deb-no-default-config-files flag", :level=>:warn} /var/lib/gems/2.1.0/gems/fpm-1.9.3/lib/fpm/util.rb:291: warning: Insecure world writable dir /builds/gitlab-org/gitlab-runner/.gopath in PATH, mode 040777 {:timestamp=>"2018-10-26T20:53:49.988319+0000", :message=>"Created package", :path=>"out/deb/gitlab-runner_armel.deb"} Processing out/deb/gitlab-runner_armel.deb... Signed deb out/deb/gitlab-runner_armel.deb make[1]: Leaving directory '/builds/gitlab-org/gitlab-runner' make package-deb-fpm ARCH=arm PACKAGE_ARCH=armhf make[1]: Entering directory '/builds/gitlab-org/gitlab-runner' fpm -s dir -t deb -n gitlab-runner -v 11.4.2 \ -p out/deb/gitlab-runner_armhf.deb \ --deb-priority optional --category admin \ --force \ --deb-compression bzip2 \ --after-install packaging/scripts/postinst.deb \ --before-remove packaging/scripts/prerm.deb \ --url https://gitlab.com/gitlab-org/gitlab-runner \ --description "GitLab Runner" \ -m "GitLab Inc. " \ --license "MIT" \ --vendor "GitLab Inc." 
\ --conflicts gitlab-runner-beta \ --conflicts gitlab-ci-multi-runner \ --conflicts gitlab-ci-multi-runner-beta \ --provides gitlab-ci-multi-runner \ --replaces gitlab-ci-multi-runner \ --depends ca-certificates \ --depends git \ --depends curl \ --depends tar \ --deb-suggests docker-engine \ -a armhf \ packaging/root/=/ \ out/binaries/gitlab-runner-linux-arm=/usr/lib/gitlab-runner/gitlab-runner \ out/helper-images/=/usr/lib/gitlab-runner/helper-images/ {:timestamp=>"2018-10-26T20:53:51.235769+0000", :message=>"Debian packaging tools generally labels all files in /etc as config files, as mandated by policy, so fpm defaults to this behavior for deb packages. You can disable this default behavior with --deb-no-default-config-files flag", :level=>:warn} /var/lib/gems/2.1.0/gems/fpm-1.9.3/lib/fpm/util.rb:291: warning: Insecure world writable dir /builds/gitlab-org/gitlab-runner/.gopath in PATH, mode 040777 {:timestamp=>"2018-10-26T20:53:59.078709+0000", :message=>"Created package", :path=>"out/deb/gitlab-runner_armhf.deb"} Processing out/deb/gitlab-runner_armhf.deb... Signed deb out/deb/gitlab-runner_armhf.deb make[1]: Leaving directory '/builds/gitlab-org/gitlab-runner' section_end:1540587240:build_script section_start:1540587240:after_script section_end:1540587242:after_script section_start:1540587242:archive_cache section_end:1540587243:archive_cache section_start:1540587243:upload_artifacts_on_success Uploading artifacts... out/deb/: found 5 matching files  WARNING: out/rpm/: no matching files  Uploading artifacts to coordinator... 
ok  id=113296624 status=201 token=qQrjFYp3 section_end:1540587262:upload_artifacts_on_success Job succeeded  ================================================ FILE: common/buildlogger/internal/testdata/corpus/log-5 ================================================ Running with gitlab-runner 13.12.0-rc1 (b21d5c5b)  on docker-auto-scale 72989761  feature flags: FF_GITLAB_REGISTRY_HELPER_IMAGE:true, FF_SKIP_DOCKER_MACHINE_PROVISION_ON_CREATION_FAILURE:true section_start:1624482342:resolve_secrets Resolving secrets section_end:1624482342:resolve_secrets section_start:1624482342:prepare_executor Preparing the "docker+machine" executor Using Docker executor with image openwrtorg/sdk:ath79-generic-master ... Pulling docker image openwrtorg/sdk:ath79-generic-master ... Using docker image sha256:4b162297a74401a7f00f5510fc35392b80101a47fae3564f227fc70ce7209134 for openwrtorg/sdk:ath79-generic-master with digest openwrtorg/sdk@sha256:1bab6aff4b2f81f7b0d24a3ceecc95057e81994959256411df331084dd6490bd ... section_end:1624482385:prepare_executor section_start:1624482385:prepare_script Preparing environment Running on runner-72989761-project-14926021-concurrent-0 via runner-72989761-srm-1624482294-68b0fb18... section_end:1624482388:prepare_script section_start:1624482388:get_sources Getting source from Git repository $ eval "$CI_PRE_CLONE_SCRIPT" Fetching changes with git depth set to 50... Initialized empty Git repository in /builds/openwrt/project/ustream-ssl/.git/ Created fresh repository. Checking out 68d09243 as master... Skipping Git submodules setup section_end:1624482389:get_sources section_start:1624482389:step_script Executing "step_script" stage of the job script Using docker image sha256:4b162297a74401a7f00f5510fc35392b80101a47fae3564f227fc70ce7209134 for openwrtorg/sdk:ath79-generic-master with digest openwrtorg/sdk@sha256:1bab6aff4b2f81f7b0d24a3ceecc95057e81994959256411df331084dd6490bd ... 
$ wget -q $CI_SOURCE_URL/Makefile -O Makefile.ci $ make ci-prepare -f Makefile.ci if [ ! -d /builds/openwrt/project/ustream-ssl/openwrt-ci ]; then \ mkdir -p /builds/openwrt/project/ustream-ssl/openwrt-ci && \ for file in openwrt-ci/common.mk openwrt-ci/pre-build.mk openwrt-ci/native-build.mk openwrt-ci/target-build.mk openwrt-ci/sdk-build.mk; do \ wget -q https://gitlab.com/ynezz/openwrt-ci/raw/master/$file -O /builds/openwrt/project/ustream-ssl/$file; \ done \ fi touch openwrt-ci/.prepared $ make ci-sdk-oot-build -f Makefile.ci mkdir -p /home/build/openwrt/tmp/ cd /home/build/openwrt && ./scripts/feeds update base Updating feed 'base' from 'https://git.openwrt.org/openwrt/openwrt.git' ... Cloning into './feeds/base'... Create index file './feeds/base.index' Checking 'working-make'... ok. Checking 'case-sensitive-fs'... ok. Checking 'proper-umask'... ok. Checking 'gcc'... ok. Checking 'working-gcc'... ok. Checking 'g++'... ok. Checking 'working-g++'... ok. Checking 'ncurses'... ok. Checking 'perl-data-dumper'... ok. Checking 'perl-thread-queue'... ok. Checking 'tar'... ok. Checking 'find'... ok. Checking 'bash'... ok. Checking 'xargs'... ok. Checking 'patch'... ok. Checking 'diff'... ok. Checking 'cp'... ok. Checking 'seq'... ok. Checking 'awk'... ok. Checking 'grep'... ok. Checking 'egrep'... ok. Checking 'getopt'... ok. Checking 'stat'... ok. Checking 'unzip'... ok. Checking 'bzip2'... ok. Checking 'wget'... ok. Checking 'perl'... ok. Checking 'python2-cleanup'... ok. Checking 'python'... ok. Checking 'python3'... ok. Checking 'git'... ok. Checking 'file'... ok. Checking 'ldconfig-stub'... ok.  
Collecting package info: feeds/base/package/base-files Collecting package info: feeds/base/package/boot/arm-trusted-firmware-mediatek ERROR: please fix feeds/base/package/boot/arm-trusted-firmware-mediatek/Makefile - see logs/feeds/base/package/boot/arm-trusted-firmware-mediatek/dump.txt for details  Collecting package info: feeds/base/package/boot/arm-trusted-firmware-mvebu ERROR: please fix feeds/base/package/boot/arm-trusted-firmware-mvebu/Makefile - see logs/feeds/base/package/boot/arm-trusted-firmware-mvebu/dump.txt for details  Collecting package info: feeds/base/package/boot/arm-trusted-firmware-rockchip Collecting package info: feeds/base/package/boot/arm-trusted-firmware-sunxi ERROR: please fix feeds/base/package/boot/arm-trusted-firmware-sunxi/Makefile - see logs/feeds/base/package/boot/arm-trusted-firmware-sunxi/dump.txt for details  Collecting package info: feeds/base/package/boot/arm-trusted-firmware-tools ERROR: please fix feeds/base/package/boot/arm-trusted-firmware-tools/Makefile - see logs/feeds/base/package/boot/arm-trusted-firmware-tools/dump.txt for details  Collecting package info: feeds/base/package/boot/at91bootstrap Collecting package info: feeds/base/package/boot/fconfig Collecting package info: feeds/base/package/boot/grub2 Collecting package info: feeds/base/package/boot/imx-bootlets Collecting package info: feeds/base/package/boot/kexec-tools Collecting package info: feeds/base/package/boot/kobs-ng Collecting package info: feeds/base/package/boot/mt7623n-preloader Collecting package info: feeds/base/package/boot/tfa-layerscape Collecting package info: feeds/base/package/boot/uboot-at91 Collecting package info: feeds/base/package/boot/uboot-envtools Collecting package info: feeds/base/package/boot/uboot-fritz4040 Collecting package info: feeds/base/package/boot/uboot-imx6 Collecting package info: feeds/base/package/boot/uboot-kirkwood Collecting package info: feeds/base/package/boot/uboot-lantiq Collecting package info: 
feeds/base/package/boot/uboot-layerscape Collecting package info: feeds/base/package/boot/uboot-mediatek Collecting package info: feeds/base/package/boot/uboot-mvebu Collecting package info: feeds/base/package/boot/uboot-mxs Collecting package info: feeds/base/package/boot/uboot-omap Collecting package info: feeds/base/package/boot/uboot-oxnas Collecting package info: feeds/base/package/boot/uboot-ramips Collecting package info: feeds/base/package/boot/uboot-rockchip Collecting package info: feeds/base/package/boot/uboot-sunxi Collecting package info: feeds/base/package/boot/uboot-tegra Collecting package info: feeds/base/package/boot/uboot-zynq Collecting package info: feeds/base/package/devel/binutils Collecting package info: feeds/base/package/devel/gdb Collecting package info: feeds/base/package/devel/perf Collecting package info: feeds/base/package/devel/strace Collecting package info: feeds/base/package/devel/trace-cmd Collecting package info: feeds/base/package/devel/valgrind Collecting package info: feeds/base/package/firmware/amd64-microcode Collecting package info: feeds/base/package/firmware/ath10k-ct-firmware Collecting package info: feeds/base/package/firmware/b43legacy-firmware Collecting package info: feeds/base/package/firmware/cypress-firmware Collecting package info: feeds/base/package/firmware/cypress-nvram Collecting package info: feeds/base/package/firmware/intel-microcode Collecting package info: feeds/base/package/firmware/ipq-wifi Collecting package info: feeds/base/package/firmware/lantiq/dsl-vrx200-firmware-xdsl Collecting package info: feeds/base/package/firmware/layerscape/fman-ucode Collecting package info: feeds/base/package/firmware/layerscape/ls-ddr-phy Collecting package info: feeds/base/package/firmware/layerscape/ls-dpl Collecting package info: feeds/base/package/firmware/layerscape/ls-mc Collecting package info: feeds/base/package/firmware/layerscape/ls-rcw Collecting package info: 
feeds/base/package/firmware/layerscape/ppfe-firmware Collecting package info: feeds/base/package/firmware/linux-firmware Collecting package info: feeds/base/package/firmware/prism54-firmware Collecting package info: feeds/base/package/firmware/wireless-regdb Collecting package info: feeds/base/package/kernel/acx-mac80211 Collecting package info: feeds/base/package/kernel/ath10k-ct Collecting package info: feeds/base/package/kernel/bcm27xx-gpu-fw Collecting package info: feeds/base/package/kernel/bcm63xx-cfe Collecting package info: feeds/base/package/kernel/broadcom-wl Collecting package info: feeds/base/package/kernel/button-hotplug Collecting package info: feeds/base/package/kernel/cryptodev-linux Collecting package info: feeds/base/package/kernel/gpio-button-hotplug Collecting package info: feeds/base/package/kernel/gpio-nct5104d Collecting package info: feeds/base/package/kernel/gpio-nxp-74hc153 Collecting package info: feeds/base/package/kernel/hwmon-gsc Collecting package info: feeds/base/package/kernel/lantiq/ltq-adsl Collecting package info: feeds/base/package/kernel/lantiq/ltq-adsl-fw Collecting package info: feeds/base/package/kernel/lantiq/ltq-adsl-mei Collecting package info: feeds/base/package/kernel/lantiq/ltq-atm Collecting package info: feeds/base/package/kernel/lantiq/ltq-deu Collecting package info: feeds/base/package/kernel/lantiq/ltq-ifxos Collecting package info: feeds/base/package/kernel/lantiq/ltq-ptm Collecting package info: feeds/base/package/kernel/lantiq/ltq-tapi Collecting package info: feeds/base/package/kernel/lantiq/ltq-vdsl Collecting package info: feeds/base/package/kernel/lantiq/ltq-vdsl-fw Collecting package info: feeds/base/package/kernel/lantiq/ltq-vdsl-mei Collecting package info: feeds/base/package/kernel/lantiq/ltq-vmmc Collecting package info: feeds/base/package/kernel/linux Collecting package info: feeds/base/package/kernel/mac80211 Collecting package info: feeds/base/package/kernel/mt76 Collecting package info: 
feeds/base/package/kernel/mt7621-qtn-rgmii Collecting package info: feeds/base/package/kernel/mwlwifi Collecting package info: feeds/base/package/kernel/nat46 Collecting package info: feeds/base/package/kernel/om-watchdog Collecting package info: feeds/base/package/kernel/rtc-rv5c386a Collecting package info: feeds/base/package/kernel/rtl8812au-ct Collecting package info: feeds/base/package/kernel/trelay Collecting package info: feeds/base/package/libs/argp-standalone Collecting package info: feeds/base/package/libs/elfutils Collecting package info: feeds/base/package/libs/gettext Collecting package info: feeds/base/package/libs/gettext-full Collecting package info: feeds/base/package/libs/gmp Collecting package info: feeds/base/package/libs/jansson Collecting package info: feeds/base/package/libs/libaudit Collecting package info: feeds/base/package/libs/libbsd Collecting package info: feeds/base/package/libs/libcap Collecting package info: feeds/base/package/libs/libevent2 Collecting package info: feeds/base/package/libs/libiconv Collecting package info: feeds/base/package/libs/libiconv-full Collecting package info: feeds/base/package/libs/libjson-c Collecting package info: feeds/base/package/libs/libmnl Collecting package info: feeds/base/package/libs/libnetfilter-conntrack Collecting package info: feeds/base/package/libs/libnfnetlink Collecting package info: feeds/base/package/libs/libnftnl Collecting package info: feeds/base/package/libs/libnl Collecting package info: feeds/base/package/libs/libnl-tiny Collecting package info: feeds/base/package/libs/libpcap Collecting package info: feeds/base/package/libs/libselinux Collecting package info: feeds/base/package/libs/libsemanage Collecting package info: feeds/base/package/libs/libsepol Collecting package info: feeds/base/package/libs/libtool Collecting package info: feeds/base/package/libs/libubox Collecting package info: feeds/base/package/libs/libunwind Collecting package info: feeds/base/package/libs/libusb 
Collecting package info: feeds/base/package/libs/mbedtls Collecting package info: feeds/base/package/libs/musl-fts Collecting package info: feeds/base/package/libs/ncurses Collecting package info: feeds/base/package/libs/nettle Collecting package info: feeds/base/package/libs/openssl Collecting package info: feeds/base/package/libs/pcre Collecting package info: feeds/base/package/libs/popt Collecting package info: feeds/base/package/libs/readline Collecting package info: feeds/base/package/libs/sysfsutils Collecting package info: feeds/base/package/libs/toolchain Collecting package info: feeds/base/package/libs/uclibc++ Collecting package info: feeds/base/package/libs/uclient Collecting package info: feeds/base/package/libs/ustream-ssl Collecting package info: feeds/base/package/libs/wolfssl Collecting package info: feeds/base/package/libs/zlib Collecting package info: feeds/base/package/network/config/firewall Collecting package info: feeds/base/package/network/config/firewall4 Collecting package info: feeds/base/package/network/config/gre Collecting package info: feeds/base/package/network/config/ipip Collecting package info: feeds/base/package/network/config/ltq-adsl-app Collecting package info: feeds/base/package/network/config/ltq-vdsl-app Collecting package info: feeds/base/package/network/config/netifd Collecting package info: feeds/base/package/network/config/qos-scripts Collecting package info: feeds/base/package/network/config/soloscli Collecting package info: feeds/base/package/network/config/swconfig Collecting package info: feeds/base/package/network/config/vti Collecting package info: feeds/base/package/network/config/vxlan Collecting package info: feeds/base/package/network/config/xfrm Collecting package info: feeds/base/package/network/ipv6/464xlat Collecting package info: feeds/base/package/network/ipv6/6in4 Collecting package info: feeds/base/package/network/ipv6/6rd Collecting package info: feeds/base/package/network/ipv6/6to4 Collecting package 
info: feeds/base/package/network/ipv6/ds-lite Collecting package info: feeds/base/package/network/ipv6/map Collecting package info: feeds/base/package/network/ipv6/odhcp6c Collecting package info: feeds/base/package/network/ipv6/thc-ipv6 Collecting package info: feeds/base/package/network/services/dnsmasq Collecting package info: feeds/base/package/network/services/dropbear Collecting package info: feeds/base/package/network/services/ead Collecting package info: feeds/base/package/network/services/hostapd Collecting package info: feeds/base/package/network/services/ipset-dns Collecting package info: feeds/base/package/network/services/lldpd Collecting package info: feeds/base/package/network/services/odhcpd Collecting package info: feeds/base/package/network/services/omcproxy Collecting package info: feeds/base/package/network/services/ppp Collecting package info: feeds/base/package/network/services/relayd Collecting package info: feeds/base/package/network/services/uhttpd Collecting package info: feeds/base/package/network/services/umdns Collecting package info: feeds/base/package/network/utils/adb-enablemodem Collecting package info: feeds/base/package/network/utils/arptables Collecting package info: feeds/base/package/network/utils/bpftools Collecting package info: feeds/base/package/network/utils/comgt Collecting package info: feeds/base/package/network/utils/dante Collecting package info: feeds/base/package/network/utils/ebtables Collecting package info: feeds/base/package/network/utils/ethtool Collecting package info: feeds/base/package/network/utils/iproute2 Collecting package info: feeds/base/package/network/utils/ipset Collecting package info: feeds/base/package/network/utils/iptables Collecting package info: feeds/base/package/network/utils/iw Collecting package info: feeds/base/package/network/utils/iwcap Collecting package info: feeds/base/package/network/utils/iwinfo Collecting package info: feeds/base/package/network/utils/linux-atm Collecting package 
info: feeds/base/package/network/utils/ltq-dsl-base Collecting package info: feeds/base/package/network/utils/nftables Collecting package info: feeds/base/package/network/utils/resolveip Collecting package info: feeds/base/package/network/utils/rssileds Collecting package info: feeds/base/package/network/utils/tcpdump Collecting package info: feeds/base/package/network/utils/umbim Collecting package info: feeds/base/package/network/utils/uqmi Collecting package info: feeds/base/package/network/utils/wireguard-tools Collecting package info: feeds/base/package/network/utils/wireless-tools Collecting package info: feeds/base/package/network/utils/wpan-tools Collecting package info: feeds/base/package/network/utils/wwan Collecting package info: feeds/base/package/system/ca-certificates Collecting package info: feeds/base/package/system/fstools Collecting package info: feeds/base/package/system/fwtool Collecting package info: feeds/base/package/system/iucode-tool Collecting package info: feeds/base/package/system/mtd Collecting package info: feeds/base/package/system/openwrt-keyring Collecting package info: feeds/base/package/system/opkg Collecting package info: feeds/base/package/system/procd Collecting package info: feeds/base/package/system/refpolicy Collecting package info: feeds/base/package/system/rpcd Collecting package info: feeds/base/package/system/selinux-policy Collecting package info: feeds/base/package/system/ubox Collecting package info: feeds/base/package/system/ubus Collecting package info: feeds/base/package/system/ucert Collecting package info: feeds/base/package/system/uci Collecting package info: feeds/base/package/system/urandom-seed Collecting package info: feeds/base/package/system/urngd Collecting package info: feeds/base/package/system/usign Collecting package info: feeds/base/package/system/zram-swap Collecting package info: feeds/base/package/utils/adb Collecting package info: feeds/base/package/utils/bcm27xx-userland Collecting package info: 
feeds/base/package/utils/bcm4908img Collecting package info: feeds/base/package/utils/bsdiff Collecting package info: feeds/base/package/utils/busybox Collecting package info: feeds/base/package/utils/bzip2 Collecting package info: feeds/base/package/utils/checkpolicy Collecting package info: feeds/base/package/utils/ct-bugcheck Collecting package info: feeds/base/package/utils/e2fsprogs Collecting package info: feeds/base/package/utils/f2fs-tools Collecting package info: feeds/base/package/utils/fbtest Collecting package info: feeds/base/package/utils/fritz-tools Collecting package info: feeds/base/package/utils/jboot-tools Collecting package info: feeds/base/package/utils/jsonfilter Collecting package info: feeds/base/package/utils/lua Collecting package info: feeds/base/package/utils/lua5.3 Collecting package info: feeds/base/package/utils/mdadm Collecting package info: feeds/base/package/utils/mtd-utils Collecting package info: feeds/base/package/utils/nvram Collecting package info: feeds/base/package/utils/osafeloader Collecting package info: feeds/base/package/utils/oseama Collecting package info: feeds/base/package/utils/otrx Collecting package info: feeds/base/package/utils/policycoreutils Collecting package info: feeds/base/package/utils/px5g-mbedtls Collecting package info: feeds/base/package/utils/px5g-wolfssl ERROR: please fix feeds/base/package/utils/px5g-wolfssl/Makefile - see logs/feeds/base/package/utils/px5g-wolfssl/dump.txt for details  Collecting package info: feeds/base/package/utils/ravpower-mcu Collecting package info: feeds/base/package/utils/secilc Collecting package info: feeds/base/package/utils/spidev_test Collecting package info: feeds/base/package/utils/ucode Collecting package info: feeds/base/package/utils/ugps Collecting package info: feeds/base/package/utils/usbmode Collecting package info: feeds/base/package/utils/util-linux Collecting package info: feeds/base/tools/zip Collecting package info: merging... 
Collecting package info: done  Collecting target info: feeds/base/target/linux/apm821xx Collecting target info: feeds/base/target/linux/arc770 Collecting target info: feeds/base/target/linux/archs38 Collecting target info: feeds/base/target/linux/armvirt Collecting target info: feeds/base/target/linux/at91 Collecting target info: feeds/base/target/linux/ath25 Collecting target info: feeds/base/target/linux/ath79 Collecting target info: feeds/base/target/linux/bcm27xx Collecting target info: feeds/base/target/linux/bcm47xx Collecting target info: feeds/base/target/linux/bcm4908 Collecting target info: feeds/base/target/linux/bcm53xx Collecting target info: feeds/base/target/linux/bcm63xx Collecting target info: feeds/base/target/linux/bmips Collecting target info: feeds/base/target/linux/gemini Collecting target info: feeds/base/target/linux/imx6 Collecting target info: feeds/base/target/linux/ipq40xx Collecting target info: feeds/base/target/linux/ipq806x Collecting target info: feeds/base/target/linux/ipq807x Collecting target info: feeds/base/target/linux/kirkwood Collecting target info: feeds/base/target/linux/lantiq Collecting target info: feeds/base/target/linux/layerscape Collecting target info: feeds/base/target/linux/malta Collecting target info: feeds/base/target/linux/mediatek Collecting target info: feeds/base/target/linux/mpc85xx Collecting target info: feeds/base/target/linux/mvebu Collecting target info: feeds/base/target/linux/mxs Collecting target info: feeds/base/target/linux/octeon Collecting target info: feeds/base/target/linux/octeontx Collecting target info: feeds/base/target/linux/omap Collecting target info: feeds/base/target/linux/oxnas Collecting target info: feeds/base/target/linux/pistachio Collecting target info: feeds/base/target/linux/ramips Collecting target info: feeds/base/target/linux/realtek Collecting target info: feeds/base/target/linux/rockchip Collecting target info: feeds/base/target/linux/sunxi Collecting target info: 
feeds/base/target/linux/tegra Collecting target info: feeds/base/target/linux/uml Collecting target info: feeds/base/target/linux/x86 Collecting target info: feeds/base/target/linux/zynq Collecting target info: merging... Collecting target info: done cd /home/build/openwrt && ./scripts/feeds install libubox openssl WARNING: Makefile 'package/linux/Makefile' has a dependency on 'eip197-mini-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'r8169-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'e100-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'bnx2-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'bnx2x-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'ar3k-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'mwifiex-sdio-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'kmod-phy-bcm-ns-usb2', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'edgeport-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'kmod-phy-bcm-ns-usb3', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'amdgpu-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'radeon-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'prism54-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'rtl8192su-firmware', which does not exist Installing package 'libubox' from base Ignoring feed 'packages' - index missing Ignoring feed 'luci' - index missing Ignoring feed 'routing' - index missing Ignoring feed 'telephony' - index missing Ignoring feed 
'freifunk' - index missing Installing package 'lua' from base Installing package 'libjson-c' from base Installing package 'openssl' from base Installing package 'zlib' from base Installing package 'cryptodev-linux' from base cd /home/build/openwrt && make defconfig make[1]: Entering directory '/home/build/openwrt' make[2]: Entering directory '/home/build/openwrt' make[2]: Leaving directory '/home/build/openwrt' WARNING: Makefile 'package/linux/Makefile' has a dependency on 'eip197-mini-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'r8169-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'e100-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'bnx2-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'bnx2x-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'ar3k-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'mwifiex-sdio-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'kmod-phy-bcm-ns-usb2', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'edgeport-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'kmod-phy-bcm-ns-usb3', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'amdgpu-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'radeon-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'prism54-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'rtl8192su-firmware', which does not exist tmp/.config-package.in:67:warning: ignoring type redefinition of 'PACKAGE_libc' from 'bool' to 'tristate' 
tmp/.config-package.in:95:warning: ignoring type redefinition of 'PACKAGE_libgcc' from 'bool' to 'tristate' tmp/.config-package.in:211:warning: ignoring type redefinition of 'PACKAGE_libpthread' from 'bool' to 'tristate' tmp/.config-package.in:637:warning: ignoring type redefinition of 'PACKAGE_libblobmsg-json' from 'bool' to 'tristate' tmp/.config-package.in:650:warning: ignoring type redefinition of 'PACKAGE_libjson-c' from 'bool' to 'tristate' tmp/.config-package.in:677:warning: ignoring type redefinition of 'PACKAGE_libubox' from 'bool' to 'tristate' tmp/.config-package.in:721:warning: ignoring type redefinition of 'PACKAGE_jshn' from 'bool' to 'tristate' tmp/.config-package.in:791:warning: ignoring type redefinition of 'PACKAGE_libjson-script' from 'bool' to 'tristate' # # configuration written to .config # make[1]: Leaving directory '/home/build/openwrt' cd /home/build/openwrt && \ for pkg in libubox openssl; do \ make package/${pkg}/{clean,compile} \ PKG_ABI_VERSION=20210623 \ V=s -j$((nproc+1)) ; \ done make[1]: Entering directory '/home/build/openwrt' make[2]: Entering directory '/home/build/openwrt' make[3]: Entering directory '/home/build/openwrt' make[3]: Leaving directory '/home/build/openwrt' WARNING: Makefile 'package/linux/Makefile' has a dependency on 'eip197-mini-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'r8169-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'e100-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'bnx2-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'bnx2x-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'ar3k-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'mwifiex-sdio-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a 
dependency on 'kmod-phy-bcm-ns-usb2', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'edgeport-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'kmod-phy-bcm-ns-usb3', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'amdgpu-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'radeon-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'prism54-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'rtl8192su-firmware', which does not exist make[3]: Entering directory '/home/build/openwrt' make[3]: Leaving directory '/home/build/openwrt' make[2]: Leaving directory '/home/build/openwrt' tmp/.config-package.in:67:warning: ignoring type redefinition of 'PACKAGE_libc' from 'bool' to 'tristate' tmp/.config-package.in:95:warning: ignoring type redefinition of 'PACKAGE_libgcc' from 'bool' to 'tristate' tmp/.config-package.in:211:warning: ignoring type redefinition of 'PACKAGE_libpthread' from 'bool' to 'tristate' tmp/.config-package.in:637:warning: ignoring type redefinition of 'PACKAGE_libblobmsg-json' from 'bool' to 'tristate' tmp/.config-package.in:650:warning: ignoring type redefinition of 'PACKAGE_libjson-c' from 'bool' to 'tristate' tmp/.config-package.in:677:warning: ignoring type redefinition of 'PACKAGE_libubox' from 'bool' to 'tristate' tmp/.config-package.in:721:warning: ignoring type redefinition of 'PACKAGE_jshn' from 'bool' to 'tristate' tmp/.config-package.in:791:warning: ignoring type redefinition of 'PACKAGE_libjson-script' from 'bool' to 'tristate' # # No change to .config # make[2]: Entering directory '/home/build/openwrt' make[3]: Entering directory '/home/build/openwrt/feeds/base/package/libs/libubox' rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688 rm -f 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.libubox_installed rm -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/libubox.list make[3]: Leaving directory '/home/build/openwrt/feeds/base/package/libs/libubox' time: package/feeds/base/libubox/clean#0.35#0.41#0.79 make[2]: Leaving directory '/home/build/openwrt' make[2]: Entering directory '/home/build/openwrt' make[3]: Entering directory '/home/build/openwrt' make[3]: Leaving directory '/home/build/openwrt' WARNING: Makefile 'package/linux/Makefile' has a dependency on 'eip197-mini-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'r8169-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'e100-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'bnx2-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'bnx2x-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'ar3k-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'mwifiex-sdio-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'kmod-phy-bcm-ns-usb2', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'edgeport-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'kmod-phy-bcm-ns-usb3', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'amdgpu-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'radeon-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'prism54-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'rtl8192su-firmware', which does not exist make[3]: Entering directory 
'/home/build/openwrt' make[3]: Leaving directory '/home/build/openwrt' make[2]: Leaving directory '/home/build/openwrt' tmp/.config-package.in:67:warning: ignoring type redefinition of 'PACKAGE_libc' from 'bool' to 'tristate' tmp/.config-package.in:95:warning: ignoring type redefinition of 'PACKAGE_libgcc' from 'bool' to 'tristate' tmp/.config-package.in:211:warning: ignoring type redefinition of 'PACKAGE_libpthread' from 'bool' to 'tristate' tmp/.config-package.in:637:warning: ignoring type redefinition of 'PACKAGE_libblobmsg-json' from 'bool' to 'tristate' tmp/.config-package.in:650:warning: ignoring type redefinition of 'PACKAGE_libjson-c' from 'bool' to 'tristate' tmp/.config-package.in:677:warning: ignoring type redefinition of 'PACKAGE_libubox' from 'bool' to 'tristate' tmp/.config-package.in:721:warning: ignoring type redefinition of 'PACKAGE_jshn' from 'bool' to 'tristate' tmp/.config-package.in:791:warning: ignoring type redefinition of 'PACKAGE_libjson-script' from 'bool' to 'tristate' # # No change to .config # make[2]: Entering directory '/home/build/openwrt' make[3]: Entering directory '/home/build/openwrt/package/toolchain' Makefile:762: WARNING: skipping libgomp -- package has no install section touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.prepared_7f1b47944ccd427bc40bcb08f4c15a24_18f1e190c5d53547fed41a3eaa76e9e9_check mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.prepared_7f1b47944ccd427bc40bcb08f4c15a24_18f1e190c5d53547fed41a3eaa76e9e9 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.configured_* rm -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.toolchain_installed (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/./; if [ -x ./configure ]; then find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ -name config.guess | xargs -r chmod u+w; find 
/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ -name config.guess | xargs -r -n1 cp --remove-destination /home/build/openwrt/scripts/config.guess; find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ -name config.sub | xargs -r chmod u+w; find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ -name config.sub | xargs -r -n1 cp --remove-destination /home/build/openwrt/scripts/config.sub; AR="mips-openwrt-linux-musl-gcc-ar" AS="mips-openwrt-linux-musl-gcc -c -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain=toolchain -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro" LD=mips-openwrt-linux-musl-ld NM="mips-openwrt-linux-musl-gcc-nm" CC="mips-openwrt-linux-musl-gcc" GCC="mips-openwrt-linux-musl-gcc" CXX="mips-openwrt-linux-musl-g++" RANLIB="mips-openwrt-linux-musl-gcc-ranlib" STRIP=mips-openwrt-linux-musl-strip OBJCOPY=mips-openwrt-linux-musl-objcopy OBJDUMP=mips-openwrt-linux-musl-objdump SIZE=mips-openwrt-linux-musl-size CFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain=toolchain -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro " CXXFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain=toolchain -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro " 
CPPFLAGS="-I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include " LDFLAGS="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro " ./configure --target=mips-openwrt-linux --host=mips-openwrt-linux --build=x86_64-pc-linux-gnu --program-prefix="" --program-suffix="" --prefix=/usr --exec-prefix=/usr --bindir=/usr/bin --sbindir=/usr/sbin --libexecdir=/usr/lib --sysconfdir=/etc --datadir=/usr/share --localstatedir=/var --mandir=/usr/man --infodir=/usr/info --disable-nls ; fi; ) touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.configured_68b329da9893e34099c7d8ad5cb9c940 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.built touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.built_check cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libgcc_s.so.1 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.built rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/lib /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/usr/bin cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/ld-musl-*.so* 
/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/lib/ cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libc.so* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/lib/ ln -sf ../../lib/libc.so /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/usr/bin/ldd cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libcrypt.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libc.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libstdc++fs.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libm.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libresolv.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libssp_nonshared.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libatomic.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libsupc++.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libutil.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libxnet.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libdl.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libpthread.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/librt.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libstdc++.a /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/lib/ cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/gcc/mips-openwrt-linux-musl/8.4.0/libgcc_pic.a /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/lib/libgcc_s_pic.a; cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/gcc/mips-openwrt-linux-musl/8.4.0/libgcc.map 
/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/lib/libgcc_s_pic.map touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc.installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libc_installed mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc/lib cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libgcc_s.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc/lib/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc/lib/libgcc_s.so.1: shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ 
\"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc into /home/build/openwrt/bin/targets/ath79/generic/packages/libgcc1_8.4.0-2_mips_24kc.ipk mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/lib /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/usr/bin cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/ld-musl-*.so* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/lib/ cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libc.so* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/lib/ ln -sf ../../lib/libc.so /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/usr/bin/ldd find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc -name 'CVS' -o -name 
'.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/lib/libc.so: shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc into /home/build/openwrt/bin/targets/ath79/generic/packages/libc_1.1.24-2_mips_24kc.ipk mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic/lib cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libatomic.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic/lib/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic/lib/libatomic.so.1.2.0: shared object (cd 
/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic into /home/build/openwrt/bin/targets/ath79/generic/packages/libatomic1_8.4.0-2_mips_24kc.ipk mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp/usr/lib cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libstdc++.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp/usr/lib/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" 
NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp/usr/lib/libstdc++.so.6.0.25: executable (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp into /home/build/openwrt/bin/targets/ath79/generic/packages/libstdcpp6_8.4.0-2_mips_24kc.ipk mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread/lib find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread into /home/build/openwrt/bin/targets/ath79/generic/packages/libpthread_1.1.24-2_mips_24kc.ipk mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt/lib find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo 
"#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt into /home/build/openwrt/bin/targets/ath79/generic/packages/librt_1.1.24-2_mips_24kc.ipk echo "libc" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/toolchain.default.install rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc/lib cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libgcc_s.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc/lib/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic mkdir -p 
/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic/lib cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libatomic.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic/lib/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp/usr/lib cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libstdc++.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp/usr/lib/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libpthread.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libpthread mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libpthread install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libpthread/lib touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libpthread.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/librt.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/librt mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/librt install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/librt/lib touch 
/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/librt.installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp echo '1' | cmp -s - /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libgcc.version || echo '1' > /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libgcc.version SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libgcc_installed echo "libgcc" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/toolchain.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp echo '1' | cmp -s - /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libatomic.version || echo '1' > /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libatomic.version SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libatomic_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp echo '6' | cmp -s - /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libstdcpp.version || echo '6' > /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libstdcpp.version SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libstdcpp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libpthread/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libpthread_installed echo "libpthread" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/toolchain.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/librt/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.librt_installed touch -r /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.built /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.autoremove 2>/dev/null >/dev/null find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain -mindepth 1 -maxdepth 1 -not '(' -type f -and -name '.*' -and -size 0 ')' -and -not -name '.pkgdir' | xargs -r rm -rf make[3]: Leaving directory '/home/build/openwrt/package/toolchain' time: package/toolchain/compile#0.77#0.74#5.49 make[3]: Entering directory '/home/build/openwrt/feeds/base/package/libs/libjson-c' mkdir -p /home/build/openwrt/dl SHELL= flock /home/build/openwrt/tmp/.json-c-0.15-nodoc.tar.gz.flock -c ' /home/build/openwrt/scripts/download.pl "/home/build/openwrt/dl" "json-c-0.15-nodoc.tar.gz" "99bca4f944b8ced8ae0bbc6310d6a3528ca715e69541793a1ef51f8c5b4b0878" "" "https://s3.amazonaws.com/json-c_releases/releases/" ' + curl -f --connect-timeout 20 --retry 5 --location --insecure 
https://s3.amazonaws.com/json-c_releases/releases/json-c-0.15-nodoc.tar.gz % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 147k 100 147k 0 0 406k 0 --:--:-- --:--:-- --:--:-- 406k touch /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.prepared_b24dee5121e4464485e171b1a37813a3_18f1e190c5d53547fed41a3eaa76e9e9_check . /home/build/openwrt/include/shell.sh; gzip -dc /home/build/openwrt/dl/json-c-0.15-nodoc.tar.gz | tar -C /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.. -xf - [ ! -d ./src/ ] || cp -fpR ./src/. /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15 Applying ./patches/000-libm.patch using plaintext: patching file math_compat.h Applying ./patches/001-dont-build-docs.patch using plaintext: patching file CMakeLists.txt touch /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.prepared_b24dee5121e4464485e171b1a37813a3_18f1e190c5d53547fed41a3eaa76e9e9 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.configured_* rm -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.libjson-c_installed mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15 (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15; CFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15=json-c-0.15 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro " CXXFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 
-ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15=json-c-0.15 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro " LDFLAGS="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro " cmake -DCMAKE_SYSTEM_NAME=Linux -DCMAKE_SYSTEM_VERSION=1 -DCMAKE_SYSTEM_PROCESSOR=mips -DCMAKE_BUILD_TYPE=Release -DCMAKE_C_FLAGS_RELEASE="-DNDEBUG" -DCMAKE_CXX_FLAGS_RELEASE="-DNDEBUG" -DCMAKE_C_COMPILER="/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc" -DCMAKE_C_COMPILER_ARG1="" -DCMAKE_CXX_COMPILER="/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-g++" -DCMAKE_CXX_COMPILER_ARG1="" -DCMAKE_ASM_COMPILER="/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc" -DCMAKE_ASM_COMPILER_ARG1="" -DCMAKE_EXE_LINKER_FLAGS:STRING="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro" -DCMAKE_MODULE_LINKER_FLAGS:STRING="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro -Wl,-Bsymbolic-functions" -DCMAKE_SHARED_LINKER_FLAGS:STRING="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib 
-L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro -Wl,-Bsymbolic-functions" -DCMAKE_AR="/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc-ar" -DCMAKE_NM="/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc-nm" -DCMAKE_RANLIB="/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc-ranlib" -DCMAKE_FIND_ROOT_PATH="/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr;/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl" -DCMAKE_FIND_ROOT_PATH_MODE_PROGRAM=BOTH -DCMAKE_FIND_ROOT_PATH_MODE_LIBRARY=ONLY -DCMAKE_FIND_ROOT_PATH_MODE_INCLUDE=ONLY -DCMAKE_STRIP=: -DCMAKE_INSTALL_PREFIX=/usr -DDL_LIBRARY=/home/build/openwrt/staging_dir/target-mips_24kc_musl -DCMAKE_PREFIX_PATH=/home/build/openwrt/staging_dir/target-mips_24kc_musl -DCMAKE_SKIP_RPATH=TRUE -DCMAKE_EXPORT_PACKAGE_REGISTRY=FALSE -DCMAKE_EXPORT_NO_PACKAGE_REGISTRY=TRUE -DCMAKE_FIND_USE_PACKAGE_REGISTRY=FALSE -DCMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY=TRUE -DCMAKE_FIND_USE_SYSTEM_PACKAGE_REGISTRY=FALSE -DCMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY=TRUE /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15 ) -- The C compiler identification is GNU 8.4.0 -- Detecting C compiler ABI info -- Detecting C compiler ABI info - done -- Check for working C compiler: /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc - skipped -- Detecting C compile features -- Detecting C compile features - done -- Looking for sys/resource.h -- Looking for sys/resource.h - found -- Looking for getrusage -- Looking for getrusage - found -- Wrote /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/apps_config.h -- Looking for fcntl.h -- Looking for fcntl.h - found -- Looking for inttypes.h -- Looking for 
inttypes.h - found -- Looking for stdarg.h -- Looking for stdarg.h - found -- Looking for strings.h -- Looking for strings.h - found -- Looking for string.h -- Looking for string.h - found -- Looking for syslog.h -- Looking for syslog.h - found -- Looking for 4 include files stdlib.h, ..., float.h -- Looking for 4 include files stdlib.h, ..., float.h - found -- Looking for unistd.h -- Looking for unistd.h - found -- Looking for sys/types.h -- Looking for sys/types.h - found -- Looking for dlfcn.h -- Looking for dlfcn.h - found -- Looking for endian.h -- Looking for endian.h - found -- Looking for limits.h -- Looking for limits.h - found -- Looking for locale.h -- Looking for locale.h - found -- Looking for memory.h -- Looking for memory.h - found -- Looking for stdint.h -- Looking for stdint.h - found -- Looking for stdlib.h -- Looking for stdlib.h - found -- Looking for sys/cdefs.h -- Looking for sys/cdefs.h - found -- Looking for sys/param.h -- Looking for sys/param.h - found -- Looking for sys/stat.h -- Looking for sys/stat.h - found -- Looking for xlocale.h -- Looking for xlocale.h - not found -- Looking for _isnan -- Looking for _isnan - not found -- Looking for _finite -- Looking for _finite - not found -- Looking for INFINITY -- Looking for INFINITY - found -- Looking for isinf -- Looking for isinf - found -- Looking for isnan -- Looking for isnan - found -- Looking for nan -- Looking for nan - found -- Looking for _doprnt -- Looking for _doprnt - not found -- Looking for snprintf -- Looking for snprintf - found -- Looking for vasprintf -- Looking for vasprintf - found -- Looking for vsnprintf -- Looking for vsnprintf - found -- Looking for vprintf -- Looking for vprintf - found -- Looking for open -- Looking for open - found -- Looking for realloc -- Looking for realloc - found -- Looking for setlocale -- Looking for setlocale - found -- Looking for uselocale -- Looking for uselocale - found -- Looking for strcasecmp -- Looking for strcasecmp - found -- 
Looking for strncasecmp -- Looking for strncasecmp - found -- Looking for strdup -- Looking for strdup - found -- Looking for strerror -- Looking for strerror - found -- Looking for vsyslog -- Looking for vsyslog - found -- Looking for strtoll -- Looking for strtoll - found -- Looking for strtoull -- Looking for strtoull - found -- Looking for stddef.h -- Looking for stddef.h - found -- Check size of int -- Check size of int - done -- Check size of int64_t -- Check size of int64_t - done -- Check size of long -- Check size of long - done -- Check size of long long -- Check size of long long - done -- Check size of size_t -- Check size of size_t - done -- Check size of ssize_t -- Check size of ssize_t - done -- Performing Test HAS_GNU_WARNING_LONG -- Performing Test HAS_GNU_WARNING_LONG - Failed -- Performing Test HAVE_ATOMIC_BUILTINS -- Performing Test HAVE_ATOMIC_BUILTINS - Success -- Performing Test HAVE___THREAD -- Performing Test HAVE___THREAD - Success -- Wrote /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/config.h -- Wrote /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/json_config.h -- Performing Test REENTRANT_WORKS -- Performing Test REENTRANT_WORKS - Success -- Performing Test BSYMBOLIC_WORKS -- Performing Test BSYMBOLIC_WORKS - Success -- Performing Test VERSION_SCRIPT_WORKS -- Performing Test VERSION_SCRIPT_WORKS - Success -- Configuring done -- Generating done CMake Warning: Manually-specified variables were not used by the project: CMAKE_ASM_COMPILER CMAKE_ASM_COMPILER_ARG1 CMAKE_CXX_COMPILER CMAKE_CXX_COMPILER_ARG1 CMAKE_CXX_FLAGS_RELEASE CMAKE_EXPORT_NO_PACKAGE_REGISTRY CMAKE_EXPORT_PACKAGE_REGISTRY CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY CMAKE_FIND_ROOT_PATH_MODE_LIBRARY CMAKE_FIND_USE_PACKAGE_REGISTRY CMAKE_FIND_USE_SYSTEM_PACKAGE_REGISTRY CMAKE_MODULE_LINKER_FLAGS DL_LIBRARY -- Build files have been written to: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15 touch /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.configured_68b329da9893e34099c7d8ad5cb9c940 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.built touch /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.built_check CFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15=json-c-0.15 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include " CXXFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15=json-c-0.15 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include " LDFLAGS="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib 
-L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro " make -j1 -C /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/. AR="mips-openwrt-linux-musl-gcc-ar" AS="mips-openwrt-linux-musl-gcc -c -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15=json-c-0.15 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro" LD=mips-openwrt-linux-musl-ld NM="mips-openwrt-linux-musl-gcc-nm" CC="mips-openwrt-linux-musl-gcc" GCC="mips-openwrt-linux-musl-gcc" CXX="mips-openwrt-linux-musl-g++" RANLIB="mips-openwrt-linux-musl-gcc-ranlib" STRIP=mips-openwrt-linux-musl-strip OBJCOPY=mips-openwrt-linux-musl-objcopy OBJDUMP=mips-openwrt-linux-musl-objdump SIZE=mips-openwrt-linux-musl-size CROSS="mips-openwrt-linux-musl-" ARCH="mips" CMAKE_COMMAND='/home/build/openwrt/staging_dir/host/bin/cmake' CMAKE_DISABLE_cmake_check_build_system=1 ; make[4]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' Scanning dependencies of target json-c make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' [ 3%] Building C object CMakeFiles/json-c.dir/arraylist.c.o [ 6%] Building C object CMakeFiles/json-c.dir/debug.c.o [ 10%] Building C object CMakeFiles/json-c.dir/json_c_version.c.o [ 13%] Building C object CMakeFiles/json-c.dir/json_object.c.o [ 16%] Building C object CMakeFiles/json-c.dir/json_object_iterator.c.o [ 20%] Building C object CMakeFiles/json-c.dir/json_pointer.c.o [ 
23%] Building C object CMakeFiles/json-c.dir/json_tokener.c.o [ 26%] Building C object CMakeFiles/json-c.dir/json_util.c.o [ 30%] Building C object CMakeFiles/json-c.dir/json_visit.c.o [ 33%] Building C object CMakeFiles/json-c.dir/linkhash.c.o [ 36%] Building C object CMakeFiles/json-c.dir/printbuf.c.o [ 40%] Building C object CMakeFiles/json-c.dir/random_seed.c.o [ 43%] Building C object CMakeFiles/json-c.dir/strerror_override.c.o [ 46%] Linking C shared library libjson-c.so make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' [ 46%] Built target json-c make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' Scanning dependencies of target json-c-static make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' [ 50%] Building C object CMakeFiles/json-c-static.dir/arraylist.c.o [ 53%] Building C object CMakeFiles/json-c-static.dir/debug.c.o [ 56%] Building C object CMakeFiles/json-c-static.dir/json_c_version.c.o [ 60%] Building C object CMakeFiles/json-c-static.dir/json_object.c.o [ 63%] Building C object CMakeFiles/json-c-static.dir/json_object_iterator.c.o [ 66%] Building C object CMakeFiles/json-c-static.dir/json_pointer.c.o [ 70%] Building C object CMakeFiles/json-c-static.dir/json_tokener.c.o [ 73%] Building C object CMakeFiles/json-c-static.dir/json_util.c.o [ 76%] Building C object CMakeFiles/json-c-static.dir/json_visit.c.o [ 80%] Building C object CMakeFiles/json-c-static.dir/linkhash.c.o [ 83%] Building C object CMakeFiles/json-c-static.dir/printbuf.c.o [ 86%] Building C object CMakeFiles/json-c-static.dir/random_seed.c.o [ 90%] Building C object CMakeFiles/json-c-static.dir/strerror_override.c.o [ 93%] Linking C static library libjson-c.a make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' [ 93%] Built target 
json-c-static make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' Scanning dependencies of target json_parse make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' [ 96%] Building C object apps/CMakeFiles/json_parse.dir/json_parse.c.o [100%] Linking C executable json_parse make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' [100%] Built target json_parse make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' make[4]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' CFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15=json-c-0.15 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include " CXXFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15=json-c-0.15 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include 
-I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include " LDFLAGS="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro " make -C /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/. AR="mips-openwrt-linux-musl-gcc-ar" AS="mips-openwrt-linux-musl-gcc -c -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15=json-c-0.15 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro" LD=mips-openwrt-linux-musl-ld NM="mips-openwrt-linux-musl-gcc-nm" CC="mips-openwrt-linux-musl-gcc" GCC="mips-openwrt-linux-musl-gcc" CXX="mips-openwrt-linux-musl-g++" RANLIB="mips-openwrt-linux-musl-gcc-ranlib" STRIP=mips-openwrt-linux-musl-strip OBJCOPY=mips-openwrt-linux-musl-objcopy OBJDUMP=mips-openwrt-linux-musl-objdump SIZE=mips-openwrt-linux-musl-size CROSS="mips-openwrt-linux-musl-" ARCH="mips" CMAKE_COMMAND='/home/build/openwrt/staging_dir/host/bin/cmake' CMAKE_DISABLE_cmake_check_build_system=1 DESTDIR="/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install" install; make[4]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' make[6]: Leaving directory 
'/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' [ 46%] Built target json-c make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' [ 93%] Built target json-c-static make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' [100%] Built target json_parse make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' Install the project... -- Install configuration: "Release" -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/lib/libjson-c.so.5.1.0 -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/lib/libjson-c.so.5 -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/lib/libjson-c.so -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/lib/libjson-c.a -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/lib/cmake/json-c/json-c-targets.cmake -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/lib/cmake/json-c/json-c-targets-release.cmake -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/lib/cmake/json-c/json-c-config.cmake -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/lib/pkgconfig/json-c.pc -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/json_config.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/json.h -- Installing: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/arraylist.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/debug.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/json_c_version.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/json_inttypes.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/json_object.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/json_object_iterator.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/json_pointer.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/json_tokener.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/json_types.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/json_util.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/json_visit.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/linkhash.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/include/json-c/printbuf.h make[4]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15' touch /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.built rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.pkgdir/libjson-c.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.pkgdir/libjson-c mkdir -p 
/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.pkgdir/libjson-c install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.pkgdir/libjson-c/usr/lib cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/lib/libjson-c.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.pkgdir/libjson-c/usr/lib/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.pkgdir/libjson-c.installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp echo '5' | cmp -s - /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libjson-c.version || echo '5' > /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libjson-c.version SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.pkgdir/libjson-c/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libjson-c_installed mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-mips_24kc/libjson-c/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-mips_24kc/libjson-c/usr/lib cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/usr/lib/libjson-c.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-mips_24kc/libjson-c/usr/lib/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-mips_24kc/libjson-c -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" 
PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-mips_24kc/libjson-c rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-mips_24kc/libjson-c/usr/lib/libjson-c.so.5.1.0: shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-mips_24kc/libjson-c/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-mips_24kc/libjson-c /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-mips_24kc/libjson-c into /home/build/openwrt/bin/targets/ath79/generic/packages/libjson-c5_0.15-2_mips_24kc.ipk echo "libjson-c" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libjson-c.default.install rm -rf /home/build/openwrt/tmp/stage-libjson-c mkdir -p /home/build/openwrt/tmp/stage-libjson-c/host /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages install -d -m0755 /home/build/openwrt/tmp/stage-libjson-c cp -fpR 
/home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/ipkg-install/* /home/build/openwrt/tmp/stage-libjson-c/ /home/build/openwrt/staging_dir/host/bin/sed -i -e 's,/usr/include,${prefix}/include,g' /home/build/openwrt/tmp/stage-libjson-c/usr/lib/pkgconfig/json-c.pc /home/build/openwrt/staging_dir/host/bin/sed -i -e 's,/usr/lib,${exec_prefix}/lib,g' /home/build/openwrt/tmp/stage-libjson-c/usr/lib/pkgconfig/json-c.pc find /home/build/openwrt/tmp/stage-libjson-c -name '*.la' | xargs -r rm -f; find /home/build/openwrt/tmp/stage-libjson-c -name '*.la' | xargs -r rm -f; if [ -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/libjson-c.list ]; then /home/build/openwrt/scripts/clean-package.sh "/home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/libjson-c.list" "/home/build/openwrt/staging_dir/target-mips_24kc_musl"; fi if [ -d /home/build/openwrt/tmp/stage-libjson-c ]; then (cd /home/build/openwrt/tmp/stage-libjson-c; find ./ > /home/build/openwrt/tmp/stage-libjson-c.files); SHELL= flock /home/build/openwrt/tmp/.staging-dir.flock -c ' mv /home/build/openwrt/tmp/stage-libjson-c.files /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/libjson-c.list && cp -fpR /home/build/openwrt/tmp/stage-libjson-c/* /home/build/openwrt/staging_dir/target-mips_24kc_musl/; '; fi rm -rf /home/build/openwrt/tmp/stage-libjson-c touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.libjson-c_installed touch -r /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.built /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15/.autoremove 2>/dev/null >/dev/null find /home/build/openwrt/build_dir/target-mips_24kc_musl/json-c-0.15 -mindepth 1 -maxdepth 1 -not '(' -type f -and -name '.*' -and -size 0 ')' -and -not -name '.pkgdir' | xargs -r rm -rf make[3]: Leaving directory '/home/build/openwrt/feeds/base/package/libs/libjson-c' time: package/feeds/base/libjson-c/compile#6.21#3.15#10.89 make[3]: Entering directory 
'/home/build/openwrt/feeds/base/package/utils/lua' mkdir -p /home/build/openwrt/dl SHELL= flock /home/build/openwrt/tmp/.lua-5.1.5.tar.gz.flock -c ' /home/build/openwrt/scripts/download.pl "/home/build/openwrt/dl" "lua-5.1.5.tar.gz" "2640fc56a795f29d28ef15e13c34a47e223960b0240e8cb0a82d9b0738695333" "" "http://www.lua.org/ftp/" "http://www.tecgraf.puc-rio.br/lua/ftp/" ' + curl -f --connect-timeout 20 --retry 5 --location --insecure http://www.lua.org/ftp/lua-5.1.5.tar.gz % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 53 216k 53 115k 0 0 119k 0 0:00:01 --:--:-- 0:00:01 119k 100 216k 100 216k 0 0 203k 0 0:00:01 0:00:01 --:--:-- 203k touch /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.prepared_9df1cd2b77179a11c21f553844903598_18f1e190c5d53547fed41a3eaa76e9e9_check . /home/build/openwrt/include/shell.sh; gzip -dc /home/build/openwrt/dl/lua-5.1.5.tar.gz | tar -C /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.. -xf - [ ! -d ./src/ ] || cp -fpR ./src/. 
/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5 Applying ./patches/001-include-version-number.patch using plaintext: patching file Makefile patching file doc/lua5.1.1 (renamed from doc/lua.1) patching file doc/luac5.1.1 (renamed from doc/luac.1) patching file src/Makefile Applying ./patches/010-lua-5.1.3-lnum-full-260308.patch using plaintext: patching file src/Makefile patching file src/lapi.c patching file src/lapi.h patching file src/lauxlib.c patching file src/lauxlib.h patching file src/lbaselib.c patching file src/lcode.c patching file src/lcode.h patching file src/ldebug.c patching file src/ldo.c patching file src/ldump.c patching file src/liolib.c patching file src/llex.c patching file src/llex.h patching file src/llimits.h patching file src/lmathlib.c patching file src/lnum.c patching file src/lnum.h patching file src/lnum_config.h patching file src/lobject.c patching file src/lobject.h patching file src/loslib.c patching file src/lparser.c patching file src/lparser.h patching file src/lstrlib.c patching file src/ltable.c patching file src/ltable.h patching file src/ltm.c patching file src/lua.c patching file src/lua.h patching file src/luaconf.h patching file src/lundump.c patching file src/lvm.c patching file src/lvm.h patching file src/print.c Applying ./patches/011-lnum-use-double.patch using plaintext: patching file src/lnum_config.h Applying ./patches/012-lnum-fix-ltle-relational-operators.patch using plaintext: patching file src/lvm.c Applying ./patches/013-lnum-strtoul-parsing-fixes.patch using plaintext: patching file src/lnum.c patching file src/lnum_config.h Applying ./patches/015-lnum-ppc-compat.patch using plaintext: patching file src/lua.h Applying ./patches/020-shared_liblua.patch using plaintext: patching file Makefile patching file src/ldo.h patching file src/lfunc.h patching file src/lmem.h patching file src/lstring.h patching file src/lundump.h patching file src/Makefile Applying ./patches/030-archindependent-bytecode.patch 
using plaintext: patching file src/ldump.c patching file src/lundump.c Applying ./patches/040-use-symbolic-functions.patch using plaintext: patching file src/Makefile Applying ./patches/050-honor-cflags.patch using plaintext: patching file src/Makefile Applying ./patches/100-no_readline.patch using plaintext: patching file src/luaconf.h patching file src/Makefile Applying ./patches/200-lua-path.patch using plaintext: patching file src/luaconf.h Applying ./patches/300-opcode_performance.patch using plaintext: patching file src/lvm.c touch /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.prepared_9df1cd2b77179a11c21f553844903598_18f1e190c5d53547fed41a3eaa76e9e9 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.configured_* rm -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.lua_installed touch /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.configured_68b329da9893e34099c7d8ad5cb9c940 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.built touch /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.built_check make -C /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5 CC="mips-openwrt-linux-musl-gcc" AR="mips-openwrt-linux-musl-ar rcu" RANLIB="mips-openwrt-linux-musl-ranlib" INSTALL_ROOT=/usr CFLAGS="-I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now 
-Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99" MYLDFLAGS="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro " PKG_VERSION=5.1.5 linux make[4]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5' cd src && make linux V=5.1 make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/src' make all MYCFLAGS+="-DLUA_USE_LINUX " MYLIBS="-Wl,-E -ldl " make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/src' mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lapi.o lapi.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable 
-Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lcode.o lcode.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o ldebug.o ldebug.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o ldo.o ldo.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include 
-I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o ldump.o ldump.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lfunc.o lfunc.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 
-ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lgc.o lgc.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o llex.o llex.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lmem.o lmem.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include 
-I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lobject.o lobject.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lopcodes.o lopcodes.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 
-Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lparser.o lparser.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lstate.o lstate.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lstring.o lstring.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify 
-I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o ltable.o ltable.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o ltm.o ltm.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro 
-DLUA_USE_LINUX -fpic -std=gnu99 -c -o lundump.o lundump.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lvm.o lvm.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lzio.o lzio.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc 
-fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lnum.o lnum.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lauxlib.o lauxlib.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lbaselib.o lbaselib.c mips-openwrt-linux-musl-gcc 
-I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o ldblib.o ldblib.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o liolib.o liolib.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable 
-Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lmathlib.o lmathlib.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o loslib.o loslib.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o ltablib.o ltablib.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include 
-I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lstrlib.o lstrlib.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o loadlib.o loadlib.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 
-ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o linit.o linit.c mips-openwrt-linux-musl-ar rcu liblua.a lapi.o lcode.o ldebug.o ldo.o ldump.o lfunc.o lgc.o llex.o lmem.o lobject.o lopcodes.o lparser.o lstate.o lstring.o ltable.o ltm.o lundump.o lvm.o lzio.o lnum.o lauxlib.o lbaselib.o ldblib.o liolib.o lmathlib.o loslib.o ltablib.o lstrlib.o loadlib.o linit.o # DLL needs all object files /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-ar: `u' modifier ignored since `D' is the default (see `U') mips-openwrt-linux-musl-ranlib liblua.a mips-openwrt-linux-musl-gcc -o liblua.so.5.1.5 -Wl,-Bsymbolic-functions -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro -shared -Wl,-soname="liblua.so.5.1.5" lapi.o lcode.o ldebug.o ldo.o ldump.o lfunc.o lgc.o llex.o lmem.o lobject.o lopcodes.o lparser.o lstate.o lstring.o ltable.o ltm.o lundump.o lvm.o lzio.o lnum.o lauxlib.o lbaselib.o ldblib.o liolib.o lmathlib.o loslib.o ltablib.o lstrlib.o loadlib.o linit.o ln -fs liblua.so.5.1.5 liblua.so mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 
-ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o lua.o lua.c mips-openwrt-linux-musl-gcc -o lua5.1 -L. -llua -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro lua.o -lm -Wl,-E -ldl mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o luac.o luac.c mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5=lua-5.1.5 -Wformat 
-Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -DLUA_USE_LINUX -fpic -std=gnu99 -c -o print.o print.c mips-openwrt-linux-musl-gcc -o luac5.1 -L. -llua -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro luac.o print.o lopcodes.o -lm -Wl,-E -ldl make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/src' make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/src' make[4]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5' rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install make -C /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5 INSTALL_TOP="/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr" install make[4]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5' cd src && mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/bin /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/include /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/lib /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/man/man1 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/share/lua/5.1 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/lib/lua/5.1 cd src && install -p -m 0755 lua5.1 luac5.1 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/bin cd src && install -p -m 0644 lua.h luaconf.h lualib.h lauxlib.h ../etc/lua.hpp lnum_config.h 
/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/include cd src && install -p -m 0644 liblua.a liblua.so.5.1.5 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/lib ln -s liblua.so.5.1.5 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/lib/liblua.so cd doc && install -p -m 0644 lua5.1.1 luac5.1.1 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/man/man1 make[4]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5' touch /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.built rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/liblua.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/liblua mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/liblua install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/liblua/usr/lib cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/lib/liblua.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/liblua/usr/lib/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/liblua.installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp echo '5.1.5' | cmp -s - /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/liblua.version || echo '5.1.5' > /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/liblua.version SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/liblua/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.liblua_installed mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/liblua/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/liblua/usr/lib cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/lib/liblua.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/liblua/usr/lib/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/liblua -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/liblua rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/liblua/usr/lib/liblua.so.5.1.5: shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/liblua/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/liblua /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/liblua into /home/build/openwrt/bin/targets/ath79/generic/packages/liblua5.1.5_5.1.5-9_mips_24kc.ipk rm -rf /home/build/openwrt/tmp/stage-lua mkdir -p /home/build/openwrt/tmp/stage-lua/host /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages install -d -m0755 /home/build/openwrt/tmp/stage-lua/usr/include cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/include/lua{,lib,conf}.h /home/build/openwrt/tmp/stage-lua/usr/include/ cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/include/lua.hpp /home/build/openwrt/tmp/stage-lua/usr/include/ cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/include/lauxlib.h /home/build/openwrt/tmp/stage-lua/usr/include/ cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/include/lnum_config.h /home/build/openwrt/tmp/stage-lua/usr/include/ install -d -m0755 /home/build/openwrt/tmp/stage-lua/usr/lib cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/lib/liblua.{a,so*} /home/build/openwrt/tmp/stage-lua/usr/lib/ ln -sf liblua.so.5.1.5 /home/build/openwrt/tmp/stage-lua/usr/lib/liblualib.so install -d -m0755 /home/build/openwrt/tmp/stage-lua/usr/lib/pkgconfig cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/etc/lua.pc 
/home/build/openwrt/tmp/stage-lua/usr/lib/pkgconfig/ find /home/build/openwrt/tmp/stage-lua -name '*.la' | xargs -r rm -f; find /home/build/openwrt/tmp/stage-lua -name '*.la' | xargs -r rm -f; if [ -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/lua.list ]; then /home/build/openwrt/scripts/clean-package.sh "/home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/lua.list" "/home/build/openwrt/staging_dir/target-mips_24kc_musl"; fi if [ -d /home/build/openwrt/tmp/stage-lua ]; then (cd /home/build/openwrt/tmp/stage-lua; find ./ > /home/build/openwrt/tmp/stage-lua.files); SHELL= flock /home/build/openwrt/tmp/.staging-dir.flock -c ' mv /home/build/openwrt/tmp/stage-lua.files /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/lua.list && cp -fpR /home/build/openwrt/tmp/stage-lua/* /home/build/openwrt/staging_dir/target-mips_24kc_musl/; '; fi rm -rf /home/build/openwrt/tmp/stage-lua touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.lua_installed mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua/usr/bin install -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/bin/lua5.1 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua/usr/bin/ ln -sf lua5.1 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua/usr/bin/lua find /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" 
PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua/usr/bin/lua5.1: executable (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua into /home/build/openwrt/bin/targets/ath79/generic/packages/lua_5.1.5-9_mips_24kc.ipk rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua/usr/bin install -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/bin/lua5.1 
/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua/usr/bin/ ln -sf lua5.1 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua/usr/bin/lua touch /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua.installed mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/luac/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/luac/usr/bin install -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/bin/luac5.1 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/luac/usr/bin/ ln -sf luac5.1 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/luac/usr/bin/luac find /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/luac -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/luac rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/luac/usr/bin/luac5.1: executable (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/luac/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/luac /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/luac into /home/build/openwrt/bin/targets/ath79/generic/packages/luac_5.1.5-9_mips_24kc.ipk rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/luac.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/luac mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/luac install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/luac/usr/bin install -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-install/usr/bin/luac5.1 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/luac/usr/bin/ ln -sf luac5.1 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/luac/usr/bin/luac touch /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/luac.installed mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua-examples/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua-examples/usr/share/lua/examples install -m0644 
/home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/test/*.lua /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua-examples/usr/share/lua/examples/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua-examples -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua-examples (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua-examples/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua-examples /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/ipkg-mips_24kc/lua-examples into /home/build/openwrt/bin/targets/ath79/generic/packages/lua-examples_5.1.5-9_mips_24kc.ipk rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua-examples.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua-examples mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua-examples install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua-examples/usr/share/lua/examples install -m0644 /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/test/*.lua /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua-examples/usr/share/lua/examples/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua-examples.installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.lua_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/luac/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.luac_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.pkgdir/lua-examples/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.lua-examples_installed touch -r /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.built /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5/.autoremove 2>/dev/null >/dev/null find /home/build/openwrt/build_dir/target-mips_24kc_musl/lua-5.1.5 -mindepth 1 -maxdepth 1 -not '(' -type f -and -name '.*' -and -size 0 ')' -and -not -name '.pkgdir' | xargs -r rm -rf make[3]: Leaving directory '/home/build/openwrt/feeds/base/package/utils/lua' time: package/feeds/base/lua/compile#5.67#1.39#10.93 make[3]: Entering directory '/home/build/openwrt/feeds/base/package/libs/libubox' mkdir -p /home/build/openwrt/dl SHELL= flock /home/build/openwrt/tmp/.libubox-2021-05-16-b14c4688.tar.xz.flock -c ' /home/build/openwrt/scripts/download.pl "/home/build/openwrt/dl" "libubox-2021-05-16-b14c4688.tar.xz" "7dd1db1e0074a9c7c722db654cce3111b3bd3cff0bfd791c4497cb0f6c22d3ca" "" || ( echo "Checking out files from the git repository..."; mkdir -p /home/build/openwrt/tmp/dl && cd /home/build/openwrt/tmp/dl && rm -rf libubox-2021-05-16-b14c4688 && [ \! 
-d libubox-2021-05-16-b14c4688 ] && git clone https://git.openwrt.org/project/libubox.git libubox-2021-05-16-b14c4688 && (cd libubox-2021-05-16-b14c4688 && git checkout b14c4688612c05c78ce984d7bde633bce8703b1e && git submodule update --init --recursive) && echo "Packing checkout..." && export TAR_TIMESTAMP=`cd libubox-2021-05-16-b14c4688 && git log -1 --format='\''@%ct'\''` && rm -rf libubox-2021-05-16-b14c4688/.git && tar --numeric-owner --owner=0 --group=0 --mode=a-s --sort=name ${TAR_TIMESTAMP:+--mtime="$TAR_TIMESTAMP"} -c libubox-2021-05-16-b14c4688 | xz -zc -7e > /home/build/openwrt/tmp/dl/libubox-2021-05-16-b14c4688.tar.xz && mv /home/build/openwrt/tmp/dl/libubox-2021-05-16-b14c4688.tar.xz /home/build/openwrt/dl/ && rm -rf libubox-2021-05-16-b14c4688; ) ' + curl -f --connect-timeout 20 --retry 5 --location --insecure https://sources.openwrt.org/libubox-2021-05-16-b14c4688.tar.xz % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 69256 100 69256 0 0 80250 0 --:--:-- --:--:-- --:--:-- 80250 touch /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.prepared_097ccdef62a6a274a655920cead7ecd2_18f1e190c5d53547fed41a3eaa76e9e9_check . /home/build/openwrt/include/shell.sh; xzcat /home/build/openwrt/dl/libubox-2021-05-16-b14c4688.tar.xz | tar -C /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.. -xf - [ ! -d ./src/ ] || cp -fpR ./src/. 
/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688 touch /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.prepared_097ccdef62a6a274a655920cead7ecd2_18f1e190c5d53547fed41a3eaa76e9e9 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.configured_* rm -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.libubox_installed mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688 (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688; CFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688=libubox-2021-05-16-b14c4688 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include " CXXFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688=libubox-2021-05-16-b14c4688 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include " LDFLAGS="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro " cmake -DCMAKE_SYSTEM_NAME=Linux -DCMAKE_SYSTEM_VERSION=1 -DCMAKE_SYSTEM_PROCESSOR=mips -DCMAKE_BUILD_TYPE=Release 
-DCMAKE_C_FLAGS_RELEASE="-DNDEBUG" -DCMAKE_CXX_FLAGS_RELEASE="-DNDEBUG" -DCMAKE_C_COMPILER="/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc" -DCMAKE_C_COMPILER_ARG1="" -DCMAKE_CXX_COMPILER="/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-g++" -DCMAKE_CXX_COMPILER_ARG1="" -DCMAKE_ASM_COMPILER="/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc" -DCMAKE_ASM_COMPILER_ARG1="" -DCMAKE_EXE_LINKER_FLAGS:STRING="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro" -DCMAKE_MODULE_LINKER_FLAGS:STRING="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro -Wl,-Bsymbolic-functions" -DCMAKE_SHARED_LINKER_FLAGS:STRING="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro -Wl,-Bsymbolic-functions" -DCMAKE_AR="/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc-ar" -DCMAKE_NM="/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc-nm" -DCMAKE_RANLIB="/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc-ranlib" 
-DCMAKE_FIND_ROOT_PATH="/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr;/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl" -DCMAKE_FIND_ROOT_PATH_MODE_PROGRAM=BOTH -DCMAKE_FIND_ROOT_PATH_MODE_LIBRARY=ONLY -DCMAKE_FIND_ROOT_PATH_MODE_INCLUDE=ONLY -DCMAKE_STRIP=: -DCMAKE_INSTALL_PREFIX=/usr -DDL_LIBRARY=/home/build/openwrt/staging_dir/target-mips_24kc_musl -DCMAKE_PREFIX_PATH=/home/build/openwrt/staging_dir/target-mips_24kc_musl -DCMAKE_SKIP_RPATH=TRUE -DCMAKE_EXPORT_PACKAGE_REGISTRY=FALSE -DCMAKE_EXPORT_NO_PACKAGE_REGISTRY=TRUE -DCMAKE_FIND_USE_PACKAGE_REGISTRY=FALSE -DCMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY=TRUE -DCMAKE_FIND_USE_SYSTEM_PACKAGE_REGISTRY=FALSE -DCMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY=TRUE -DLUAPATH=/usr/lib/lua -DABIVERSION="20210623" /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688 ) -- The C compiler identification is GNU 8.4.0 -- Detecting C compiler ABI info -- Detecting C compiler ABI info - done -- Check for working C compiler: /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc - skipped -- Detecting C compile features -- Detecting C compile features - done -- Found PkgConfig: /home/build/openwrt/staging_dir/host/bin/pkg-config (found version "1.6.3") -- Checking for one of the modules 'json-c' -- Looking for clock_gettime -- Looking for clock_gettime - found -- Checking for one of the modules 'lua5.1;lua-5.1' -- Configuring done -- Generating done CMake Warning: Manually-specified variables were not used by the project: CMAKE_ASM_COMPILER CMAKE_ASM_COMPILER_ARG1 CMAKE_CXX_COMPILER CMAKE_CXX_COMPILER_ARG1 CMAKE_CXX_FLAGS_RELEASE CMAKE_EXPORT_NO_PACKAGE_REGISTRY CMAKE_EXPORT_PACKAGE_REGISTRY CMAKE_FIND_PACKAGE_NO_PACKAGE_REGISTRY CMAKE_FIND_PACKAGE_NO_SYSTEM_PACKAGE_REGISTRY CMAKE_FIND_USE_PACKAGE_REGISTRY CMAKE_FIND_USE_SYSTEM_PACKAGE_REGISTRY DL_LIBRARY -- Build files have been written to: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688 touch /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.configured_68b329da9893e34099c7d8ad5cb9c940 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.built touch /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.built_check CFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688=libubox-2021-05-16-b14c4688 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include " CXXFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688=libubox-2021-05-16-b14c4688 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify 
-I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include " LDFLAGS="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro " make -j1 -C /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/. AR="mips-openwrt-linux-musl-gcc-ar" AS="mips-openwrt-linux-musl-gcc -c -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688=libubox-2021-05-16-b14c4688 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include" LD=mips-openwrt-linux-musl-ld NM="mips-openwrt-linux-musl-gcc-nm" CC="mips-openwrt-linux-musl-gcc" GCC="mips-openwrt-linux-musl-gcc" CXX="mips-openwrt-linux-musl-g++" RANLIB="mips-openwrt-linux-musl-gcc-ranlib" STRIP=mips-openwrt-linux-musl-strip OBJCOPY=mips-openwrt-linux-musl-objcopy OBJDUMP=mips-openwrt-linux-musl-objdump SIZE=mips-openwrt-linux-musl-size CROSS="mips-openwrt-linux-musl-" ARCH="mips" CMAKE_COMMAND='/home/build/openwrt/staging_dir/host/bin/cmake' CMAKE_DISABLE_cmake_check_build_system=1 ; make[4]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' Scanning dependencies of target blobmsg_json-static make[6]: Leaving directory 
'/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 2%] Building C object CMakeFiles/blobmsg_json-static.dir/blobmsg_json.c.o [ 4%] Linking C static library libblobmsg_json.a make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 4%] Built target blobmsg_json-static make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' Scanning dependencies of target ubox-static make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 6%] Building C object CMakeFiles/ubox-static.dir/avl.c.o [ 8%] Building C object CMakeFiles/ubox-static.dir/avl-cmp.c.o [ 10%] Building C object CMakeFiles/ubox-static.dir/blob.c.o [ 12%] Building C object CMakeFiles/ubox-static.dir/blobmsg.c.o [ 14%] Building C object CMakeFiles/ubox-static.dir/uloop.c.o [ 16%] Building C object CMakeFiles/ubox-static.dir/usock.c.o [ 18%] Building C object CMakeFiles/ubox-static.dir/ustream.c.o [ 20%] Building C object CMakeFiles/ubox-static.dir/ustream-fd.c.o [ 22%] Building C object CMakeFiles/ubox-static.dir/vlist.c.o [ 25%] Building C object CMakeFiles/ubox-static.dir/utils.c.o [ 27%] Building C object CMakeFiles/ubox-static.dir/safe_list.c.o [ 29%] Building C object CMakeFiles/ubox-static.dir/runqueue.c.o [ 31%] Building C object CMakeFiles/ubox-static.dir/md5.c.o [ 33%] Building C object CMakeFiles/ubox-static.dir/kvlist.c.o [ 35%] Building C object CMakeFiles/ubox-static.dir/ulog.c.o [ 37%] Building C object CMakeFiles/ubox-static.dir/base64.c.o [ 39%] Linking C static library libubox.a make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 39%] 
Built target ubox-static make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' Scanning dependencies of target ubox make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 41%] Building C object CMakeFiles/ubox.dir/avl.c.o [ 43%] Building C object CMakeFiles/ubox.dir/avl-cmp.c.o [ 45%] Building C object CMakeFiles/ubox.dir/blob.c.o [ 47%] Building C object CMakeFiles/ubox.dir/blobmsg.c.o [ 50%] Building C object CMakeFiles/ubox.dir/uloop.c.o [ 52%] Building C object CMakeFiles/ubox.dir/usock.c.o [ 54%] Building C object CMakeFiles/ubox.dir/ustream.c.o [ 56%] Building C object CMakeFiles/ubox.dir/ustream-fd.c.o [ 58%] Building C object CMakeFiles/ubox.dir/vlist.c.o [ 60%] Building C object CMakeFiles/ubox.dir/utils.c.o [ 62%] Building C object CMakeFiles/ubox.dir/safe_list.c.o [ 64%] Building C object CMakeFiles/ubox.dir/runqueue.c.o [ 66%] Building C object CMakeFiles/ubox.dir/md5.c.o [ 68%] Building C object CMakeFiles/ubox.dir/kvlist.c.o [ 70%] Building C object CMakeFiles/ubox.dir/ulog.c.o [ 72%] Building C object CMakeFiles/ubox.dir/base64.c.o [ 75%] Linking C shared library libubox.so make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 75%] Built target ubox make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' Scanning dependencies of target blobmsg_json make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 77%] Building C object CMakeFiles/blobmsg_json.dir/blobmsg_json.c.o [ 79%] Linking C shared library libblobmsg_json.so make[6]: Leaving directory 
'/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 79%] Built target blobmsg_json make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' Scanning dependencies of target jshn make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 81%] Building C object CMakeFiles/jshn.dir/jshn.c.o [ 83%] Linking C executable jshn make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 83%] Built target jshn make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' Scanning dependencies of target json_script make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 85%] Building C object CMakeFiles/json_script.dir/json_script.c.o [ 87%] Linking C shared library libjson_script.so make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 87%] Built target json_script make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' Scanning dependencies of target uloop_lua make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 89%] Building C object lua/CMakeFiles/uloop_lua.dir/uloop.c.o [ 91%] Linking C shared module uloop.so make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 91%] Built target uloop_lua make[6]: Entering directory 
'/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' Scanning dependencies of target ustream-example make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 93%] Building C object examples/CMakeFiles/ustream-example.dir/ustream-example.c.o [ 95%] Linking C executable ustream-example make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 95%] Built target ustream-example make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' Scanning dependencies of target json_script-example make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 97%] Building C object examples/CMakeFiles/json_script-example.dir/json_script-example.c.o [100%] Linking C executable json_script-example make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [100%] Built target json_script-example make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[4]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' CFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688=libubox-2021-05-16-b14c4688 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro 
-I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include " CXXFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688=libubox-2021-05-16-b14c4688 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include " LDFLAGS="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro " make -C /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/. 
AR="mips-openwrt-linux-musl-gcc-ar" AS="mips-openwrt-linux-musl-gcc -c -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688=libubox-2021-05-16-b14c4688 -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include" LD=mips-openwrt-linux-musl-ld NM="mips-openwrt-linux-musl-gcc-nm" CC="mips-openwrt-linux-musl-gcc" GCC="mips-openwrt-linux-musl-gcc" CXX="mips-openwrt-linux-musl-g++" RANLIB="mips-openwrt-linux-musl-gcc-ranlib" STRIP=mips-openwrt-linux-musl-strip OBJCOPY=mips-openwrt-linux-musl-objcopy OBJDUMP=mips-openwrt-linux-musl-objdump SIZE=mips-openwrt-linux-musl-size CROSS="mips-openwrt-linux-musl-" ARCH="mips" CMAKE_COMMAND='/home/build/openwrt/staging_dir/host/bin/cmake' CMAKE_DISABLE_cmake_check_build_system=1 DESTDIR="/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install" install; make[4]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 4%] Built target blobmsg_json-static make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 39%] Built target ubox-static make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: 
Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 75%] Built target ubox make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 79%] Built target blobmsg_json make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 83%] Built target jshn make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 87%] Built target json_script make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 91%] Built target uloop_lua make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [ 95%] Built target ustream-example make[6]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' make[6]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' [100%] Built target json_script-example make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' Install the project... 
-- Install configuration: "Release" -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/assert.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/avl-cmp.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/avl.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/blob.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/blobmsg.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/blobmsg_json.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/json_script.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/kvlist.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/list.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/md5.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/runqueue.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/safe_list.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/ulog.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/uloop.h -- Installing: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/usock.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/ustream.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/utils.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/include/libubox/vlist.h -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libubox.so.20210623 -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libubox.so -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libubox.a -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libblobmsg_json.so.20210623 -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libblobmsg_json.so -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libblobmsg_json.a -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/bin/jshn -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libjson_script.so.20210623 -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libjson_script.so -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/share/libubox/jshn.sh -- Installing: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/lua/uloop.so 
make[4]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688' touch /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.built rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox/lib/ install -m0644 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libubox.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox/lib/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox.installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp echo '20210623' | cmp -s - /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libubox.version || echo '20210623' > /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libubox.version SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libubox_installed mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox/lib/ install -m0644 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libubox.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox/lib/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox/lib/libubox.so.20210623: shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox into /home/build/openwrt/bin/targets/ath79/generic/packages/libubox_2021-05-16-b14c4688-2_mips_24kc.ipk mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libblobmsg-json/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libblobmsg-json/lib/ install -m0644 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libblobmsg_json.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libblobmsg-json/lib/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libblobmsg-json -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" 
PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libblobmsg-json rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libblobmsg-json/lib/libblobmsg_json.so.20210623: shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libblobmsg-json/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libblobmsg-json /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libblobmsg-json into /home/build/openwrt/bin/targets/ath79/generic/packages/libblobmsg-json_2021-05-16-b14c4688-2_mips_24kc.ipk mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/jshn/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 
/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/jshn/usr/bin /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/jshn/usr/share/libubox install -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/bin/jshn /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/jshn/usr/bin install -m0644 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/share/libubox/jshn.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/jshn/usr/share/libubox find /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/jshn -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/jshn rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/jshn/usr/bin/jshn: executable (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/jshn/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/jshn /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/jshn into /home/build/openwrt/bin/targets/ath79/generic/packages/jshn_2021-05-16-b14c4688-2_mips_24kc.ipk mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libjson-script/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libjson-script/lib/ install -m0644 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libjson_script.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libjson-script/lib/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libjson-script -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libjson-script rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libjson-script/lib/libjson_script.so.20210623: shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libjson-script/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libjson-script /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libjson-script into /home/build/openwrt/bin/targets/ath79/generic/packages/libjson-script_2021-05-16-b14c4688-2_mips_24kc.ipk mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox-lua/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox-lua/usr/lib/lua cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/lua/uloop.so 
/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox-lua/usr/lib/lua/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox-lua -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox-lua rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox-lua/usr/lib/lua/uloop.so: shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox-lua/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox-lua /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-mips_24kc/libubox-lua into /home/build/openwrt/bin/targets/ath79/generic/packages/libubox-lua_2021-05-16-b14c4688-2_mips_24kc.ipk echo "libubox" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libubox.default.install rm -rf /home/build/openwrt/tmp/stage-libubox mkdir -p /home/build/openwrt/tmp/stage-libubox/host /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages install -d -m0755 /home/build/openwrt/tmp/stage-libubox cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/* /home/build/openwrt/tmp/stage-libubox/ find /home/build/openwrt/tmp/stage-libubox -name '*.la' | xargs -r rm -f; find /home/build/openwrt/tmp/stage-libubox -name '*.la' | xargs -r rm -f; if [ -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/libubox.list ]; then /home/build/openwrt/scripts/clean-package.sh "/home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/libubox.list" "/home/build/openwrt/staging_dir/target-mips_24kc_musl"; fi if [ -d /home/build/openwrt/tmp/stage-libubox ]; then (cd /home/build/openwrt/tmp/stage-libubox; find ./ > /home/build/openwrt/tmp/stage-libubox.files); SHELL= flock /home/build/openwrt/tmp/.staging-dir.flock -c ' mv /home/build/openwrt/tmp/stage-libubox.files 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/libubox.list && cp -fpR /home/build/openwrt/tmp/stage-libubox/* /home/build/openwrt/staging_dir/target-mips_24kc_musl/; '; fi rm -rf /home/build/openwrt/tmp/stage-libubox touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.libubox_installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libblobmsg-json.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libblobmsg-json mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libblobmsg-json install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libblobmsg-json/lib/ install -m0644 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libblobmsg_json.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libblobmsg-json/lib/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libblobmsg-json.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/jshn.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/jshn mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/jshn install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/jshn/usr/bin /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/jshn/usr/share/libubox install -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/bin/jshn /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/jshn/usr/bin install -m0644 
/home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/share/libubox/jshn.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/jshn/usr/share/libubox touch /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/jshn.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libjson-script.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libjson-script mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libjson-script install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libjson-script/lib/ install -m0644 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/ipkg-install/usr/lib/libjson_script.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libjson-script/lib/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libjson-script.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox-lua.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox-lua mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox-lua install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox-lua/usr/lib/lua cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/lua/uloop.so /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox-lua/usr/lib/lua/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox-lua.installed mkdir -p 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp echo '20210623' | cmp -s - /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libblobmsg-json.version || echo '20210623' > /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libblobmsg-json.version SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libblobmsg-json/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libblobmsg-json_installed echo "libblobmsg-json" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libubox.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/jshn/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.jshn_installed echo "jshn" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libubox.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp echo '20210623' | cmp -s - /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libjson-script.version || echo '20210623' > /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libjson-script.version SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libjson-script/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libjson-script_installed echo "libjson-script" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libubox.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.pkgdir/libubox-lua/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libubox-lua_installed touch -r /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.built /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688/.autoremove 2>/dev/null >/dev/null find /home/build/openwrt/build_dir/target-mips_24kc_musl/libubox-2021-05-16-b14c4688 -mindepth 1 -maxdepth 1 -not '(' -type f -and -name '.*' -and -size 0 ')' -and -not -name '.pkgdir' | xargs -r rm -rf make[3]: Leaving directory '/home/build/openwrt/feeds/base/package/libs/libubox' time: package/feeds/base/libubox/compile#6.20#2.42#13.00 make[2]: Leaving directory '/home/build/openwrt' make[1]: Leaving directory '/home/build/openwrt' make[1]: Entering directory '/home/build/openwrt' make[2]: Entering directory '/home/build/openwrt' make[3]: Entering directory '/home/build/openwrt' make[3]: Leaving directory '/home/build/openwrt' WARNING: Makefile 'package/linux/Makefile' has a dependency on 'eip197-mini-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'r8169-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'e100-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'bnx2-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' 
has a dependency on 'bnx2x-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'ar3k-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'mwifiex-sdio-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'kmod-phy-bcm-ns-usb2', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'edgeport-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'kmod-phy-bcm-ns-usb3', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'amdgpu-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'radeon-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'prism54-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'rtl8192su-firmware', which does not exist make[3]: Entering directory '/home/build/openwrt' make[3]: Leaving directory '/home/build/openwrt' make[2]: Leaving directory '/home/build/openwrt' tmp/.config-package.in:67:warning: ignoring type redefinition of 'PACKAGE_libc' from 'bool' to 'tristate' tmp/.config-package.in:95:warning: ignoring type redefinition of 'PACKAGE_libgcc' from 'bool' to 'tristate' tmp/.config-package.in:211:warning: ignoring type redefinition of 'PACKAGE_libpthread' from 'bool' to 'tristate' tmp/.config-package.in:637:warning: ignoring type redefinition of 'PACKAGE_libblobmsg-json' from 'bool' to 'tristate' tmp/.config-package.in:650:warning: ignoring type redefinition of 'PACKAGE_libjson-c' from 'bool' to 'tristate' tmp/.config-package.in:677:warning: ignoring type redefinition of 'PACKAGE_libubox' from 'bool' to 'tristate' tmp/.config-package.in:721:warning: ignoring type redefinition of 'PACKAGE_jshn' from 'bool' to 'tristate' tmp/.config-package.in:791:warning: ignoring type redefinition of 
'PACKAGE_libjson-script' from 'bool' to 'tristate' # # No change to .config # make[2]: Entering directory '/home/build/openwrt' make[3]: Entering directory '/home/build/openwrt/feeds/base/package/libs/openssl' bash: md5: command not found rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k rm -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.openssl_installed rm -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/openssl.list make[3]: Leaving directory '/home/build/openwrt/feeds/base/package/libs/openssl' time: package/feeds/base/openssl/clean#0.09#0.07#0.18 make[2]: Leaving directory '/home/build/openwrt' make[2]: Entering directory '/home/build/openwrt' make[3]: Entering directory '/home/build/openwrt' make[3]: Leaving directory '/home/build/openwrt' WARNING: Makefile 'package/linux/Makefile' has a dependency on 'eip197-mini-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'r8169-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'e100-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'bnx2-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'bnx2x-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'ar3k-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'mwifiex-sdio-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'kmod-phy-bcm-ns-usb2', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'edgeport-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'kmod-phy-bcm-ns-usb3', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'amdgpu-firmware', which does not exist WARNING: Makefile 
'package/linux/Makefile' has a dependency on 'radeon-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'prism54-firmware', which does not exist WARNING: Makefile 'package/linux/Makefile' has a dependency on 'rtl8192su-firmware', which does not exist make[3]: Entering directory '/home/build/openwrt' make[3]: Leaving directory '/home/build/openwrt' make[2]: Leaving directory '/home/build/openwrt' tmp/.config-package.in:67:warning: ignoring type redefinition of 'PACKAGE_libc' from 'bool' to 'tristate' tmp/.config-package.in:95:warning: ignoring type redefinition of 'PACKAGE_libgcc' from 'bool' to 'tristate' tmp/.config-package.in:211:warning: ignoring type redefinition of 'PACKAGE_libpthread' from 'bool' to 'tristate' tmp/.config-package.in:637:warning: ignoring type redefinition of 'PACKAGE_libblobmsg-json' from 'bool' to 'tristate' tmp/.config-package.in:650:warning: ignoring type redefinition of 'PACKAGE_libjson-c' from 'bool' to 'tristate' tmp/.config-package.in:677:warning: ignoring type redefinition of 'PACKAGE_libubox' from 'bool' to 'tristate' tmp/.config-package.in:721:warning: ignoring type redefinition of 'PACKAGE_jshn' from 'bool' to 'tristate' tmp/.config-package.in:791:warning: ignoring type redefinition of 'PACKAGE_libjson-script' from 'bool' to 'tristate' # # No change to .config # make[2]: Entering directory '/home/build/openwrt' make[3]: Entering directory '/home/build/openwrt/package/toolchain' Makefile:762: WARNING: skipping libgomp -- package has no install section touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.prepared_7f1b47944ccd427bc40bcb08f4c15a24_18f1e190c5d53547fed41a3eaa76e9e9_check mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.prepared_7f1b47944ccd427bc40bcb08f4c15a24_18f1e190c5d53547fed41a3eaa76e9e9 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.configured_* 
rm -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.toolchain_installed (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/./; if [ -x ./configure ]; then find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ -name config.guess | xargs -r chmod u+w; find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ -name config.guess | xargs -r -n1 cp --remove-destination /home/build/openwrt/scripts/config.guess; find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ -name config.sub | xargs -r chmod u+w; find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ -name config.sub | xargs -r -n1 cp --remove-destination /home/build/openwrt/scripts/config.sub; AR="mips-openwrt-linux-musl-gcc-ar" AS="mips-openwrt-linux-musl-gcc -c -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain=toolchain -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro" LD=mips-openwrt-linux-musl-ld NM="mips-openwrt-linux-musl-gcc-nm" CC="mips-openwrt-linux-musl-gcc" GCC="mips-openwrt-linux-musl-gcc" CXX="mips-openwrt-linux-musl-g++" RANLIB="mips-openwrt-linux-musl-gcc-ranlib" STRIP=mips-openwrt-linux-musl-strip OBJCOPY=mips-openwrt-linux-musl-objcopy OBJDUMP=mips-openwrt-linux-musl-objdump SIZE=mips-openwrt-linux-musl-size CFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain=toolchain -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro " CXXFLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt 
-fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -mips16 -minterlink-mips16 -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain=toolchain -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro " CPPFLAGS="-I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include " LDFLAGS="-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro " ./configure --target=mips-openwrt-linux --host=mips-openwrt-linux --build=x86_64-pc-linux-gnu --program-prefix="" --program-suffix="" --prefix=/usr --exec-prefix=/usr --bindir=/usr/bin --sbindir=/usr/sbin --libexecdir=/usr/lib --sysconfdir=/etc --datadir=/usr/share --localstatedir=/var --mandir=/usr/man --infodir=/usr/info --disable-nls ; fi; ) touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.configured_68b329da9893e34099c7d8ad5cb9c940 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.built touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.built_check cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libgcc_s.so.1 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.built rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc mkdir -p 
/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/lib /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/usr/bin cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/ld-musl-*.so* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/lib/ cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libc.so* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/lib/ ln -sf ../../lib/libc.so /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/usr/bin/ldd cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libcrypt.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libc.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libstdc++fs.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libm.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libresolv.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libssp_nonshared.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libatomic.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libsupc++.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libutil.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libxnet.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libdl.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libpthread.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/librt.a /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libstdc++.a /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/lib/ cp -fpR 
/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/gcc/mips-openwrt-linux-musl/8.4.0/libgcc_pic.a /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/lib/libgcc_s_pic.a; cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/gcc/mips-openwrt-linux-musl/8.4.0/libgcc.map /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/lib/libgcc_s_pic.map touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc.installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libc/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libc_installed removed '/home/build/openwrt/bin/targets/ath79/generic/packages/libgcc1_8.4.0-2_mips_24kc.ipk' mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc/lib cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libgcc_s.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc/lib/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh 
/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc/lib/libgcc_s.so.1: shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libgcc into /home/build/openwrt/bin/targets/ath79/generic/packages/libgcc1_8.4.0-2_mips_24kc.ipk removed '/home/build/openwrt/bin/targets/ath79/generic/packages/libc_1.1.24-2_mips_24kc.ipk' mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/lib /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/usr/bin cp -fpR 
/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/ld-musl-*.so* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/lib/ cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libc.so* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/lib/ ln -sf ../../lib/libc.so /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/usr/bin/ldd find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/lib/libc.so: shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libc into /home/build/openwrt/bin/targets/ath79/generic/packages/libc_1.1.24-2_mips_24kc.ipk removed '/home/build/openwrt/bin/targets/ath79/generic/packages/libatomic1_8.4.0-2_mips_24kc.ipk' mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic/lib cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libatomic.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic/lib/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic/lib/libatomic.so.1.2.0: 
shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libatomic into /home/build/openwrt/bin/targets/ath79/generic/packages/libatomic1_8.4.0-2_mips_24kc.ipk removed '/home/build/openwrt/bin/targets/ath79/generic/packages/libstdcpp6_8.4.0-2_mips_24kc.ipk' mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp/usr/lib cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libstdc++.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp/usr/lib/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp 
-name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp/usr/lib/libstdc++.so.6.0.25: executable (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libstdcpp into /home/build/openwrt/bin/targets/ath79/generic/packages/libstdcpp6_8.4.0-2_mips_24kc.ipk removed '/home/build/openwrt/bin/targets/ath79/generic/packages/libpthread_1.1.24-2_mips_24kc.ipk' mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread/lib find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] 
&& exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/libpthread into /home/build/openwrt/bin/targets/ath79/generic/packages/libpthread_1.1.24-2_mips_24kc.ipk removed '/home/build/openwrt/bin/targets/ath79/generic/packages/librt_1.1.24-2_mips_24kc.ipk' mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt/lib find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt (cd 
/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/ipkg-mips_24kc/librt into /home/build/openwrt/bin/targets/ath79/generic/packages/librt_1.1.24-2_mips_24kc.ipk echo "libc" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/toolchain.default.install rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc/lib cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libgcc_s.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc/lib/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc.installed rm -rf 
/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic/lib cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libatomic.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic/lib/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp/usr/lib cp -fpR /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib/libstdc++.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp/usr/lib/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libpthread.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libpthread mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libpthread install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libpthread/lib touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libpthread.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/librt.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/librt mkdir -p 
/home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/librt install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/librt/lib touch /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/librt.installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp echo '1' | cmp -s - /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libgcc.version || echo '1' > /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libgcc.version SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libgcc/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libgcc_installed echo "libgcc" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/toolchain.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp echo '1' | cmp -s - /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libatomic.version || echo '1' > /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libatomic.version SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libatomic/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libatomic_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp echo '6' | cmp -s - /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libstdcpp.version || echo '6' > /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libstdcpp.version SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libstdcpp/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libstdcpp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/libpthread/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libpthread_installed echo "libpthread" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/toolchain.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.pkgdir/librt/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.librt_installed touch -r /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.built /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain/.autoremove 2>/dev/null >/dev/null find /home/build/openwrt/build_dir/target-mips_24kc_musl/toolchain -mindepth 1 -maxdepth 1 -not '(' -type f -and -name '.*' -and -size 0 ')' -and -not -name '.pkgdir' | xargs -r rm -rf make[3]: Leaving directory '/home/build/openwrt/package/toolchain' time: package/toolchain/compile#0.85#0.75#5.64 make[3]: Entering directory '/home/build/openwrt/package/linux' rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.configured_* rm -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.linux_installed ( if [ -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/linux.list ]; then /home/build/openwrt/scripts/clean-package.sh "/home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/linux.list" 
"/home/build/openwrt/staging_dir/target-mips_24kc_musl"; fi; ) touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.configured_68b329da9893e34099c7d8ad5cb9c940 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.built_b4a2cf9981ed18a7d6f3869843f66faa touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.built_b4a2cf9981ed18a7d6f3869843f66faa_check touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.built_b4a2cf9981ed18a7d6f3869843f66faa rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kernel.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kernel mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kernel # nothing to do touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kernel.installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kernel/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kernel_installed Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kernel into /home/build/openwrt/bin/targets/ath79/generic/packages/kernel_5.4.63-1-cb83e978d195bd392f1288c45dce0165_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-aoe/lib/modules/5.4.63/aoe.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-aoe into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-aoe_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-scsi-core/lib/modules/5.4.63/sd_mod.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-scsi-core/lib/modules/5.4.63/scsi_mod.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-scsi-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-scsi-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-core/lib/modules/5.4.63/libata.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-core/lib/modules/5.4.63/libahci.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ata-core_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-ahci/lib/modules/5.4.63/ahci.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-ahci into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ata-ahci_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-artop/lib/modules/5.4.63/pata_artop.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-artop into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ata-artop_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-marvell-sata/lib/modules/5.4.63/sata_mv.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-marvell-sata into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ata-marvell-sata_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-nvidia-sata/lib/modules/5.4.63/sata_nv.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-nvidia-sata into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ata-nvidia-sata_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-pdc202xx-old/lib/modules/5.4.63/pata_pdc202xx_old.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-pdc202xx-old into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ata-pdc202xx-old_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-piix/lib/modules/5.4.63/ata_piix.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-piix into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ata-piix_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-sil/lib/modules/5.4.63/sata_sil.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-sil into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ata-sil_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-sil24/lib/modules/5.4.63/sata_sil24.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-sil24 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ata-sil24_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-via-sata/lib/modules/5.4.63/sata_via.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ata-via-sata into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ata-via-sata_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-block2mtd/lib/modules/5.4.63/block2mtd.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-block2mtd into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-block2mtd_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dax/lib/modules/5.4.63/dax.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dax into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-dax_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-hash/lib/modules/5.4.63/crypto_hash.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-hash into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-hash_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-null/lib/modules/5.4.63/crypto_null.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-null into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-null_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-aead/lib/modules/5.4.63/aead.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-aead into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-aead_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-pcompress into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-pcompress_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-manager/lib/modules/5.4.63/cryptomgr.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-manager into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-manager_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dm/lib/modules/5.4.63/dm-log.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dm/lib/modules/5.4.63/dm-mirror.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dm/lib/modules/5.4.63/dm-crypt.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dm/lib/modules/5.4.63/dm-mod.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dm/lib/modules/5.4.63/dm-region-hash.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dm into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-dm_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-mod/lib/modules/5.4.63/md-mod.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-mod into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-md-mod_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-raid0/lib/modules/5.4.63/raid0.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-raid0 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-md-raid0_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-raid1/lib/modules/5.4.63/raid1.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-raid1 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-md-raid1_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-raid10/lib/modules/5.4.63/raid10.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-raid10 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-md-raid10_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-crc32c/lib/modules/5.4.63/crc32c_generic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-crc32c into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-crc32c_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-crc32c/lib/modules/5.4.63/libcrc32c.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-crc32c into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-crc32c_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-raid6/lib/modules/5.4.63/raid6_pq.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-raid6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-raid6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-xor/lib/modules/5.4.63/xor.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-xor into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-xor_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-raid456/lib/modules/5.4.63/async_raid6_recov.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-raid456/lib/modules/5.4.63/async_pq.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-raid456/lib/modules/5.4.63/async_xor.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-raid456/lib/modules/5.4.63/async_tx.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-raid456/lib/modules/5.4.63/raid456.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-raid456/lib/modules/5.4.63/async_memcpy.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-raid456 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-md-raid456_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dm-raid/lib/modules/5.4.63/dm-raid.ko: 
relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dm-raid into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-dm-raid_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iscsi-initiator/lib/modules/5.4.63/scsi_transport_iscsi.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iscsi-initiator/lib/modules/5.4.63/libiscsi_tcp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iscsi-initiator/lib/modules/5.4.63/iscsi_tcp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iscsi-initiator/lib/modules/5.4.63/libiscsi.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iscsi-initiator into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iscsi-initiator_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-linear/lib/modules/5.4.63/linear.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-linear into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-md-linear_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-multipath/lib/modules/5.4.63/multipath.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-md-multipath into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-md-multipath_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-loop/lib/modules/5.4.63/loop.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-loop into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-loop_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nbd/lib/modules/5.4.63/nbd.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nbd into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nbd_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-scsi-generic/lib/modules/5.4.63/sg.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-scsi-generic into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-scsi-generic_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-scsi-cdrom/lib/modules/5.4.63/sr_mod.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-scsi-cdrom/lib/modules/5.4.63/cdrom.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-scsi-cdrom into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-scsi-cdrom_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-scsi-tape/lib/modules/5.4.63/st.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-scsi-tape into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-scsi-tape_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iosched-bfq/lib/modules/5.4.63/bfq.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iosched-bfq into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iosched-bfq_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can/lib/modules/5.4.63/can-dev.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can/lib/modules/5.4.63/can.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-bcm/lib/modules/5.4.63/can-bcm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-bcm into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-bcm_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-c-can/lib/modules/5.4.63/c_can.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-c-can into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-c-can_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-c-can-pci/lib/modules/5.4.63/c_can_pci.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-c-can-pci into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-c-can-pci_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-regmap-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-regmap-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-c-can-platform/lib/modules/5.4.63/c_can_platform.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-c-can-platform into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-c-can-platform_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-gw/lib/modules/5.4.63/can-gw.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-gw into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-gw_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-mcp251x/lib/modules/5.4.63/mcp251x.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-mcp251x into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-mcp251x_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-raw/lib/modules/5.4.63/can-raw.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-raw into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-raw_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-slcan/lib/modules/5.4.63/slcan.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-slcan into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-slcan_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-base/lib/modules/5.4.63/nls_base.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-base into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-base_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-core/lib/modules/5.4.63/usbcore.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-core/lib/modules/5.4.63/usb-common.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-usb-8dev/lib/modules/5.4.63/usb_8dev.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-usb-8dev into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-usb-8dev_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-usb-ems/lib/modules/5.4.63/ems_usb.ko: 
relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-usb-ems into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-usb-ems_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-usb-esd/lib/modules/5.4.63/esd_usb2.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-usb-esd into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-usb-esd_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-usb-kvaser/lib/modules/5.4.63/kvaser_usb.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-usb-kvaser into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-usb-kvaser_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-usb-peak/lib/modules/5.4.63/peak_usb.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-usb-peak into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-usb-peak_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-vcan/lib/modules/5.4.63/vcan.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-can-vcan into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-can-vcan_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-acompress/lib/modules/5.4.63/crypto_acompress.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-acompress into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-acompress_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-arc4/lib/modules/5.4.63/arc4.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-arc4 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-arc4_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-authenc/lib/modules/5.4.63/authenc.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-authenc into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-authenc_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-cbc/lib/modules/5.4.63/cbc.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-cbc into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-cbc_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-hmac/lib/modules/5.4.63/hmac.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-hmac into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-hmac_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-sha256/lib/modules/5.4.63/libsha256.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-sha256/lib/modules/5.4.63/sha256_generic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-sha256 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-sha256_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/rng.ko' is built-in. rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-rng/lib/modules/5.4.63/drbg.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-rng/lib/modules/5.4.63/jitterentropy_rng.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-rng into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-rng_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-seqiv/lib/modules/5.4.63/seqiv.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-seqiv into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-seqiv_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-ctr/lib/modules/5.4.63/ctr.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-ctr into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-ctr_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-ccm/lib/modules/5.4.63/ccm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-ccm into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-ccm_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-cmac/lib/modules/5.4.63/cmac.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-cmac into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-cmac_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-crc32/lib/modules/5.4.63/crc32_generic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-crc32 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-crc32_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-cts/lib/modules/5.4.63/cts.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-cts into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-cts_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-zlib-inflate/lib/modules/5.4.63/zlib_inflate.ko: relocatable Packaged 
contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-zlib-inflate into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-zlib-inflate_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-zlib-deflate/lib/modules/5.4.63/zlib_deflate.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-zlib-deflate into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-zlib-deflate_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-deflate/lib/modules/5.4.63/deflate.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-deflate into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-deflate_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-des/lib/modules/5.4.63/libdes.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-des/lib/modules/5.4.63/des_generic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-des into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-des_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-ecb/lib/modules/5.4.63/ecb.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-ecb into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-ecb_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-kpp/lib/modules/5.4.63/kpp.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-kpp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-kpp_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-ecdh/lib/modules/5.4.63/ecdh_generic.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-ecdh/lib/modules/5.4.63/ecc.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-ecdh into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-ecdh_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-echainiv/lib/modules/5.4.63/echainiv.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-echainiv into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-echainiv_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-fcrypt/lib/modules/5.4.63/fcrypt.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-fcrypt into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-fcrypt_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-gf128/lib/modules/5.4.63/gf128mul.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-gf128 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-gf128_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-ghash/lib/modules/5.4.63/ghash-generic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-ghash into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-ghash_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-gcm/lib/modules/5.4.63/gcm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-gcm into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-gcm_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-xcbc/lib/modules/5.4.63/xcbc.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-xcbc into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-xcbc_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-asn1-decoder/lib/modules/5.4.63/asn1_decoder.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-asn1-decoder into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-asn1-decoder_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-rsa/lib/modules/5.4.63/akcipher.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-rsa/lib/modules/5.4.63/mpi.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-rsa/lib/modules/5.4.63/rsa_generic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-rsa into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-rsa_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-sha1/lib/modules/5.4.63/sha1_generic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-sha1 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-sha1_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/char/hw_random/rng-core.ko' is built-in. 
Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-random-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-random-core_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-hw-ccp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-hw-ccp_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-hw-geode into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-hw-geode_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-hw-hifn-795x/lib/modules/5.4.63/hifn_795x.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-hw-hifn-795x into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-hw-hifn-795x_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-hw-padlock into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-hw-padlock_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-hw-talitos into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-hw-talitos_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-md4/lib/modules/5.4.63/md4.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-md4 into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-md4_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-md5/lib/modules/5.4.63/md5.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-md5 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-md5_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-michael-mic/lib/modules/5.4.63/michael_mic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-michael-mic into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-michael-mic_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-xts/lib/modules/5.4.63/xts.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-xts into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-xts_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/anubis.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/camellia_generic.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/twofish_generic.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/wp512.ko: relocatable 
rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/twofish_common.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/blowfish_common.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/cast5_generic.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/tgr192.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/serpent_generic.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/cast6_generic.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/khazad.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/tea.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/blowfish_generic.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc/lib/modules/5.4.63/cast_common.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-misc into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-misc_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-pcbc/lib/modules/5.4.63/pcbc.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-pcbc into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-pcbc_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-rmd160/lib/modules/5.4.63/rmd160.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-rmd160 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-rmd160_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-sha512/lib/modules/5.4.63/sha512_generic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-sha512 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-sha512_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-test/lib/modules/5.4.63/tcrypt.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-test into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-test_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-user/lib/modules/5.4.63/algif_skcipher.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-user/lib/modules/5.4.63/algif_aead.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-user/lib/modules/5.4.63/af_alg.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-user/lib/modules/5.4.63/algif_hash.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-user/lib/modules/5.4.63/algif_rng.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-user/lib/modules/5.4.63/crypto_user.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-user into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-user_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-crypto-wq into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-crypto-wq_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-crc-itu-t/lib/modules/5.4.63/crc-itu-t.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-crc-itu-t into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-crc-itu-t_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-firewire/lib/modules/5.4.63/firewire-core.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-firewire into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-firewire_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-firewire-net/lib/modules/5.4.63/firewire-net.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-firewire-net into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-firewire-net_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-firewire-ohci/lib/modules/5.4.63/firewire-ohci.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-firewire-ohci into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-firewire-ohci_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-firewire-sbp2/lib/modules/5.4.63/firewire-sbp2.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-firewire-sbp2 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-firewire-sbp2_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-autofs4/lib/modules/5.4.63/autofs4.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-autofs4 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-autofs4_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-lzo/lib/modules/5.4.63/lzo_decompress.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-lzo/lib/modules/5.4.63/lzo_compress.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-lzo/lib/modules/5.4.63/lzo.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-lzo into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-lzo_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-zstd/lib/modules/5.4.63/xxhash.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-zstd/lib/modules/5.4.63/zstd_compress.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-zstd/lib/modules/5.4.63/zstd_decompress.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-zstd into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-zstd_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-btrfs/lib/modules/5.4.63/btrfs.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-btrfs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-btrfs_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-cifs/lib/modules/5.4.63/cifs.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-cifs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-cifs_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-configfs/lib/modules/5.4.63/configfs.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-configfs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-configfs_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-cramfs/lib/modules/5.4.63/cramfs.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-cramfs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-cramfs_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/exportfs/exportfs.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-exportfs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-exportfs_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-crc16/lib/modules/5.4.63/crc16.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-crc16 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-crc16_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-ext4/lib/modules/5.4.63/mbcache.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-ext4/lib/modules/5.4.63/jbd2.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-ext4/lib/modules/5.4.63/ext4.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-ext4 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-ext4_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-f2fs/lib/modules/5.4.63/f2fs.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-f2fs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-f2fs_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-fscache/lib/modules/5.4.63/fscache.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-fscache into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-fscache_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-hfs/lib/modules/5.4.63/hfs.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-hfs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-hfs_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-utf8/lib/modules/5.4.63/nls_utf8.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-utf8 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-utf8_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-hfsplus/lib/modules/5.4.63/hfsplus.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-hfsplus into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-hfsplus_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-isofs/lib/modules/5.4.63/isofs.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-isofs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-isofs_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-jfs/lib/modules/5.4.63/jfs.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-jfs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-jfs_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-minix/lib/modules/5.4.63/minix.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-minix into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-minix_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp437/lib/modules/5.4.63/nls_cp437.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp437 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-cp437_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-iso8859-1/lib/modules/5.4.63/nls_iso8859-1.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-iso8859-1 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-iso8859-1_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-vfat/lib/modules/5.4.63/vfat.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-vfat/lib/modules/5.4.63/fat.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-vfat into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-vfat_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-msdos/lib/modules/5.4.63/msdos.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-msdos into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-msdos_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs-common/lib/modules/5.4.63/lockd.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs-common/lib/modules/5.4.63/grace.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs-common/lib/modules/5.4.63/sunrpc.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs-common into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-nfs-common_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dnsresolver/lib/modules/5.4.63/dns_resolver.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dnsresolver into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-dnsresolver_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs/lib/modules/5.4.63/nfs.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-nfs_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs-common-rpcsec/lib/modules/5.4.63/oid_registry.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs-common-rpcsec/lib/modules/5.4.63/rpcsec_gss_krb5.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs-common-rpcsec/lib/modules/5.4.63/auth_rpcgss.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs-common-rpcsec into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-nfs-common-rpcsec_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs-v3/lib/modules/5.4.63/nfsv3.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs-v3 into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-nfs-v3_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs-v4/lib/modules/5.4.63/nfsv4.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfs-v4 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-nfs-v4_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfsd/lib/modules/5.4.63/nfsd.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-nfsd into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-nfsd_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-ntfs/lib/modules/5.4.63/ntfs.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-ntfs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-ntfs_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-reiserfs/lib/modules/5.4.63/reiserfs.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-reiserfs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-reiserfs_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/squashfs/squashfs.ko' is built-in. 
Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-squashfs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-squashfs_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-udf/lib/modules/5.4.63/udf.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-udf into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-udf_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-xfs/lib/modules/5.4.63/xfs.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fs-xfs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fs-xfs_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fuse/lib/modules/5.4.63/fuse.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fuse into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fuse_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-core/lib/modules/5.4.63/hwmon.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-core_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/i2c-core.ko' is built-in. 
NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/i2c-dev.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-i2c-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-ad7418/lib/modules/5.4.63/ad7418.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-ad7418 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-ad7418_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-ads1015 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-ads1015_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-adt7410/lib/modules/5.4.63/adt7410.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-adt7410/lib/modules/5.4.63/adt7x10.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-adt7410 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-adt7410_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-vid/lib/modules/5.4.63/hwmon-vid.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-vid into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-vid_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-adt7475/lib/modules/5.4.63/adt7475.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-adt7475 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-adt7475_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-dme1737/lib/modules/5.4.63/dme1737.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-dme1737 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-dme1737_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-drivetemp/lib/modules/5.4.63/drivetemp.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-drivetemp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-drivetemp_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-gpiofan/lib/modules/5.4.63/gpio-fan.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-gpiofan into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-gpiofan_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-ina209/lib/modules/5.4.63/ina209.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-ina209 into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-ina209_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/base/regmap/regmap-i2c.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-regmap-i2c into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-regmap-i2c_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-ina2xx/lib/modules/5.4.63/ina2xx.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-ina2xx into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-ina2xx_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-it87/lib/modules/5.4.63/it87.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-it87 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-it87_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm63/lib/modules/5.4.63/lm63.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm63 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-lm63_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm75/lib/modules/5.4.63/lm75.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm75 into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-lm75_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm77/lib/modules/5.4.63/lm77.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm77 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-lm77_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm85/lib/modules/5.4.63/lm85.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm85 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-lm85_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm90/lib/modules/5.4.63/lm90.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm90 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-lm90_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm92/lib/modules/5.4.63/lm92.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm92 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-lm92_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm95241/lib/modules/5.4.63/lm95241.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-lm95241 into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-lm95241_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-ltc4151/lib/modules/5.4.63/ltc4151.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-ltc4151 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-ltc4151_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-mcp3021/lib/modules/5.4.63/mcp3021.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-mcp3021 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-mcp3021_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pmbus-core/lib/modules/5.4.63/pmbus_core.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pmbus-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-pmbus-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pmbus-zl6100/lib/modules/5.4.63/zl6100.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pmbus-zl6100 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-pmbus-zl6100_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-pwmfan into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-pwmfan_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-sch5627/lib/modules/5.4.63/sch5627.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-sch5627/lib/modules/5.4.63/sch56xx-common.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-sch5627 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-sch5627_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-sht21/lib/modules/5.4.63/sht21.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-sht21 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-sht21_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-tmp102/lib/modules/5.4.63/tmp102.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-tmp102 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-tmp102_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-tmp103/lib/modules/5.4.63/tmp103.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-tmp103 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-tmp103_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-tmp421/lib/modules/5.4.63/tmp421.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-tmp421 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-tmp421_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-w83793/lib/modules/5.4.63/w83793.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-w83793 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-w83793_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-adcxx/lib/modules/5.4.63/adcxx.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hwmon-adcxx into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hwmon-adcxx_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/algos/i2c-algo-bit.ko' is built-in. 
Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-algo-bit into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-i2c-algo-bit_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-algo-pca/lib/modules/5.4.63/i2c-algo-pca.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-algo-pca into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-i2c-algo-pca_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-algo-pcf/lib/modules/5.4.63/i2c-algo-pcf.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-algo-pcf into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-i2c-algo-pcf_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/busses/i2c-gpio.ko' is built-in. 
Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-gpio into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-i2c-gpio_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-mux/lib/modules/5.4.63/i2c-mux.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-mux into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-i2c-mux_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-mux-gpio/lib/modules/5.4.63/i2c-mux-gpio.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-mux-gpio into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-i2c-mux-gpio_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-mux-pca9541/lib/modules/5.4.63/i2c-mux-pca9541.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-mux-pca9541 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-i2c-mux-pca9541_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-mux-pca954x/lib/modules/5.4.63/i2c-mux-pca954x.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-mux-pca954x into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-i2c-mux-pca954x_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-pxa 
into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-i2c-pxa_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-smbus/lib/modules/5.4.63/i2c-smbus.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-smbus into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-i2c-smbus_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-tiny-usb/lib/modules/5.4.63/i2c-tiny-usb.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i2c-tiny-usb into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-i2c-tiny-usb_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-core/lib/modules/5.4.63/industrialio.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-kfifo-buf/lib/modules/5.4.63/kfifo_buf.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-kfifo-buf into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-kfifo-buf_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-industrialio-triggered-buffer/lib/modules/5.4.63/industrialio-triggered-buffer.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-industrialio-triggered-buffer into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-industrialio-triggered-buffer_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-ad799x/lib/modules/5.4.63/ad799x.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-ad799x into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-ad799x_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-hmc5843/lib/modules/5.4.63/hmc5843_i2c.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-hmc5843/lib/modules/5.4.63/hmc5843_core.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-hmc5843 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-hmc5843_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bh1750/lib/modules/5.4.63/bh1750.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bh1750 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-bh1750_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-am2315/lib/modules/5.4.63/am2315.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-am2315 into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-am2315_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-dht11/lib/modules/5.4.63/dht11.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-dht11 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-dht11_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bme680/lib/modules/5.4.63/bme680_core.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bme680 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-bme680_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bme680-i2c/lib/modules/5.4.63/bme680_i2c.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bme680-i2c into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-bme680-i2c_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-regmap-spi/lib/modules/5.4.63/regmap-spi.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-regmap-spi into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-regmap-spi_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bme680-spi/lib/modules/5.4.63/bme680_spi.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bme680-spi into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-bme680-spi_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bmp280/lib/modules/5.4.63/bmp280.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bmp280 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-bmp280_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bmp280-i2c/lib/modules/5.4.63/bmp280-i2c.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bmp280-i2c into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-bmp280-i2c_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/spi/spi-bitbang.ko' is built-in. 
Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-spi-bitbang into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-spi-bitbang_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bmp280-spi/lib/modules/5.4.63/bmp280-spi.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-bmp280-spi into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-bmp280-spi_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-htu21/lib/modules/5.4.63/ms_sensors_i2c.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-htu21/lib/modules/5.4.63/htu21.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-htu21 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-htu21_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-ccs811/lib/modules/5.4.63/ccs811.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-ccs811 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-ccs811_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-si7020/lib/modules/5.4.63/si7020.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-si7020 into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-si7020_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-st_accel/lib/modules/5.4.63/st_accel.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-st_accel/lib/modules/5.4.63/st_sensors.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-st_accel into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-st_accel_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-st_accel-i2c/lib/modules/5.4.63/st_sensors_i2c.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-st_accel-i2c/lib/modules/5.4.63/st_accel_i2c.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-st_accel-i2c into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-st_accel-i2c_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-st_accel-spi/lib/modules/5.4.63/st_sensors_spi.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-st_accel-spi/lib/modules/5.4.63/st_accel_spi.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-st_accel-spi into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-st_accel-spi_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-lsm6dsx/lib/modules/5.4.63/st_lsm6dsx.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-lsm6dsx into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-lsm6dsx_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-lsm6dsx-i2c/lib/modules/5.4.63/st_lsm6dsx_i2c.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-lsm6dsx-i2c into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-lsm6dsx-i2c_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-lsm6dsx-spi/lib/modules/5.4.63/st_lsm6dsx_spi.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-lsm6dsx-spi into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-lsm6dsx-spi_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-crc8/lib/modules/5.4.63/crc8.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-crc8 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-crc8_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-sps30/lib/modules/5.4.63/sps30.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-sps30 into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-sps30_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-tsl4531/lib/modules/5.4.63/tsl4531.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-tsl4531 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-tsl4531_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-fxos8700/lib/modules/5.4.63/fxos8700_core.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-fxos8700 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-fxos8700_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-fxos8700-i2c/lib/modules/5.4.63/fxos8700_i2c.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-fxos8700-i2c into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-fxos8700-i2c_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-fxos8700-spi/lib/modules/5.4.63/fxos8700_spi.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iio-fxos8700-spi into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iio-fxos8700-spi_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-core/lib/modules/5.4.63/input-core.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-input-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-evdev/lib/modules/5.4.63/evdev.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-evdev into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-input-evdev_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hid/lib/modules/5.4.63/hid.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hid into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hid_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hid-generic/lib/modules/5.4.63/hid-generic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hid-generic into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hid-generic_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-gpio-keys/lib/modules/5.4.63/gpio_keys.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-gpio-keys into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-input-gpio-keys_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-polldev/lib/modules/5.4.63/input-polldev.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-polldev into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-input-polldev_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-gpio-keys-polled/lib/modules/5.4.63/gpio_keys_polled.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-gpio-keys-polled into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-input-gpio-keys-polled_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-gpio-encoder/lib/modules/5.4.63/rotary_encoder.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-gpio-encoder into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-input-gpio-encoder_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-joydev/lib/modules/5.4.63/joydev.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-joydev into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-input-joydev_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-matrixkmap/lib/modules/5.4.63/matrix-keymap.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-matrixkmap into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-input-matrixkmap_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-touchscreen-ads7846/lib/modules/5.4.63/of_touchscreen.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-touchscreen-ads7846/lib/modules/5.4.63/ads7846.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-touchscreen-ads7846 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-input-touchscreen-ads7846_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-uinput/lib/modules/5.4.63/uinput.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-input-uinput into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-input-uinput_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/leds-gpio.ko' is built-in. 
Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-leds-gpio into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-leds-gpio_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ledtrig-activity/lib/modules/5.4.63/ledtrig-activity.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ledtrig-activity into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ledtrig-activity_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ledtrig-heartbeat/lib/modules/5.4.63/ledtrig-heartbeat.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ledtrig-heartbeat into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ledtrig-heartbeat_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ledtrig-gpio/lib/modules/5.4.63/ledtrig-gpio.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ledtrig-gpio into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ledtrig-gpio_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-netdev.ko' is built-in. 
Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ledtrig-netdev into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ledtrig-netdev_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-default-on.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ledtrig-default-on into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ledtrig-default-on_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-timer.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ledtrig-timer into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ledtrig-timer_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ledtrig-transient/lib/modules/5.4.63/ledtrig-transient.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ledtrig-transient into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ledtrig-transient_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ledtrig-oneshot/lib/modules/5.4.63/ledtrig-oneshot.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ledtrig-oneshot into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ledtrig-oneshot_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-leds-pca963x/lib/modules/5.4.63/leds-pca963x.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-leds-pca963x into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-leds-pca963x_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-crc-ccitt/lib/modules/5.4.63/crc-ccitt.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-crc-ccitt into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-crc-ccitt_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-crc7/lib/modules/5.4.63/crc7.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-crc7 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-crc7_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-lz4/lib/modules/5.4.63/lz4_decompress.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-lz4/lib/modules/5.4.63/lz4_compress.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-lz4/lib/modules/5.4.63/lz4.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-lz4 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-lz4_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-textsearch/lib/modules/5.4.63/ts_kmp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-textsearch/lib/modules/5.4.63/ts_bm.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-textsearch/lib/modules/5.4.63/ts_fsm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-textsearch into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-textsearch_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-cordic/lib/modules/5.4.63/cordic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lib-cordic into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lib-cordic_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mii/lib/modules/5.4.63/mii.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mii into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mii_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sis190/lib/modules/5.4.63/sis190.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sis190 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sis190_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-skge/lib/modules/5.4.63/skge.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-skge into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-skge_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mdio/lib/modules/5.4.63/mdio.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mdio into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mdio_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-alx/lib/modules/5.4.63/alx.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-alx into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-alx_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atl2/lib/modules/5.4.63/atl2.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atl2 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-atl2_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atl1/lib/modules/5.4.63/atl1.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atl1 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-atl1_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atl1c/lib/modules/5.4.63/atl1c.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atl1c into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-atl1c_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atl1e/lib/modules/5.4.63/atl1e.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atl1e into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-atl1e_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/libphy.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-libphy into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-libphy_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-phylink into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-phylink_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/mdio-gpio.ko' is built-in. NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/mdio-bitbang.ko' is built-in. 
Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mdio-gpio into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mdio-gpio_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-et131x/lib/modules/5.4.63/et131x.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-et131x into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-et131x_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/bcm-phy-lib.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-phylib-broadcom into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-phylib-broadcom_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/broadcom.ko' is built-in. 
Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-phy-broadcom into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-phy-broadcom_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-phy-bcm84881/lib/modules/5.4.63/bcm84881.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-phy-bcm84881 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-phy-bcm84881_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-phy-realtek/lib/modules/5.4.63/realtek.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-phy-realtek into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-phy-realtek_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/swconfig.ko' is built-in. 
Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-swconfig into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-swconfig_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-bcm53xx/lib/modules/5.4.63/b53_common.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-bcm53xx into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-switch-bcm53xx_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-bcm53xx-mdio/lib/modules/5.4.63/b53_mdio.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-bcm53xx-mdio into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-switch-bcm53xx-mdio_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-mvsw61xx/lib/modules/5.4.63/mvsw61xx.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-mvsw61xx into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-switch-mvsw61xx_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/ip17xx.ko' is built-in. 
Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-ip17xx into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-switch-ip17xx_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-rtl8306/lib/modules/5.4.63/rtl8306.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-rtl8306 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-switch-rtl8306_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/rtl8366_smi.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-rtl8366-smi into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-switch-rtl8366-smi_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/rtl8366rb.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-rtl8366rb into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-switch-rtl8366rb_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/rtl8366s.ko' is built-in. 
Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-rtl8366s into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-switch-rtl8366s_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-rtl8367b/lib/modules/5.4.63/rtl8367b.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-switch-rtl8367b into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-switch-rtl8367b_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-natsemi/lib/modules/5.4.63/natsemi.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-natsemi into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-natsemi_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-r6040/lib/modules/5.4.63/r6040.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-r6040 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-r6040_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-niu/lib/modules/5.4.63/niu.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-niu into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-niu_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sis900/lib/modules/5.4.63/sis900.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sis900 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sis900_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sky2/lib/modules/5.4.63/sky2.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sky2 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sky2_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-via-rhine/lib/modules/5.4.63/via-rhine.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-via-rhine into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-via-rhine_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-via-velocity/lib/modules/5.4.63/via-velocity.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-via-velocity into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-via-velocity_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-8139too/lib/modules/5.4.63/8139too.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-8139too into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-8139too_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-8139cp/lib/modules/5.4.63/8139cp.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-8139cp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-8139cp_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-r8169/lib/modules/5.4.63/r8169.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-r8169 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-r8169_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ne2k-pci/lib/modules/5.4.63/8390.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ne2k-pci/lib/modules/5.4.63/ne2k-pci.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ne2k-pci into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ne2k-pci_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-e100/lib/modules/5.4.63/e100.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-e100 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-e100_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-e1000/lib/modules/5.4.63/e1000.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-e1000 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-e1000_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pps/lib/modules/5.4.63/pps_core.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pps into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-pps_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ptp/lib/modules/5.4.63/ptp.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ptp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ptp_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-igb/lib/modules/5.4.63/igb.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-igb into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-igb_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ixgbe/lib/modules/5.4.63/ixgbe.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ixgbe into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ixgbe_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ixgbevf into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ixgbevf_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i40e/lib/modules/5.4.63/i40e.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-i40e into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-i40e_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iavf into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iavf_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ssb/lib/modules/5.4.63/ssb.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ssb into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ssb_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-b44/lib/modules/5.4.63/b44.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-b44 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-b44_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-3c59x/lib/modules/5.4.63/3c59x.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-3c59x into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-3c59x_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pcnet32/lib/modules/5.4.63/pcnet32.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pcnet32 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-pcnet32_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tg3/lib/modules/5.4.63/tg3.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tg3 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-tg3_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-misdn/lib/modules/5.4.63/l1oip.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-misdn/lib/modules/5.4.63/mISDN_dsp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-misdn/lib/modules/5.4.63/mISDN_core.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-misdn into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-misdn_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hfcpci/lib/modules/5.4.63/hfcpci.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hfcpci into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hfcpci_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hfcmulti/lib/modules/5.4.63/hfcmulti.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-hfcmulti into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-hfcmulti_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-macvlan/lib/modules/5.4.63/macvlan.ko: 
relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-macvlan into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-macvlan_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tulip/lib/modules/5.4.63/tulip.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tulip/lib/modules/5.4.63/uli526x.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tulip/lib/modules/5.4.63/dmfe.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tulip/lib/modules/5.4.63/de2104x.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tulip/lib/modules/5.4.63/winbond-840.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tulip into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-tulip_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atm/lib/modules/5.4.63/br2684.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atm/lib/modules/5.4.63/atm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atm into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-atm_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-solos-pci/lib/modules/5.4.63/solos-pci.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-solos-pci into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-solos-pci_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dummy/lib/modules/5.4.63/dummy.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dummy into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-dummy_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ifb/lib/modules/5.4.63/ifb.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ifb into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ifb_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dm9000/lib/modules/5.4.63/dm9000.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dm9000 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-dm9000_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-forcedeth/lib/modules/5.4.63/forcedeth.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-forcedeth into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-forcedeth_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/fixed_phy.ko' is built-in. 
NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/of/of_mdio.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-of-mdio into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-of-mdio_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-vmxnet3/lib/modules/5.4.63/vmxnet3.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-vmxnet3 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-vmxnet3_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-spi-ks8995/lib/modules/5.4.63/spi_ks8995.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-spi-ks8995 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-spi-ks8995_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ethoc/lib/modules/5.4.63/ethoc.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ethoc into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ethoc_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bnx2/lib/modules/5.4.63/bnx2.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bnx2 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-bnx2_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bnx2x/lib/modules/5.4.63/bnx2x.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bnx2x into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-bnx2x_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-be2net/lib/modules/5.4.63/be2net.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-be2net into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-be2net_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mlx4-core/lib/modules/5.4.63/mlx4_en.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mlx4-core/lib/modules/5.4.63/mlx4_core.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mlx4-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mlx4-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mlx5-core/lib/modules/5.4.63/mlx5_core.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mlx5-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mlx5-core_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sfp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sfp_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-reject/lib/modules/5.4.63/nf_reject_ipv4.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-reject into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-reject_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-reject6/lib/modules/5.4.63/nf_reject_ipv6.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-reject6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-reject6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipt/lib/modules/5.4.63/ip_tables.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipt/lib/modules/5.4.63/x_tables.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipt into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-ipt_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipt6/lib/modules/5.4.63/ip6_tables.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipt6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-ipt6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/xt_LOG.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/nf_log_ipv4.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/xt_comment.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/xt_limit.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/nf_log_common.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/ipt_REJECT.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/xt_TCPMSS.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/xt_mark.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/xt_multiport.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/xt_tcpudp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/iptable_filter.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/xt_mac.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/xt_time.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core/lib/modules/5.4.63/iptable_mangle.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-conntrack/lib/modules/5.4.63/nf_conntrack_rtcache.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-conntrack/lib/modules/5.4.63/nf_defrag_ipv4.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-conntrack/lib/modules/5.4.63/nf_conntrack.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-conntrack/lib/modules/5.4.63/nf_defrag_ipv6.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-conntrack into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-conntrack_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-conntrack6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-conntrack6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nat/lib/modules/5.4.63/nf_nat.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nat into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-nat_5.4.63-1_mips_24kc.ipk Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nat6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-nat6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-flow/lib/modules/5.4.63/nf_flow_table_hw.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-flow/lib/modules/5.4.63/nf_flow_table.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-flow into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-flow_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-conntrack/lib/modules/5.4.63/xt_state.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-conntrack/lib/modules/5.4.63/xt_conntrack.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-conntrack/lib/modules/5.4.63/xt_CT.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-conntrack into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-conntrack_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-conntrack-extra/lib/modules/5.4.63/xt_helper.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-conntrack-extra/lib/modules/5.4.63/xt_recent.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-conntrack-extra/lib/modules/5.4.63/nf_conncount.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-conntrack-extra/lib/modules/5.4.63/xt_connlimit.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-conntrack-extra/lib/modules/5.4.63/xt_connmark.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-conntrack-extra/lib/modules/5.4.63/xt_connbytes.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-conntrack-extra into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-conntrack-extra_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-conntrack-label/lib/modules/5.4.63/xt_connlabel.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-conntrack-label into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-conntrack-label_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-filter/lib/modules/5.4.63/xt_string.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-filter/lib/modules/5.4.63/xt_bpf.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-filter into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-filter_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-offload/lib/modules/5.4.63/xt_FLOWOFFLOAD.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-offload into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-offload_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipopt/lib/modules/5.4.63/xt_CLASSIFY.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipopt/lib/modules/5.4.63/xt_HL.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipopt/lib/modules/5.4.63/xt_tcpmss.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipopt/lib/modules/5.4.63/xt_hl.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipopt/lib/modules/5.4.63/xt_ecn.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipopt/lib/modules/5.4.63/ipt_ECN.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipopt/lib/modules/5.4.63/xt_dscp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipopt/lib/modules/5.4.63/xt_DSCP.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipopt/lib/modules/5.4.63/xt_length.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipopt/lib/modules/5.4.63/xt_statistic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipopt into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-ipopt_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipsec/lib/modules/5.4.63/ipt_ah.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipsec/lib/modules/5.4.63/xt_esp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipsec/lib/modules/5.4.63/xt_policy.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipsec into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-ipsec_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nfnetlink/lib/modules/5.4.63/nfnetlink.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nfnetlink into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nfnetlink_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_hash_netnet.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_hash_ipmark.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_hash_ipport.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_hash_netport.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_hash_ip.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_hash_ipportip.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_hash_netportnet.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/xt_set.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_hash_ipportnet.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_hash_net.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_hash_netiface.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_hash_mac.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_bitmap_ip.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_bitmap_port.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_bitmap_ipmac.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset/lib/modules/5.4.63/ip_set_list_set.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ipset into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-ipset_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/ip_vs_sed.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/ip_vs_sh.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/ip_vs_fo.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/ip_vs_ovf.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/ip_vs_wrr.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/ip_vs_lblc.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/ip_vs_wlc.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/ip_vs.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/ip_vs_nq.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/ip_vs_rr.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/xt_ipvs.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/ip_vs_dh.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/ip_vs_lc.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs/lib/modules/5.4.63/ip_vs_lblcr.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-ipvs_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper/lib/modules/5.4.63/nf_conntrack_ftp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper/lib/modules/5.4.63/nf_nat_ftp.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-nathelper_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs-ftp/lib/modules/5.4.63/ip_vs_ftp.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs-ftp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-ipvs-ftp_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-raw/lib/modules/5.4.63/iptable_raw.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-raw into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-raw_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_nat_tftp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_nat_irc.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_conntrack_h323.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_conntrack_amanda.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_conntrack_sip.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_nat_snmp_basic.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_conntrack_pptp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_conntrack_tftp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_nat_h323.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_conntrack_snmp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_conntrack_broadcast.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_nat_pptp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_nat_sip.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_conntrack_irc.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra/lib/modules/5.4.63/nf_nat_amanda.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-nathelper-extra into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-nathelper-extra_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs-sip/lib/modules/5.4.63/ip_vs_pe_sip.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-ipvs-sip into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-ipvs-sip_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nat/lib/modules/5.4.63/xt_REDIRECT.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nat/lib/modules/5.4.63/xt_nat.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nat/lib/modules/5.4.63/xt_MASQUERADE.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nat/lib/modules/5.4.63/iptable_nat.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nat into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-nat_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6tables/lib/modules/5.4.63/ip6table_filter.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6tables/lib/modules/5.4.63/ip6table_mangle.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6tables/lib/modules/5.4.63/ip6t_REJECT.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6tables/lib/modules/5.4.63/nf_log_ipv6.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6tables into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ip6tables_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-raw6/lib/modules/5.4.63/ip6table_raw.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-raw6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-raw6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nat6/lib/modules/5.4.63/ip6table_nat.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nat6/lib/modules/5.4.63/ip6t_NPT.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nat6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-nat6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nat-extra/lib/modules/5.4.63/xt_NETMAP.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nat-extra into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-nat-extra_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-ulog into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-ulog_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nfnetlink-log/lib/modules/5.4.63/nfnetlink_log.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nfnetlink-log into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nfnetlink-log_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nflog/lib/modules/5.4.63/xt_NFLOG.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nflog into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-nflog_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nfnetlink-queue/lib/modules/5.4.63/nfnetlink_queue.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nfnetlink-queue into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nfnetlink-queue_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nfqueue/lib/modules/5.4.63/xt_NFQUEUE.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-nfqueue into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-nfqueue_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-debug/lib/modules/5.4.63/xt_TRACE.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-debug into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-debug_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-led/lib/modules/5.4.63/xt_LED.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-led into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-led_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-tproxy/lib/modules/5.4.63/nf_tproxy_ipv6.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-tproxy/lib/modules/5.4.63/nf_socket_ipv6.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-tproxy/lib/modules/5.4.63/nf_tproxy_ipv4.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-tproxy/lib/modules/5.4.63/nf_socket_ipv4.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-tproxy/lib/modules/5.4.63/xt_TPROXY.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-tproxy/lib/modules/5.4.63/xt_socket.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-tproxy into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-tproxy_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-tee/lib/modules/5.4.63/xt_TEE.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-tee/lib/modules/5.4.63/nf_dup_ipv6.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-tee/lib/modules/5.4.63/nf_dup_ipv4.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-tee into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-tee_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-u32/lib/modules/5.4.63/xt_u32.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-u32 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-u32_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-checksum/lib/modules/5.4.63/xt_CHECKSUM.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-checksum into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-checksum_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-iprange/lib/modules/5.4.63/xt_iprange.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-iprange into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-iprange_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-cluster/lib/modules/5.4.63/xt_cluster.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-cluster into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-cluster_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-clusterip/lib/modules/5.4.63/ipt_CLUSTERIP.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-clusterip into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-clusterip_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-extra/lib/modules/5.4.63/xt_pkttype.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-extra/lib/modules/5.4.63/xt_addrtype.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-extra/lib/modules/5.4.63/xt_quota.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-extra/lib/modules/5.4.63/xt_owner.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-extra into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-extra_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-br-netfilter/lib/modules/5.4.63/br_netfilter.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-br-netfilter into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-br-netfilter_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-physdev/lib/modules/5.4.63/xt_physdev.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-physdev into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-physdev_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6tables-extra/lib/modules/5.4.63/ip6t_ah.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6tables-extra/lib/modules/5.4.63/ip6t_rt.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6tables-extra/lib/modules/5.4.63/ip6t_mh.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6tables-extra/lib/modules/5.4.63/ip6t_frag.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6tables-extra/lib/modules/5.4.63/ip6t_eui64.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6tables-extra/lib/modules/5.4.63/ip6t_ipv6header.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6tables-extra/lib/modules/5.4.63/ip6t_hbh.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6tables-extra into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ip6tables-extra_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-arptables/lib/modules/5.4.63/arptable_filter.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-arptables/lib/modules/5.4.63/arpt_mangle.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-arptables/lib/modules/5.4.63/arp_tables.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-arptables into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-arptables_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables/lib/modules/5.4.63/ebtable_broute.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables/lib/modules/5.4.63/ebt_stp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables/lib/modules/5.4.63/ebt_vlan.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables/lib/modules/5.4.63/ebt_limit.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables/lib/modules/5.4.63/ebtable_nat.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables/lib/modules/5.4.63/ebt_among.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables/lib/modules/5.4.63/ebt_802_3.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables/lib/modules/5.4.63/ebt_redirect.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables/lib/modules/5.4.63/ebt_pkttype.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables/lib/modules/5.4.63/ebt_mark.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables/lib/modules/5.4.63/ebtables.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables/lib/modules/5.4.63/ebt_mark_m.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables/lib/modules/5.4.63/ebtable_filter.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ebtables_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables-ipv4/lib/modules/5.4.63/ebt_dnat.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables-ipv4/lib/modules/5.4.63/ebt_arpreply.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables-ipv4/lib/modules/5.4.63/ebt_ip.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables-ipv4/lib/modules/5.4.63/ebt_arp.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables-ipv4/lib/modules/5.4.63/ebt_snat.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables-ipv4 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ebtables-ipv4_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables-ipv6/lib/modules/5.4.63/ebt_ip6.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables-ipv6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ebtables-ipv6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables-watchers/lib/modules/5.4.63/ebt_log.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables-watchers/lib/modules/5.4.63/ebt_nflog.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ebtables-watchers into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ebtables-watchers_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-conntrack-netlink/lib/modules/5.4.63/nf_conntrack_netlink.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nf-conntrack-netlink into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nf-conntrack-netlink_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-hashlimit/lib/modules/5.4.63/xt_hashlimit.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-hashlimit into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-hashlimit_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-rpfilter/lib/modules/5.4.63/ipt_rpfilter.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-rpfilter/lib/modules/5.4.63/ip6t_rpfilter.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipt-rpfilter into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipt-rpfilter_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nft_limit.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nft_hash.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nft_quota.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nf_tables_set.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nft_log.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nft_reject.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nft_reject_ipv6.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nft_numgen.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nft_ct.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nft_reject_ipv4.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nft_redir.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nf_tables.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nft_counter.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nft_reject_inet.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core/lib/modules/5.4.63/nft_objref.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nft-core_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-arp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nft-arp_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-bridge/lib/modules/5.4.63/nft_reject_bridge.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-bridge/lib/modules/5.4.63/nft_meta_bridge.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-bridge into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nft-bridge_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-nat/lib/modules/5.4.63/nft_nat.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-nat/lib/modules/5.4.63/nft_masq.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-nat into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nft-nat_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-offload/lib/modules/5.4.63/nf_flow_table_ipv4.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-offload/lib/modules/5.4.63/nf_flow_table_inet.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-offload/lib/modules/5.4.63/nft_flow_offload.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-offload/lib/modules/5.4.63/nf_flow_table_ipv6.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-offload into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nft-offload_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-nat6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nft-nat6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-netdev/lib/modules/5.4.63/nft_fwd_netdev.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-netdev/lib/modules/5.4.63/nft_dup_netdev.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-netdev/lib/modules/5.4.63/nf_dup_netdev.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-netdev into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nft-netdev_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-fib/lib/modules/5.4.63/nft_fib_ipv6.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-fib/lib/modules/5.4.63/nft_fib_ipv4.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-fib/lib/modules/5.4.63/nft_fib.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-fib/lib/modules/5.4.63/nft_fib_inet.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nft-fib into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nft-fib_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atmtcp/lib/modules/5.4.63/atmtcp.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atmtcp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-atmtcp_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bonding/lib/modules/5.4.63/bonding.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bonding into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-bonding_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-udptunnel4/lib/modules/5.4.63/udp_tunnel.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-udptunnel4 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-udptunnel4_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-udptunnel6/lib/modules/5.4.63/ip6_udp_tunnel.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-udptunnel6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-udptunnel6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iptunnel/lib/modules/5.4.63/ip_tunnel.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iptunnel into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iptunnel_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-vxlan/lib/modules/5.4.63/vxlan.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-vxlan into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-vxlan_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-geneve/lib/modules/5.4.63/geneve.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-geneve into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-geneve_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nsh/lib/modules/5.4.63/nsh.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nsh into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nsh_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-capi/lib/modules/5.4.63/capi.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-capi/lib/modules/5.4.63/kernelcapi.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-capi into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-capi_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-slhc/lib/modules/5.4.63/slhc.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-slhc into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-slhc_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ppp/lib/modules/5.4.63/ppp_async.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ppp/lib/modules/5.4.63/ppp_generic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ppp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ppp_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-isdn4linux into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-isdn4linux_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iptunnel4/lib/modules/5.4.63/tunnel4.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iptunnel4 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iptunnel4_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipip/lib/modules/5.4.63/ipip.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipip into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipip_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec/lib/modules/5.4.63/xfrm_ipcomp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec/lib/modules/5.4.63/xfrm_algo.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec/lib/modules/5.4.63/af_key.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec/lib/modules/5.4.63/xfrm_user.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipsec_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec4/lib/modules/5.4.63/ipcomp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec4/lib/modules/5.4.63/ah4.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec4/lib/modules/5.4.63/esp4.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec4/lib/modules/5.4.63/xfrm4_tunnel.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec4 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipsec4_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iptunnel6/lib/modules/5.4.63/tunnel6.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-iptunnel6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-iptunnel6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec6/lib/modules/5.4.63/xfrm6_tunnel.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec6/lib/modules/5.4.63/ipcomp6.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec6/lib/modules/5.4.63/ah6.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec6/lib/modules/5.4.63/esp6.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipsec6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipsec6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip-vti/lib/modules/5.4.63/ip_vti.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip-vti into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ip-vti_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6-tunnel/lib/modules/5.4.63/ip6_tunnel.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6-tunnel into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ip6-tunnel_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6-vti/lib/modules/5.4.63/ip6_vti.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ip6-vti into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ip6-vti_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-xfrm-interface/lib/modules/5.4.63/xfrm_interface.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-xfrm-interface into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-xfrm-interface_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sit/lib/modules/5.4.63/sit.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sit into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sit_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fou/lib/modules/5.4.63/fou.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fou into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fou_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fou6/lib/modules/5.4.63/fou6.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fou6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fou6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gre/lib/modules/5.4.63/ip_gre.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gre/lib/modules/5.4.63/gre.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gre into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-gre_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gre6/lib/modules/5.4.63/ip6_gre.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gre6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-gre6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tun/lib/modules/5.4.63/tun.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tun into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-tun_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-veth/lib/modules/5.4.63/veth.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-veth into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-veth_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ppp-synctty/lib/modules/5.4.63/ppp_synctty.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ppp-synctty into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ppp-synctty_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pppox/lib/modules/5.4.63/pppox.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pppox into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-pppox_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pppoe/lib/modules/5.4.63/pppoe.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pppoe into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-pppoe_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pppoa/lib/modules/5.4.63/pppoatm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pppoa into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-pppoa_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pptp/lib/modules/5.4.63/pptp.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pptp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-pptp_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-l2tp/lib/modules/5.4.63/l2tp_core.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-l2tp/lib/modules/5.4.63/l2tp_netlink.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-l2tp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-l2tp_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pppol2tp/lib/modules/5.4.63/l2tp_ppp.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pppol2tp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-pppol2tp_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipoa/lib/modules/5.4.63/clip.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ipoa into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ipoa_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mppe/lib/modules/5.4.63/ppp_mppe.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mppe into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mppe_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/sch_hfsc.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/cls_matchall.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/cls_tcindex.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/cls_basic.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/act_mirred.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/sch_ingress.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/sch_htb.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/act_skbedit.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/cls_route.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/cls_flow.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/sch_tbf.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/cls_u32.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/cls_fw.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core/lib/modules/5.4.63/em_u32.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sched-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-cake/lib/modules/5.4.63/sch_cake.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-cake into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sched-cake_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-flower/lib/modules/5.4.63/cls_flower.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-flower into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sched-flower_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-act-vlan/lib/modules/5.4.63/act_vlan.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-act-vlan into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sched-act-vlan_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-mqprio/lib/modules/5.4.63/sch_mqprio.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-mqprio into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sched-mqprio_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-connmark/lib/modules/5.4.63/act_connmark.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-connmark into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sched-connmark_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-ctinfo/lib/modules/5.4.63/act_ctinfo.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-ctinfo into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sched-ctinfo_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-ipset/lib/modules/5.4.63/em_ipset.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-ipset into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sched-ipset_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-bpf/lib/modules/5.4.63/cls_bpf.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-bpf/lib/modules/5.4.63/act_bpf.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched-bpf into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sched-bpf_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bpf-test/lib/modules/5.4.63/test_bpf.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bpf-test into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-bpf-test_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/act_simple.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/act_csum.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/act_police.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/em_meta.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/act_ipt.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/sch_codel.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/sch_fq.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/em_text.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/em_cmp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/sch_dsmark.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/em_nbyte.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/sch_multiq.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/sch_pie.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/sch_gred.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/sch_teql.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/act_gact.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/sch_prio.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/sch_sfq.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/act_pedit.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched/lib/modules/5.4.63/sch_red.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sched into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sched_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tcp-bbr/lib/modules/5.4.63/tcp_bbr.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tcp-bbr into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-tcp-bbr_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ax25/lib/modules/5.4.63/mkiss.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ax25/lib/modules/5.4.63/ax25.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ax25 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ax25_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pktgen/lib/modules/5.4.63/pktgen.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pktgen into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-pktgen_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-l2tp-eth/lib/modules/5.4.63/l2tp_eth.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-l2tp-eth into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-l2tp-eth_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-l2tp-ip/lib/modules/5.4.63/l2tp_ip6.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-l2tp-ip/lib/modules/5.4.63/l2tp_ip.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-l2tp-ip into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-l2tp-ip_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sctp/lib/modules/5.4.63/sctp.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sctp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sctp_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-netem/lib/modules/5.4.63/sch_netem.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-netem into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-netem_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-slip/lib/modules/5.4.63/slip.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-slip into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-slip_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mpls/lib/modules/5.4.63/mpls_iptunnel.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mpls/lib/modules/5.4.63/mpls_router.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mpls/lib/modules/5.4.63/mpls_gso.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mpls into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mpls_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nlmon/lib/modules/5.4.63/nlmon.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nlmon into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nlmon_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-macsec/lib/modules/5.4.63/macsec.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-macsec into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-macsec_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-netlink-diag/lib/modules/5.4.63/netlink_diag.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-netlink-diag into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-netlink-diag_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp775/lib/modules/5.4.63/nls_cp775.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp775 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-cp775_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp850/lib/modules/5.4.63/nls_cp850.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp850 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-cp850_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp852/lib/modules/5.4.63/nls_cp852.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp852 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-cp852_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp862/lib/modules/5.4.63/nls_cp862.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp862 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-cp862_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp864/lib/modules/5.4.63/nls_cp864.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp864 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-cp864_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp866/lib/modules/5.4.63/nls_cp866.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp866 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-cp866_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp932/lib/modules/5.4.63/nls_cp932.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp932 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-cp932_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp936/lib/modules/5.4.63/nls_cp936.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp936 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-cp936_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp950/lib/modules/5.4.63/nls_cp950.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp950 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-cp950_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp1250/lib/modules/5.4.63/nls_cp1250.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp1250 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-cp1250_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp1251/lib/modules/5.4.63/nls_cp1251.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-cp1251 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-cp1251_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-iso8859-2/lib/modules/5.4.63/nls_iso8859-2.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-iso8859-2 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-iso8859-2_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-iso8859-6/lib/modules/5.4.63/nls_iso8859-6.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-iso8859-6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-iso8859-6_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-iso8859-8/lib/modules/5.4.63/nls_cp1255.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-iso8859-8 into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-iso8859-8_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-iso8859-13/lib/modules/5.4.63/nls_iso8859-13.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-iso8859-13 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-iso8859-13_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-iso8859-15/lib/modules/5.4.63/nls_iso8859-15.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-iso8859-15 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-iso8859-15_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-koi8r/lib/modules/5.4.63/nls_koi8-r.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-nls-koi8r into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-nls-koi8r_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-6lowpan/lib/modules/5.4.63/6lowpan.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-6lowpan into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-6lowpan_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bluetooth/lib/modules/5.4.63/rfcomm.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bluetooth/lib/modules/5.4.63/bnep.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bluetooth/lib/modules/5.4.63/btusb.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bluetooth/lib/modules/5.4.63/hci_uart.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bluetooth/lib/modules/5.4.63/hidp.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bluetooth/lib/modules/5.4.63/btintel.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bluetooth/lib/modules/5.4.63/bluetooth.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bluetooth into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-bluetooth_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ath3k/lib/modules/5.4.63/ath3k.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ath3k into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ath3k_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bluetooth-6lowpan/lib/modules/5.4.63/bluetooth_6lowpan.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bluetooth-6lowpan into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-bluetooth-6lowpan_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mmc/lib/modules/5.4.63/mmc_block.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mmc/lib/modules/5.4.63/mmc_core.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mmc into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mmc_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-btmrvl/lib/modules/5.4.63/btmrvl.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-btmrvl/lib/modules/5.4.63/btmrvl_sdio.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-btmrvl into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-btmrvl_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dma-buf/lib/modules/5.4.63/dma-shared-buffer.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-dma-buf into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-dma-buf_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-eeprom-93cx6/lib/modules/5.4.63/eeprom_93cx6.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-eeprom-93cx6 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-eeprom-93cx6_5.4.63-1_mips_24kc.ipk 
rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-eeprom-at24/lib/modules/5.4.63/at24.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-eeprom-at24 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-eeprom-at24_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-eeprom-at25/lib/modules/5.4.63/at25.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-eeprom-at25 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-eeprom-at25_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gpio-dev into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-gpio-dev_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gpio-mcp23s08/lib/modules/5.4.63/pinctrl-mcp23s08.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gpio-mcp23s08 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-gpio-mcp23s08_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/gpio/gpio-74x164.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gpio-nxp-74hc164 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-gpio-nxp-74hc164_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/gpio/gpio-pca953x.ko' is built-in. 
Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gpio-pca953x into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-gpio-pca953x_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gpio-pcf857x/lib/modules/5.4.63/gpio-pcf857x.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gpio-pcf857x into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-gpio-pcf857x_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ppdev/lib/modules/5.4.63/parport.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ppdev/lib/modules/5.4.63/ppdev.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ppdev into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ppdev_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-parport-pc into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-parport-pc_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lp/lib/modules/5.4.63/lp.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-lp into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-lp_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sdhci/lib/modules/5.4.63/sdhci.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sdhci/lib/modules/5.4.63/sdhci-pltfm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sdhci into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sdhci_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-softdog/lib/modules/5.4.63/softdog.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-softdog into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-softdog_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bcma/lib/modules/5.4.63/bcma.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bcma into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-bcma_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-rtc-ds1307/lib/modules/5.4.63/rtc-ds1307.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-rtc-ds1307 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-rtc-ds1307_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-rtc-pcf8563/lib/modules/5.4.63/rtc-pcf8563.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-rtc-pcf8563 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-rtc-pcf8563_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mtdtests/lib/modules/5.4.63/mtd_stresstest.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mtdtests/lib/modules/5.4.63/mtd_oobtest.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mtdtests/lib/modules/5.4.63/mtd_torturetest.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mtdtests/lib/modules/5.4.63/mtd_subpagetest.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mtdtests/lib/modules/5.4.63/mtd_pagetest.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mtdtests/lib/modules/5.4.63/mtd_nandecctest.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mtdtests/lib/modules/5.4.63/mtd_speedtest.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mtdtests/lib/modules/5.4.63/mtd_readtest.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mtdtests into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mtdtests_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mtdoops/lib/modules/5.4.63/mtdoops.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mtdoops into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mtdoops_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mtdram/lib/modules/5.4.63/mtdram.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mtdram into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mtdram_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/tty/serial/8250/8250.ko' is built-in. NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/tty/serial/8250/8250_base.ko' is built-in. NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/tty/serial/serial_mctrl_gpio.ko' is built-in. rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-serial-8250/lib/modules/5.4.63/8250_pci.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-serial-8250 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-serial-8250_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-serial-8250-exar/lib/modules/5.4.63/8250_exar.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-serial-8250-exar into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-serial-8250-exar_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ikconfig/lib/modules/5.4.63/configs.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ikconfig into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ikconfig_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-zram/lib/modules/5.4.63/zram.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-zram/lib/modules/5.4.63/zsmalloc.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-zram into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-zram_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pps-gpio/lib/modules/5.4.63/pps-gpio.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pps-gpio into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-pps-gpio_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pps-ldisc/lib/modules/5.4.63/pps-ldisc.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-pps-ldisc into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-pps-ldisc_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gpio-beeper/lib/modules/5.4.63/gpio-beeper.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-gpio-beeper into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-gpio-beeper_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-echo/lib/modules/5.4.63/echo.ko: relocatable Packaged contents 
of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-echo into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-echo_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bmp085 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-bmp085_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bmp085-i2c into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-bmp085-i2c_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-bmp085-spi into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-bmp085-spi_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tpm/lib/modules/5.4.63/tpm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tpm into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-tpm_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tpm-i2c-atmel/lib/modules/5.4.63/tpm_i2c_atmel.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tpm-i2c-atmel into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-tpm-i2c-atmel_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tpm-i2c-infineon/lib/modules/5.4.63/tpm_i2c_infineon.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-tpm-i2c-infineon 
into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-tpm-i2c-infineon_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w83627hf-wdt into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-w83627hf-wdt_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-itco-wdt into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-itco-wdt_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-it87-wdt into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-it87-wdt_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-core/lib/modules/5.4.63/snd-timer.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-core/lib/modules/5.4.63/snd-seq-device.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-core/lib/modules/5.4.63/snd-pcm.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-core/lib/modules/5.4.63/snd.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-core/lib/modules/5.4.63/snd-rawmidi.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-core/lib/modules/5.4.63/snd-pcm-oss.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-core/lib/modules/5.4.63/snd-hwdep.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-core/lib/modules/5.4.63/snd-mixer-oss.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-core/lib/modules/5.4.63/soundcore.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-core/lib/modules/5.4.63/snd-compress.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ac97/lib/modules/5.4.63/ac97_bus.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ac97/lib/modules/5.4.63/snd-ac97-codec.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ac97 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ac97_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-mpu401/lib/modules/5.4.63/snd-mpu401-uart.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-mpu401 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-mpu401_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-seq/lib/modules/5.4.63/snd-seq-midi.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-seq/lib/modules/5.4.63/snd-seq-midi-event.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-seq/lib/modules/5.4.63/snd-seq.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-seq into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-seq_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-ens1371/lib/modules/5.4.63/snd-ens1371.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-ens1371 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-ens1371_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-i8x0/lib/modules/5.4.63/snd-intel8x0.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-i8x0 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-i8x0_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-via82xx/lib/modules/5.4.63/snd-via82xx.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-via82xx into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-via82xx_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-soc-core/lib/modules/5.4.63/snd-soc-core.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-soc-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-soc-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-soc-ac97/lib/modules/5.4.63/snd-soc-ac97.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-soc-ac97 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-soc-ac97_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-dummy/lib/modules/5.4.63/snd-dummy.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-dummy into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-dummy_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-hda-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-hda-core_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-hda-codec-realtek into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-hda-codec-realtek_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-hda-codec-cmedia into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-hda-codec-cmedia_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-hda-codec-analog into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-hda-codec-analog_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-hda-codec-idt into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-hda-codec-idt_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-hda-codec-si3054 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-hda-codec-si3054_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-hda-codec-cirrus into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-hda-codec-cirrus_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-hda-codec-ca0110 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-hda-codec-ca0110_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-hda-codec-ca0132 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-hda-codec-ca0132_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-hda-codec-conexant into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-hda-codec-conexant_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-hda-codec-via into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-hda-codec-via_5.4.63-1_mips_24kc.ipk Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-sound-hda-codec-hdmi into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-sound-hda-codec-hdmi_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mmc-spi/lib/modules/5.4.63/of_mmc_spi.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mmc-spi/lib/modules/5.4.63/mmc_spi.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mmc-spi into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mmc-spi_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/spi/spi-gpio.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-spi-gpio into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-spi-gpio_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-spi-dev/lib/modules/5.4.63/spidev.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-spi-dev into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-spi-dev_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-ledtrig-usbport/lib/modules/5.4.63/ledtrig-usbport.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-ledtrig-usbport into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-ledtrig-usbport_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-phy-nop/lib/modules/5.4.63/phy-generic.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-phy-nop into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-phy-nop_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/phy/phy-ar7100-usb.ko' is built-in. NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/phy/phy-ar7200-usb.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-phy-ath79-usb into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-phy-ath79-usb_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-uhci/lib/modules/5.4.63/uhci-hcd.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-uhci into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-uhci_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-ohci/lib/modules/5.4.63/ohci-hcd.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-ohci/lib/modules/5.4.63/ohci-platform.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-ohci into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-ohci_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-ohci-pci/lib/modules/5.4.63/ohci-pci.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-ohci-pci into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-ohci-pci_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-ehci/lib/modules/5.4.63/ehci-hcd.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-ehci into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-ehci_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb2/lib/modules/5.4.63/fsl-mph-dr-of.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb2/lib/modules/5.4.63/ehci-platform.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb2/lib/modules/5.4.63/ehci-fsl.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb2 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb2_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb2-pci/lib/modules/5.4.63/ehci-pci.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb2-pci into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb2-pci_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-dwc2/lib/modules/5.4.63/dwc2.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-dwc2 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-dwc2_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-dwc3/lib/modules/5.4.63/dwc3.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-dwc3 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-dwc3_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-acm/lib/modules/5.4.63/cdc-acm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-acm into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-acm_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-wdm/lib/modules/5.4.63/cdc-wdm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-wdm into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-wdm_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-audio/lib/modules/5.4.63/snd-usb-audio.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-audio/lib/modules/5.4.63/snd-usbmidi-lib.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-audio into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-audio_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-printer/lib/modules/5.4.63/usblp.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-printer into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-printer_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial/lib/modules/5.4.63/usbserial.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-belkin/lib/modules/5.4.63/belkin_sa.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-belkin into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-belkin_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-ch341/lib/modules/5.4.63/ch341.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-ch341 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-ch341_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-edgeport/lib/modules/5.4.63/io_edgeport.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-edgeport into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-edgeport_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-ftdi/lib/modules/5.4.63/ftdi_sio.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-ftdi into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-ftdi_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-garmin/lib/modules/5.4.63/garmin_gps.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-garmin into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-garmin_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-simple/lib/modules/5.4.63/usb-serial-simple.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-simple into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-simple_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-ti-usb/lib/modules/5.4.63/ti_usb_3410_5052.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-ti-usb into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-ti-usb_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-wwan/lib/modules/5.4.63/usb_wwan.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-wwan into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-wwan_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-ipw/lib/modules/5.4.63/ipw.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-ipw into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-ipw_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-mct/lib/modules/5.4.63/mct_u232.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-mct into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-mct_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-mos7720/lib/modules/5.4.63/mos7720.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-mos7720 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-mos7720_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-mos7840/lib/modules/5.4.63/mos7840.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-mos7840 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-mos7840_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-pl2303/lib/modules/5.4.63/pl2303.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-pl2303 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-pl2303_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-cp210x/lib/modules/5.4.63/cp210x.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-cp210x into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-cp210x_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-ark3116/lib/modules/5.4.63/ark3116.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-ark3116 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-ark3116_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-oti6858/lib/modules/5.4.63/oti6858.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-oti6858 into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-oti6858_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-sierrawireless/lib/modules/5.4.63/sierra.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-sierrawireless into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-sierrawireless_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-visor/lib/modules/5.4.63/visor.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-visor into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-visor_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-cypress-m8/lib/modules/5.4.63/cypress_m8.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-cypress-m8 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-cypress-m8_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-keyspan/lib/modules/5.4.63/ezusb.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-keyspan/lib/modules/5.4.63/keyspan.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-keyspan into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-keyspan_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-option/lib/modules/5.4.63/option.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-option into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-option_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-qualcomm/lib/modules/5.4.63/qcserial.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-serial-qualcomm into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-serial-qualcomm_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage/lib/modules/5.4.63/usb-storage.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-storage_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage-extras/lib/modules/5.4.63/ums-datafab.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage-extras/lib/modules/5.4.63/ums-usbat.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage-extras/lib/modules/5.4.63/ums-sddr55.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage-extras/lib/modules/5.4.63/ums-cypress.ko: relocatable rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage-extras/lib/modules/5.4.63/ums-jumpshot.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage-extras/lib/modules/5.4.63/ums-alauda.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage-extras/lib/modules/5.4.63/ums-karma.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage-extras/lib/modules/5.4.63/ums-freecom.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage-extras/lib/modules/5.4.63/ums-isd200.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage-extras/lib/modules/5.4.63/ums-sddr09.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage-extras into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-storage-extras_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage-uas/lib/modules/5.4.63/uas.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-storage-uas into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-storage-uas_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-atm/lib/modules/5.4.63/usbatm.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-atm into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-atm_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-atm-speedtouch/lib/modules/5.4.63/speedtch.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-atm-speedtouch into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-atm-speedtouch_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-atm-ueagle/lib/modules/5.4.63/ueagle-atm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-atm-ueagle into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-atm-ueagle_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-atm-cxacru/lib/modules/5.4.63/cxacru.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-atm-cxacru into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-atm-cxacru_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net/lib/modules/5.4.63/usbnet.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-asix/lib/modules/5.4.63/asix.ko: 
relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-asix into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-asix_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-asix-ax88179/lib/modules/5.4.63/ax88179_178a.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-asix-ax88179 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-asix-ax88179_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-hso/lib/modules/5.4.63/hso.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-hso into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-hso_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-kaweth/lib/modules/5.4.63/kaweth.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-kaweth into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-kaweth_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-pegasus/lib/modules/5.4.63/pegasus.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-pegasus into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-pegasus_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-mcs7830/lib/modules/5.4.63/mcs7830.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-mcs7830 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-mcs7830_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-smsc95xx/lib/modules/5.4.63/smsc95xx.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-smsc95xx into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-smsc95xx_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-dm9601-ether/lib/modules/5.4.63/dm9601.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-dm9601-ether into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-dm9601-ether_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-cdc-ether/lib/modules/5.4.63/cdc_ether.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-cdc-ether into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-cdc-ether_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-cdc-eem/lib/modules/5.4.63/cdc_eem.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-cdc-eem into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-cdc-eem_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-cdc-subset/lib/modules/5.4.63/cdc_subset.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-cdc-subset into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-cdc-subset_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-qmi-wwan/lib/modules/5.4.63/qmi_wwan.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-qmi-wwan into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-qmi-wwan_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-rtl8150/lib/modules/5.4.63/rtl8150.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-rtl8150 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-rtl8150_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-rtl8152/lib/modules/5.4.63/r8152.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-rtl8152 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-rtl8152_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-sr9700/lib/modules/5.4.63/sr9700.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-sr9700 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-sr9700_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-rndis/lib/modules/5.4.63/rndis_host.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-rndis into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-rndis_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-cdc-ncm/lib/modules/5.4.63/cdc_ncm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-cdc-ncm into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-cdc-ncm_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-cdc-mbim/lib/modules/5.4.63/cdc_mbim.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-cdc-mbim into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-cdc-mbim_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-huawei-cdc-ncm/lib/modules/5.4.63/huawei_cdc_ncm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-huawei-cdc-ncm into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-huawei-cdc-ncm_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-sierrawireless/lib/modules/5.4.63/sierra_net.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-sierrawireless into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-sierrawireless_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-ipheth/lib/modules/5.4.63/ipheth.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-ipheth into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-ipheth_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-kalmia/lib/modules/5.4.63/kalmia.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-kalmia into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-kalmia_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-pl/lib/modules/5.4.63/plusb.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-net-pl into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-net-pl_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-hid/lib/modules/5.4.63/usbhid.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-hid into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-hid_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-yealink/lib/modules/5.4.63/yealink.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-yealink into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-yealink_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-cm109/lib/modules/5.4.63/cm109.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-cm109 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-cm109_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-test/lib/modules/5.4.63/usbtest.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-test into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-test_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usbip/lib/modules/5.4.63/usbip-core.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usbip into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usbip_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usbip-client/lib/modules/5.4.63/vhci-hcd.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usbip-client into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usbip-client_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usbip-server/lib/modules/5.4.63/usbip-host.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usbip-server into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usbip-server_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/extcon/extcon-core.ko' is built-in. rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-chipidea/lib/modules/5.4.63/ulpi.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-chipidea/lib/modules/5.4.63/ci_hdrc.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-chipidea/lib/modules/5.4.63/roles.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-chipidea into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-chipidea_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/extcon/extcon-core.ko' is built-in. 
rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-chipidea2/lib/modules/5.4.63/ci_hdrc_usb2.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb-chipidea2 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb-chipidea2_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usbmon/lib/modules/5.4.63/usbmon.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usbmon into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usbmon_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb3/lib/modules/5.4.63/xhci-pci.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb3/lib/modules/5.4.63/xhci-hcd.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb3/lib/modules/5.4.63/xhci-plat-hcd.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-usb3 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-usb3_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-chaoskey/lib/modules/5.4.63/chaoskey.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-chaoskey into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-chaoskey_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-core/lib/modules/5.4.63/videodev.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-videobuf2/lib/modules/5.4.63/videobuf2-v4l2.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-videobuf2/lib/modules/5.4.63/videobuf2-vmalloc.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-videobuf2/lib/modules/5.4.63/videobuf2-common.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-videobuf2/lib/modules/5.4.63/videobuf2-memops.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-videobuf2 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-videobuf2_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-cpia2/lib/modules/5.4.63/cpia2.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-cpia2 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-cpia2_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-pwc/lib/modules/5.4.63/pwc.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-pwc into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-pwc_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-uvc/lib/modules/5.4.63/uvcvideo.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-uvc into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-uvc_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-core/lib/modules/5.4.63/gspca_main.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-core into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-core_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-conex/lib/modules/5.4.63/gspca_conex.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-conex into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-conex_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-etoms/lib/modules/5.4.63/gspca_etoms.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-etoms into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-etoms_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-finepix/lib/modules/5.4.63/gspca_finepix.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-finepix into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-finepix_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-mars/lib/modules/5.4.63/gspca_mars.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-mars into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-mars_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-mr97310a/lib/modules/5.4.63/gspca_mr97310a.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-mr97310a into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-mr97310a_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-ov519/lib/modules/5.4.63/gspca_ov519.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-ov519 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-ov519_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-ov534/lib/modules/5.4.63/gspca_ov534.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-ov534 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-ov534_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-ov534-9/lib/modules/5.4.63/gspca_ov534_9.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-ov534-9 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-ov534-9_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-pac207/lib/modules/5.4.63/gspca_pac207.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-pac207 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-pac207_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-pac7311/lib/modules/5.4.63/gspca_pac7311.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-pac7311 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-pac7311_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-se401/lib/modules/5.4.63/gspca_se401.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-se401 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-se401_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-sn9c20x/lib/modules/5.4.63/gspca_sn9c20x.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-sn9c20x into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-sn9c20x_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-sonixb/lib/modules/5.4.63/gspca_sonixb.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-sonixb into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-sonixb_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-sonixj/lib/modules/5.4.63/gspca_sonixj.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-sonixj into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-sonixj_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-spca500/lib/modules/5.4.63/gspca_spca500.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-spca500 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-spca500_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-spca501/lib/modules/5.4.63/gspca_spca501.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-spca501 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-spca501_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-spca505/lib/modules/5.4.63/gspca_spca505.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-spca505 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-spca505_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-spca506/lib/modules/5.4.63/gspca_spca506.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-spca506 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-spca506_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-spca508/lib/modules/5.4.63/gspca_spca508.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-spca508 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-spca508_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-spca561/lib/modules/5.4.63/gspca_spca561.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-spca561 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-spca561_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-sq905/lib/modules/5.4.63/gspca_sq905.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-sq905 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-sq905_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-sq905c/lib/modules/5.4.63/gspca_sq905c.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-sq905c into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-sq905c_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-stk014/lib/modules/5.4.63/gspca_stk014.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-stk014 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-stk014_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-sunplus/lib/modules/5.4.63/gspca_sunplus.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-sunplus into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-sunplus_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-t613/lib/modules/5.4.63/gspca_t613.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-t613 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-t613_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-tv8532/lib/modules/5.4.63/gspca_tv8532.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-tv8532 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-tv8532_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-vc032x/lib/modules/5.4.63/gspca_vc032x.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-vc032x into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-vc032x_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-zc3xx/lib/modules/5.4.63/gspca_zc3xx.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-zc3xx into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-zc3xx_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-m5602/lib/modules/5.4.63/gspca_m5602.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-m5602 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-m5602_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-stv06xx/lib/modules/5.4.63/gspca_stv06xx.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-stv06xx into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-stv06xx_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-gl860/lib/modules/5.4.63/gspca_gl860.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-gl860 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-gl860_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-jeilinj/lib/modules/5.4.63/gspca_jeilinj.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-jeilinj into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-jeilinj_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-konica/lib/modules/5.4.63/gspca_konica.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-video-gspca-konica into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-video-gspca-konica_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1/lib/modules/5.4.63/wire.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1 into 
/home/build/openwrt/bin/targets/ath79/generic/packages/kmod-w1_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-master-gpio/lib/modules/5.4.63/w1-gpio.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-master-gpio into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-w1-master-gpio_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-master-ds2482/lib/modules/5.4.63/ds2482.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-master-ds2482 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-w1-master-ds2482_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-master-ds2490/lib/modules/5.4.63/ds2490.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-master-ds2490 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-w1-master-ds2490_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-slave-therm/lib/modules/5.4.63/w1_therm.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-slave-therm into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-w1-slave-therm_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-slave-smem/lib/modules/5.4.63/w1_smem.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-slave-smem into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-w1-slave-smem_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-slave-ds2431/lib/modules/5.4.63/w1_ds2431.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-slave-ds2431 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-w1-slave-ds2431_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-slave-ds2433/lib/modules/5.4.63/w1_ds2433.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-slave-ds2433 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-w1-slave-ds2433_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-slave-ds2760 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-w1-slave-ds2760_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-slave-ds2413/lib/modules/5.4.63/w1_ds2413.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-w1-slave-ds2413 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-w1-slave-ds2413_5.4.63-1_mips_24kc.ipk Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-net-prism54 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-net-prism54_5.4.63-1_mips_24kc.ipk rstrip.sh: 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-net-rtl8192su/lib/modules/5.4.63/r8712u.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-net-rtl8192su into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-net-rtl8192su_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ieee802154/lib/modules/5.4.63/ieee802154.ko: relocatable rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ieee802154/lib/modules/5.4.63/ieee802154_socket.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ieee802154 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ieee802154_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mac802154/lib/modules/5.4.63/mac802154.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mac802154 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mac802154_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fakelb/lib/modules/5.4.63/fakelb.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-fakelb into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-fakelb_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atusb/lib/modules/5.4.63/atusb.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-atusb into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-atusb_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-at86rf230/lib/modules/5.4.63/at86rf230.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-at86rf230 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-at86rf230_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mrf24j40/lib/modules/5.4.63/mrf24j40.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-mrf24j40 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-mrf24j40_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-cc2520/lib/modules/5.4.63/cc2520.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-cc2520 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-cc2520_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ca8210/lib/modules/5.4.63/ca8210.ko: relocatable Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ca8210 into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ca8210_5.4.63-1_mips_24kc.ipk rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ieee802154-6lowpan/lib/modules/5.4.63/ieee802154_6lowpan.ko: relocatable Packaged contents of 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-ieee802154-6lowpan into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-ieee802154-6lowpan_5.4.63-1_mips_24kc.ipk NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/leds-reset.ko' is built-in. Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/ipkg-mips_24kc/kmod-leds-reset into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-leds-reset_5.4.63-1_mips_24kc.ipk echo "kernel" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-aoe.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-aoe mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-aoe for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/block/aoe/aoe.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-aoe/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-aoe/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-aoe/etc/modules.d; ( echo "aoe"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-aoe/etc/modules.d/30-aoe; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-aoe.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/ata/libata.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/ata/libahci.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-ahci.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-ahci mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-ahci for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/ata/ahci.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-ahci/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-ahci/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-ahci/etc/modules.d; ( echo "ahci"; echo "libahci"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-ahci/etc/modules.d/41-ata-ahci; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-ahci/etc/modules-boot.d; ln -sf ../modules.d/41-ata-ahci /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-ahci/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-ahci.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-artop.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-artop mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-artop for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/ata/pata_artop.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-artop/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-artop/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-artop/etc/modules.d; ( echo "pata_artop"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-artop/etc/modules.d/41-ata-artop; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-artop/etc/modules-boot.d; ln -sf ../modules.d/41-ata-artop /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-artop/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-artop.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-marvell-sata.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-marvell-sata mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-marvell-sata for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/ata/sata_mv.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-marvell-sata/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-marvell-sata/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-marvell-sata/etc/modules.d; ( echo "sata_mv"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-marvell-sata/etc/modules.d/41-ata-marvell-sata; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-marvell-sata/etc/modules-boot.d; ln -sf ../modules.d/41-ata-marvell-sata /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-marvell-sata/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-marvell-sata.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-nvidia-sata.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-nvidia-sata mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-nvidia-sata for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/ata/sata_nv.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-nvidia-sata/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-nvidia-sata/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-nvidia-sata/etc/modules.d; ( echo "sata_nv"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-nvidia-sata/etc/modules.d/41-ata-nvidia-sata; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-nvidia-sata/etc/modules-boot.d; ln -sf ../modules.d/41-ata-nvidia-sata /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-nvidia-sata/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-nvidia-sata.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-pdc202xx-old.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-pdc202xx-old mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-pdc202xx-old for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/ata/pata_pdc202xx_old.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-pdc202xx-old/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-pdc202xx-old/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-pdc202xx-old/etc/modules.d; ( echo "pata_pdc202xx_old"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-pdc202xx-old/etc/modules.d/41-ata-pdc202xx-old; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-pdc202xx-old/etc/modules-boot.d; ln -sf ../modules.d/41-ata-pdc202xx-old /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-pdc202xx-old/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-pdc202xx-old.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-piix.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-piix mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-piix for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/ata/ata_piix.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-piix/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-piix/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-piix/etc/modules.d; ( echo "ata_piix"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-piix/etc/modules.d/41-ata-piix; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-piix/etc/modules-boot.d; ln -sf ../modules.d/41-ata-piix /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-piix/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-piix.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/ata/sata_sil.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil/etc/modules.d; ( echo "sata_sil"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil/etc/modules.d/41-ata-sil; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil/etc/modules-boot.d; ln -sf ../modules.d/41-ata-sil /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil24.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil24 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil24 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/ata/sata_sil24.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil24/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil24/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil24/etc/modules.d; ( echo "sata_sil24"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil24/etc/modules.d/41-ata-sil24; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil24/etc/modules-boot.d; ln -sf ../modules.d/41-ata-sil24 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil24/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil24.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-via-sata.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-via-sata mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-via-sata for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/ata/sata_via.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-via-sata/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-via-sata/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-via-sata/etc/modules.d; ( echo "sata_via"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-via-sata/etc/modules.d/41-ata-via-sata; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-via-sata/etc/modules-boot.d; ln -sf ../modules.d/41-ata-via-sata /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-via-sata/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-via-sata.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-block2mtd.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-block2mtd mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-block2mtd for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mtd/devices/block2mtd.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-block2mtd/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-block2mtd/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-block2mtd.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dax.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dax mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dax for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/dax/dax.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dax/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dax/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dax.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/md/dm-mod.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/md/dm-crypt.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/md/dm-log.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/md/dm-mirror.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/md/dm-region-hash.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm/etc/modules.d; ( echo "dm-crypt"; echo "dm-log"; echo "dm-mirror"; echo "dm-mod"; echo "dm-region-hash"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm/etc/modules.d/30-dm; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm-raid.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm-raid mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm-raid for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/md/dm-raid.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm-raid/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm-raid/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm-raid/etc/modules.d; ( echo "dm-raid"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm-raid/etc/modules.d/31-dm-raid; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm-raid.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iscsi-initiator.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iscsi-initiator mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iscsi-initiator for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/scsi/iscsi_tcp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/scsi/libiscsi.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/scsi/libiscsi_tcp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/scsi/scsi_transport_iscsi.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iscsi-initiator/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iscsi-initiator/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iscsi-initiator/etc/modules.d; ( echo "iscsi_tcp"; echo "libiscsi"; echo "libiscsi_tcp"; echo "scsi_transport_iscsi"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iscsi-initiator/etc/modules.d/iscsi-initiator; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iscsi-initiator.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-mod.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-mod mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-mod for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/md/md-mod.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-mod/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-mod/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-mod/etc/modules.d; ( echo "md-mod"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-mod/etc/modules.d/27-md-mod; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-mod.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-linear.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-linear mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-linear for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/md/linear.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-linear/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-linear/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-linear/etc/modules.d; ( echo "linear"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-linear/etc/modules.d/28-md-linear; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-linear.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid0.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid0 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid0 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/md/raid0.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid0/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid0/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid0/etc/modules.d; ( echo "raid0"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid0/etc/modules.d/28-md-raid0; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid0.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid1.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid1 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid1 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/md/raid1.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid1/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid1/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid1/etc/modules.d; ( echo "raid1"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid1/etc/modules.d/28-md-raid1; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid1.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid10.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid10 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid10 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/md/raid10.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid10/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid10/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid10/etc/modules.d; ( echo "raid10"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid10/etc/modules.d/28-md-raid10; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid10.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid456.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid456 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid456 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/async_tx/async_tx.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/async_tx/async_memcpy.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/async_tx/async_xor.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/async_tx/async_pq.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/async_tx/async_raid6_recov.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/md/raid456.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid456/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid456/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid456/etc/modules.d; ( echo "async_memcpy"; echo "async_pq"; echo "async_raid6_recov"; echo "async_tx"; echo "async_xor"; echo "raid456"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid456/etc/modules.d/28-md-raid456; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid456.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-multipath.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-multipath mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-multipath for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/md/multipath.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-multipath/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-multipath/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-multipath/etc/modules.d; ( echo "multipath"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-multipath/etc/modules.d/29-md-multipath; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-multipath.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-loop.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-loop mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-loop for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/block/loop.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-loop/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-loop/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-loop/etc/modules.d; ( echo "loop"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-loop/etc/modules.d/30-loop; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-loop.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nbd.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nbd mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nbd for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/block/nbd.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nbd/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nbd/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nbd/etc/modules.d; ( echo "nbd"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nbd/etc/modules.d/30-nbd; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nbd.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/scsi/scsi_mod.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/scsi/sd_mod.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-core/etc/modules.d; ( echo "scsi_mod"; echo "sd_mod"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-core/etc/modules.d/40-scsi-core; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-core/etc/modules-boot.d; ln -sf ../modules.d/40-scsi-core /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-core/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-generic.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-generic mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-generic for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/scsi/sg.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-generic/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-generic/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-generic/etc/modules.d; ( echo "sg"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-generic/etc/modules.d/65-scsi-generic; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-generic.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-cdrom.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-cdrom mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-cdrom for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/cdrom/cdrom.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/scsi/sr_mod.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-cdrom/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-cdrom/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-cdrom/etc/modules.d; ( echo "sr_mod"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-cdrom/etc/modules.d/45-scsi-cdrom; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-cdrom.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-tape.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-tape mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-tape for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/scsi/st.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-tape/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-tape/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-tape/etc/modules.d; ( echo "st"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-tape/etc/modules.d/45-scsi-tape; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-tape.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iosched-bfq.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iosched-bfq mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iosched-bfq for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/block/bfq.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iosched-bfq/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iosched-bfq/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iosched-bfq/etc/modules.d; ( echo "bfq"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iosched-bfq/etc/modules.d/10-iosched-bfq; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iosched-bfq.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/can/can-dev.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/can/can.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can/etc/modules.d; ( echo "can"; echo "can-dev"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can/etc/modules.d/can; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-bcm.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-bcm mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-bcm for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/can/can-bcm.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-bcm/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-bcm/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-bcm/etc/modules.d; ( echo "can-bcm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-bcm/etc/modules.d/can-bcm; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-bcm.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/can/c_can/c_can.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can/etc/modules.d; ( echo "c_can"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can/etc/modules.d/can-c-can; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-pci.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-pci mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-pci for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/can/c_can/c_can_pci.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-pci/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-pci/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-pci/etc/modules.d; ( echo "c_can_pci"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-pci/etc/modules.d/can-c-can-pci; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-pci.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-platform.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-platform mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-platform for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/can/c_can/c_can_platform.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-platform/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-platform/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-platform/etc/modules.d; ( echo "c_can_platform"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-platform/etc/modules.d/can-c-can-platform; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-platform.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-gw.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-gw mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-gw for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/can/can-gw.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-gw/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-gw/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-gw/etc/modules.d; ( echo "can-gw"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-gw/etc/modules.d/can-gw; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-gw.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-mcp251x.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-mcp251x mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-mcp251x for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/can/spi/mcp251x.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-mcp251x/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-mcp251x/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-mcp251x/etc/modules.d; ( echo "can-mcp251x"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-mcp251x/etc/modules.d/can-mcp251x; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-mcp251x.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-raw.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-raw mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-raw for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/can/can-raw.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-raw/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-raw/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-raw/etc/modules.d; ( echo "can-raw"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-raw/etc/modules.d/can-raw; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-raw.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-slcan.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-slcan mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-slcan for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/can/slcan.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-slcan/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-slcan/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-slcan/etc/modules.d; ( echo "slcan"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-slcan/etc/modules.d/can-slcan; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-slcan.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-8dev.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-8dev mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-8dev for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/can/usb/usb_8dev.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-8dev/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-8dev/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-8dev/etc/modules.d; ( echo "usb_8dev"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-8dev/etc/modules.d/can-usb-8dev; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-8dev.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-ems.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-ems mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-ems for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/can/usb/ems_usb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-ems/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-ems/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-ems/etc/modules.d; ( echo "ems_usb"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-ems/etc/modules.d/can-usb-ems; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-ems.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-esd.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-esd mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-esd for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/can/usb/esd_usb2.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-esd/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-esd/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-esd/etc/modules.d; ( echo "esd_usb2"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-esd/etc/modules.d/can-usb-esd; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-esd.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-kvaser.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-kvaser mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-kvaser for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/can/usb/kvaser_usb/kvaser_usb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-kvaser/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-kvaser/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-kvaser/etc/modules.d; ( echo "kvaser_usb"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-kvaser/etc/modules.d/can-usb-kvaser; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-kvaser.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-peak.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-peak mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-peak for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/can/usb/peak_usb/peak_usb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-peak/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-peak/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-peak/etc/modules.d; ( echo "peak_usb"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-peak/etc/modules.d/can-usb-peak; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-peak.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-vcan.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-vcan mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-vcan for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/can/vcan.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-vcan/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-vcan/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-vcan/etc/modules.d; ( echo "vcan"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-vcan/etc/modules.d/can-vcan; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-vcan.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-acompress.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-acompress mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-acompress for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/crypto_acompress.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-acompress/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-acompress/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-acompress/etc/modules.d; ( echo "crypto_acompress"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-acompress/etc/modules.d/09-crypto-acompress; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-acompress.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-aead.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-aead mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-aead for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/aead.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-aead/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-aead/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-aead/etc/modules.d; ( echo "aead"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-aead/etc/modules.d/09-crypto-aead; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-aead/etc/modules-boot.d; ln -sf ../modules.d/09-crypto-aead /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-aead/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-aead.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-arc4.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-arc4 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-arc4 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/arc4.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-arc4/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-arc4/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-arc4/etc/modules.d; ( echo "arc4"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-arc4/etc/modules.d/09-crypto-arc4; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-arc4.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-authenc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-authenc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-authenc for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/authenc.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-authenc/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-authenc/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-authenc/etc/modules.d; ( echo "authenc"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-authenc/etc/modules.d/09-crypto-authenc; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-authenc.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cbc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cbc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cbc for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/cbc.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cbc/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cbc/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cbc/etc/modules.d; ( echo "cbc"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cbc/etc/modules.d/09-crypto-cbc; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cbc.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ccm.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ccm mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ccm for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/ccm.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ccm/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ccm/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ccm/etc/modules.d; ( echo "ccm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ccm/etc/modules.d/09-crypto-ccm; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ccm.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cmac.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cmac mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cmac for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/cmac.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cmac/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cmac/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cmac/etc/modules.d; ( echo "cmac"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cmac/etc/modules.d/09-crypto-cmac; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cmac.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/crc32_generic.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32/etc/modules.d; ( echo "crc32_generic"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32/etc/modules.d/04-crypto-crc32; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32/etc/modules-boot.d; ln -sf ../modules.d/04-crypto-crc32 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32c.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32c mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32c for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/crc32c_generic.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32c/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32c/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32c/etc/modules.d; ( echo "crc32c_generic"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32c/etc/modules.d/04-crypto-crc32c; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32c/etc/modules-boot.d; ln -sf ../modules.d/04-crypto-crc32c /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32c/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32c.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ctr.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ctr mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ctr for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/ctr.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ctr/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ctr/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ctr/etc/modules.d; ( echo "ctr"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ctr/etc/modules.d/09-crypto-ctr; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ctr.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cts.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cts mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cts for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/cts.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cts/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cts/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cts/etc/modules.d; ( echo "cts"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cts/etc/modules.d/09-crypto-cts; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cts.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-deflate.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-deflate mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-deflate for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/deflate.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-deflate/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-deflate/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-deflate/etc/modules.d; ( echo "deflate"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-deflate/etc/modules.d/09-crypto-deflate; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-deflate.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-des.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-des mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-des for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/des_generic.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/crypto/libdes.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-des/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-des/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-des/etc/modules.d; ( echo "des_generic"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-des/etc/modules.d/09-crypto-des; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-des.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecb.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecb mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecb for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/ecb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecb/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecb/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecb/etc/modules.d; ( echo "ecb"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecb/etc/modules.d/09-crypto-ecb; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecb.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecdh.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecdh mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecdh for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/ecdh_generic.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/ecc.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecdh/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecdh/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecdh/etc/modules.d; ( echo "ecdh_generic"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecdh/etc/modules.d/10-crypto-ecdh; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecdh.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-echainiv.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-echainiv mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-echainiv for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/echainiv.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-echainiv/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-echainiv/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-echainiv/etc/modules.d; ( echo "echainiv"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-echainiv/etc/modules.d/09-crypto-echainiv; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-echainiv.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-fcrypt.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-fcrypt mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-fcrypt for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/fcrypt.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-fcrypt/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-fcrypt/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-fcrypt/etc/modules.d; ( echo "fcrypt"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-fcrypt/etc/modules.d/09-crypto-fcrypt; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-fcrypt.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gcm.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gcm mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gcm for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/gcm.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gcm/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gcm/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gcm/etc/modules.d; ( echo "gcm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gcm/etc/modules.d/09-crypto-gcm; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gcm.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xcbc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xcbc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xcbc for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/xcbc.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xcbc/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xcbc/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xcbc/etc/modules.d; ( echo "xcbc"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xcbc/etc/modules.d/09-crypto-xcbc; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xcbc.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gf128.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gf128 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gf128 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/gf128mul.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gf128/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gf128/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gf128/etc/modules.d; ( echo "gf128mul"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gf128/etc/modules.d/09-crypto-gf128; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gf128.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ghash.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ghash mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ghash for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/ghash-generic.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ghash/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ghash/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ghash/etc/modules.d; ( echo "ghash-generic"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ghash/etc/modules.d/09-crypto-ghash; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ghash.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hash.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hash mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hash for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/crypto_hash.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hash/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hash/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hash/etc/modules.d; ( echo "crypto_hash"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hash/etc/modules.d/02-crypto-hash; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hash/etc/modules-boot.d; ln -sf ../modules.d/02-crypto-hash /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hash/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hash.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hmac.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hmac mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hmac for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/hmac.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hmac/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hmac/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hmac/etc/modules.d; ( echo "hmac"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hmac/etc/modules.d/09-crypto-hmac; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hmac.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-ccp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-ccp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-ccp true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-ccp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-geode.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-geode mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-geode true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-geode.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-hifn-795x.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-hifn-795x mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-hifn-795x for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/crypto/hifn_795x.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" 
"/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-hifn-795x/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-hifn-795x/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-hifn-795x/etc/modules.d; ( echo "hifn_795x"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-hifn-795x/etc/modules.d/09-crypto-hw-hifn-795x; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-hifn-795x.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-padlock.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-padlock mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-padlock true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-padlock.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-talitos.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-talitos mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-talitos true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-talitos.installed rm -rf 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-kpp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-kpp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-kpp for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/kpp.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-kpp/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-kpp/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-kpp/etc/modules.d; ( echo "kpp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-kpp/etc/modules.d/09-crypto-kpp; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-kpp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-manager.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-manager mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-manager for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/cryptomgr.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-manager/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-manager/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-manager/etc/modules.d; ( echo "cryptomgr"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-manager/etc/modules.d/09-crypto-manager; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-manager/etc/modules-boot.d; ln -sf ../modules.d/09-crypto-manager /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-manager/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-manager.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md4.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md4 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md4 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/md4.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md4/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md4/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md4/etc/modules.d; ( echo "md4"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md4/etc/modules.d/09-crypto-md4; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md4.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md5.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md5 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md5 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/md5.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md5/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md5/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md5/etc/modules.d; ( echo "md5"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md5/etc/modules.d/09-crypto-md5; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md5.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-michael-mic.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-michael-mic mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-michael-mic for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/michael_mic.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-michael-mic/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-michael-mic/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-michael-mic/etc/modules.d; ( echo "michael_mic"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-michael-mic/etc/modules.d/09-crypto-michael-mic; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-michael-mic.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-misc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-misc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-misc for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/anubis.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/camellia_generic.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/cast_common.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/cast5_generic.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/cast6_generic.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/khazad.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/tea.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/tgr192.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/twofish_common.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/wp512.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/twofish_generic.ko 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/blowfish_common.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/blowfish_generic.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/serpent_generic.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-misc/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-misc/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-misc/etc/modules.d; ( echo "0"; echo "10"; echo "anubis"; echo "blowfish_common"; echo "blowfish_generic"; echo "camellia_generic"; echo "cast5_generic"; echo "cast6_generic"; echo "cast_common"; echo "khazad"; echo "serpent_generic"; echo "tea"; echo "tgr192"; echo "twofish_common"; echo "twofish_generic"; echo "wp512"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-misc/etc/modules.d/10-crypto-misc; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-misc.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-null.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-null mkdir -p 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-null for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/crypto_null.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-null/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-null/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-null/etc/modules.d; ( echo "crypto_null"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-null/etc/modules.d/09-crypto-null; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-null.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcbc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcbc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcbc for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/pcbc.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e 
$mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcbc/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcbc/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcbc/etc/modules.d; ( echo "pcbc"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcbc/etc/modules.d/09-crypto-pcbc; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcbc.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcompress.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcompress mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcompress true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcompress.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rsa.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rsa mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rsa for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/mpi/mpi.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/akcipher.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/rsa_generic.ko; do if grep -q 
"${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rsa/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rsa/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rsa/etc/modules.d; ( echo "rsa_generic"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rsa/etc/modules.d/10-crypto-rsa; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rsa.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rmd160.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rmd160 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rmd160 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/rmd160.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rmd160/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rmd160/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rmd160/etc/modules.d; ( echo "rmd160"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rmd160/etc/modules.d/09-crypto-rmd160; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rmd160.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rng.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rng mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rng for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/drbg.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/jitterentropy_rng.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/rng.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rng/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rng/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rng/etc/modules.d; ( echo "drbg"; echo "jitterentropy_rng"; echo "rng"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rng/etc/modules.d/09-crypto-rng; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rng.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/rng.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-seqiv.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-seqiv mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-seqiv for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/seqiv.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-seqiv/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-seqiv/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-seqiv/etc/modules.d; ( echo "seqiv"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-seqiv/etc/modules.d/09-crypto-seqiv; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-seqiv.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha1.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha1 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha1 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/sha1_generic.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha1/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha1/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha1/etc/modules.d; ( echo "sha1_generic"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha1/etc/modules.d/09-crypto-sha1; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha1.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha256.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha256 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha256 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/sha256_generic.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/crypto/libsha256.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha256/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha256/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha256/etc/modules.d; ( echo "sha256_generic"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha256/etc/modules.d/09-crypto-sha256; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha256.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha512.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha512 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha512 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/sha512_generic.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha512/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha512/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha512/etc/modules.d; ( echo "sha512_generic"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha512/etc/modules.d/09-crypto-sha512; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha512.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-test.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-test mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-test for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/tcrypt.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-test/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-test/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-test.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-user.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-user mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-user for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/af_alg.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/algif_aead.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/algif_hash.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/algif_rng.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/algif_skcipher.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/crypto_user.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-user/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-user/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-user/etc/modules.d; ( echo "af_alg"; echo "algif_aead"; echo "algif_hash"; echo "algif_rng"; echo "algif_skcipher"; echo "crypto_user"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-user/etc/modules.d/09-crypto-user; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-user.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-wq.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-wq mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-wq true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-wq.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xts.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xts mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xts for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/xts.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xts/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xts/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xts/etc/modules.d; ( echo "xts"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xts/etc/modules.d/09-crypto-xts; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xts.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/firewire/firewire-core.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-net.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-net mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-net for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/firewire/firewire-net.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-net/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-net/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-net/etc/modules.d; ( echo "firewire-net"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-net/etc/modules.d/firewire-net; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-net.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-ohci.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-ohci mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-ohci for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/firewire/firewire-ohci.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-ohci/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-ohci/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-ohci/etc/modules.d; ( echo "firewire-ohci"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-ohci/etc/modules.d/firewire-ohci; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-ohci.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-sbp2.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-sbp2 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-sbp2 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/firewire/firewire-sbp2.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-sbp2/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-sbp2/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-sbp2/etc/modules.d; ( echo "firewire-sbp2"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-sbp2/etc/modules.d/firewire-sbp2; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-sbp2.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-autofs4.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-autofs4 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-autofs4 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/autofs/autofs4.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-autofs4/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-autofs4/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-autofs4/etc/modules.d; ( echo "autofs4"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-autofs4/etc/modules.d/30-fs-autofs4; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-autofs4.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-btrfs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-btrfs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-btrfs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/btrfs/btrfs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-btrfs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-btrfs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-btrfs/etc/modules.d; ( echo "btrfs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-btrfs/etc/modules.d/30-fs-btrfs; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-btrfs/etc/modules-boot.d; ln -sf ../modules.d/30-fs-btrfs /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-btrfs/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-btrfs.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cifs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cifs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cifs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/cifs/cifs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cifs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cifs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cifs/etc/modules.d; ( echo "cifs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cifs/etc/modules.d/30-fs-cifs; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cifs.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-configfs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-configfs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-configfs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/configfs/configfs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-configfs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-configfs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-configfs/etc/modules.d; ( echo "configfs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-configfs/etc/modules.d/30-fs-configfs; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-configfs.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cramfs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cramfs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cramfs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/cramfs/cramfs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cramfs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cramfs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cramfs/etc/modules.d; ( echo "cramfs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cramfs/etc/modules.d/30-fs-cramfs; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cramfs.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-exportfs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-exportfs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-exportfs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/exportfs/exportfs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-exportfs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-exportfs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-exportfs/etc/modules.d; ( echo "exportfs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-exportfs/etc/modules.d/20-fs-exportfs; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-exportfs/etc/modules-boot.d; ln -sf ../modules.d/20-fs-exportfs /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-exportfs/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-exportfs.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/exportfs/exportfs.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ext4.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ext4 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ext4 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/ext4/ext4.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/jbd2/jbd2.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/mbcache.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ext4/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ext4/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ext4/etc/modules.d; ( echo "ext4"; echo "jbd2"; echo "mbcache"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ext4/etc/modules.d/30-fs-ext4; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ext4/etc/modules-boot.d; ln -sf ../modules.d/30-fs-ext4 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ext4/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ext4.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-f2fs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-f2fs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-f2fs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/f2fs/f2fs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-f2fs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-f2fs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-f2fs/etc/modules.d; ( echo "f2fs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-f2fs/etc/modules.d/30-fs-f2fs; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-f2fs/etc/modules-boot.d; ln -sf ../modules.d/30-fs-f2fs /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-f2fs/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-f2fs.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-fscache.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-fscache mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-fscache for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/fscache/fscache.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-fscache/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-fscache/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-fscache/etc/modules.d; ( echo "fscache"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-fscache/etc/modules.d/29-fs-fscache; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-fscache.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/hfs/hfs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfs/etc/modules.d; ( echo "hfs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfs/etc/modules.d/30-fs-hfs; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfs.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfsplus.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfsplus mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfsplus for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/hfsplus/hfsplus.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfsplus/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfsplus/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfsplus/etc/modules.d; ( echo "hfsplus"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfsplus/etc/modules.d/30-fs-hfsplus; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfsplus.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-isofs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-isofs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-isofs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/isofs/isofs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-isofs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-isofs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-isofs/etc/modules.d; ( echo "isofs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-isofs/etc/modules.d/30-fs-isofs; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-isofs.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-jfs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-jfs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-jfs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/jfs/jfs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-jfs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-jfs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-jfs/etc/modules.d; ( echo "jfs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-jfs/etc/modules.d/30-fs-jfs; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-jfs/etc/modules-boot.d; ln -sf ../modules.d/30-fs-jfs /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-jfs/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-jfs.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-minix.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-minix mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-minix for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/minix/minix.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-minix/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-minix/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-minix/etc/modules.d; ( echo "minix"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-minix/etc/modules.d/30-fs-minix; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-minix.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-msdos.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-msdos mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-msdos for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/fat/msdos.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-msdos/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-msdos/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-msdos/etc/modules.d; ( echo "msdos"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-msdos/etc/modules.d/40-fs-msdos; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-msdos.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nfs/nfs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs/etc/modules.d; ( echo "nfs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs/etc/modules.d/40-fs-nfs; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/lockd/lockd.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sunrpc/sunrpc.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nfs_common/grace.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common/etc/modules.d; ( echo "grace"; echo "lockd"; echo "sunrpc"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common/etc/modules.d/30-fs-nfs-common; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common-rpcsec.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common-rpcsec mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common-rpcsec for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/oid_registry.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sunrpc/auth_gss/auth_rpcgss.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sunrpc/auth_gss/rpcsec_gss_krb5.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common-rpcsec/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common-rpcsec/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common-rpcsec/etc/modules.d; ( echo "auth_rpcgss"; echo "oid_registry"; echo "rpcsec_gss_krb5"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common-rpcsec/etc/modules.d/31-fs-nfs-common-rpcsec; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common-rpcsec.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v3.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v3 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v3 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nfs/nfsv3.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v3/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v3/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v3/etc/modules.d; ( echo "nfsv3"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v3/etc/modules.d/41-fs-nfs-v3; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v3.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v4.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v4 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v4 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nfs/nfsv4.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v4/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v4/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v4/etc/modules.d; ( echo "nfsv4"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v4/etc/modules.d/41-fs-nfs-v4; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v4.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfsd.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfsd mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfsd for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nfsd/nfsd.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfsd/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfsd/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfsd/etc/modules.d; ( echo "nfsd"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfsd/etc/modules.d/40-fs-nfsd; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfsd.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ntfs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ntfs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ntfs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/ntfs/ntfs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ntfs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ntfs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ntfs/etc/modules.d; ( echo "ntfs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ntfs/etc/modules.d/30-fs-ntfs; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ntfs.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-reiserfs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-reiserfs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-reiserfs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/reiserfs/reiserfs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-reiserfs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-reiserfs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-reiserfs/etc/modules.d; ( echo "reiserfs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-reiserfs/etc/modules.d/30-fs-reiserfs; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-reiserfs/etc/modules-boot.d; ln -sf ../modules.d/30-fs-reiserfs /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-reiserfs/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-reiserfs.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-squashfs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-squashfs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-squashfs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/squashfs/squashfs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-squashfs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-squashfs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-squashfs/etc/modules.d; ( echo "squashfs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-squashfs/etc/modules.d/30-fs-squashfs; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-squashfs/etc/modules-boot.d; ln -sf ../modules.d/30-fs-squashfs /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-squashfs/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-squashfs.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/squashfs/squashfs.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-udf.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-udf mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-udf for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/udf/udf.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-udf/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-udf/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-udf/etc/modules.d; ( echo "udf"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-udf/etc/modules.d/30-fs-udf; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-udf.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-vfat.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-vfat mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-vfat for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/fat/fat.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/fat/vfat.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-vfat/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-vfat/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-vfat/etc/modules.d; ( echo "fat"; echo "vfat"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-vfat/etc/modules.d/30-fs-vfat; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-vfat.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-xfs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-xfs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-xfs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/xfs/xfs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-xfs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-xfs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-xfs/etc/modules.d; ( echo "xfs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-xfs/etc/modules.d/30-fs-xfs; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-xfs/etc/modules-boot.d; ln -sf ../modules.d/30-fs-xfs /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-xfs/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-xfs.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fuse.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fuse mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fuse for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/fuse/fuse.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fuse/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fuse/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fuse/etc/modules.d; ( echo "fuse"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fuse/etc/modules.d/80-fuse; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fuse.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/hwmon.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ad7418.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ad7418 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ad7418 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/ad7418.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ad7418/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ad7418/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ad7418/etc/modules.d; ( echo "ad7418"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ad7418/etc/modules.d/60-hwmon-ad7418; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ad7418.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ads1015.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ads1015 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ads1015 true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ads1015.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7410.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7410 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7410 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/adt7x10.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/adt7410.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7410/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7410/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7410/etc/modules.d; ( echo "adt7410"; echo "adt7x10"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7410/etc/modules.d/60-hwmon-adt7410; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7410.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7475.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7475 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7475 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/adt7475.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7475/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7475/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7475/etc/modules.d; ( echo "adt7475"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7475/etc/modules.d/hwmon-adt7475; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7475.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-dme1737.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-dme1737 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-dme1737 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/dme1737.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-dme1737/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-dme1737/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-dme1737/etc/modules.d; ( echo "dme1737"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-dme1737/etc/modules.d/hwmon-dme1737; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-dme1737.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-drivetemp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-drivetemp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-drivetemp for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/drivetemp.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-drivetemp/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-drivetemp/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-drivetemp/etc/modules.d; ( echo "drivetemp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-drivetemp/etc/modules.d/60-hwmon-drivetemp; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-drivetemp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-gpiofan.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-gpiofan mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-gpiofan for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/gpio-fan.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-gpiofan/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-gpiofan/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-gpiofan/etc/modules.d; ( echo "gpio-fan"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-gpiofan/etc/modules.d/60-hwmon-gpiofan; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-gpiofan.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina209.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina209 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina209 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/ina209.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina209/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina209/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina209/etc/modules.d; ( echo "ina209"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina209/etc/modules.d/hwmon-ina209; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina209.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina2xx.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina2xx mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina2xx for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/ina2xx.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina2xx/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina2xx/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina2xx/etc/modules.d; ( echo "ina2xx"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina2xx/etc/modules.d/hwmon-ina2xx; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina2xx.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-it87.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-it87 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-it87 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/it87.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-it87/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-it87/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-it87/etc/modules.d; ( echo "it87"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-it87/etc/modules.d/hwmon-it87; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-it87.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm63.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm63 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm63 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/lm63.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm63/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm63/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm63/etc/modules.d; ( echo "lm63"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm63/etc/modules.d/hwmon-lm63; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm63.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm75.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm75 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm75 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/lm75.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm75/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm75/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm75/etc/modules.d; ( echo "lm75"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm75/etc/modules.d/hwmon-lm75; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm75.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm77.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm77 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm77 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/lm77.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm77/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm77/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm77/etc/modules.d; ( echo "lm77"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm77/etc/modules.d/hwmon-lm77; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm77.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm85.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm85 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm85 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/lm85.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm85/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm85/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm85/etc/modules.d; ( echo "lm85"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm85/etc/modules.d/hwmon-lm85; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm85.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm90.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm90 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm90 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/lm90.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm90/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm90/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm90/etc/modules.d; ( echo "lm90"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm90/etc/modules.d/hwmon-lm90; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm90.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm92.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm92 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm92 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/lm92.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm92/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm92/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm92/etc/modules.d; ( echo "lm92"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm92/etc/modules.d/hwmon-lm92; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm92.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm95241.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm95241 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm95241 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/lm95241.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm95241/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm95241/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm95241/etc/modules.d; ( echo "lm95241"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm95241/etc/modules.d/hwmon-lm95241; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm95241.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ltc4151.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ltc4151 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ltc4151 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/ltc4151.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ltc4151/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ltc4151/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ltc4151/etc/modules.d; ( echo "ltc4151"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ltc4151/etc/modules.d/hwmon-ltc4151; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ltc4151.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-mcp3021.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-mcp3021 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-mcp3021 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/mcp3021.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-mcp3021/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-mcp3021/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-mcp3021/etc/modules.d; ( echo "mcp3021"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-mcp3021/etc/modules.d/hwmon-mcp3021; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-mcp3021.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/pmbus/pmbus_core.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-zl6100.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-zl6100 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-zl6100 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/pmbus/zl6100.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-zl6100/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-zl6100/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-zl6100/etc/modules.d; ( echo "zl6100"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-zl6100/etc/modules.d/pmbus-zl6100; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-zl6100.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-pwmfan.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-pwmfan mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-pwmfan true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-pwmfan.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sch5627.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sch5627 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sch5627 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/sch5627.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/sch56xx-common.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sch5627/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sch5627/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sch5627/etc/modules.d; ( echo "sch5627"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sch5627/etc/modules.d/hwmon-sch5627; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sch5627.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sht21.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sht21 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sht21 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/sht21.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sht21/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sht21/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sht21/etc/modules.d; ( echo "sht21"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sht21/etc/modules.d/hwmon-sht21; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sht21.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp102.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp102 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp102 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/tmp102.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp102/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp102/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp102/etc/modules.d; ( echo "tmp102"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp102/etc/modules.d/hwmon-tmp102; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp102.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp103.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp103 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp103 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/tmp103.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp103/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp103/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp103/etc/modules.d; ( echo "tmp103"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp103/etc/modules.d/hwmon-tmp103; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp103.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp421.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp421 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp421 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/tmp421.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp421/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp421/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp421/etc/modules.d; ( echo "tmp421"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp421/etc/modules.d/60-hwmon-tmp421; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp421.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-vid.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-vid mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-vid for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/hwmon-vid.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-vid/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-vid/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-vid/etc/modules.d; ( echo "hwmon-vid"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-vid/etc/modules.d/41-hwmon-vid; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-vid.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-w83793.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-w83793 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-w83793 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/w83793.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-w83793/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-w83793/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-w83793/etc/modules.d; ( echo "w83793"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-w83793/etc/modules.d/hwmon-w83793; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-w83793.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adcxx.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adcxx mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adcxx for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hwmon/adcxx.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adcxx/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adcxx/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adcxx/etc/modules.d; ( echo "adcxx"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adcxx/etc/modules.d/60-hwmon-adcxx; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adcxx.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/i2c-core.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/i2c-dev.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-core/etc/modules.d; ( echo "i2c-core"; echo "i2c-dev"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-core/etc/modules.d/51-i2c-core; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-core.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/i2c-core.ko' is built-in. NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/i2c-dev.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-bit.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-bit mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-bit for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/algos/i2c-algo-bit.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-bit/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-bit/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-bit/etc/modules.d; ( echo "i2c-algo-bit"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-bit/etc/modules.d/55-i2c-algo-bit; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-bit.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/algos/i2c-algo-bit.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pca.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pca mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pca for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/algos/i2c-algo-pca.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pca/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pca/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pca/etc/modules.d; ( echo "i2c-algo-pca"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pca/etc/modules.d/55-i2c-algo-pca; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pca.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pcf.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pcf mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pcf for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/algos/i2c-algo-pcf.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pcf/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pcf/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pcf/etc/modules.d; ( echo "i2c-algo-pcf"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pcf/etc/modules.d/55-i2c-algo-pcf; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pcf.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-gpio.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-gpio mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-gpio for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/busses/i2c-gpio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-gpio/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-gpio/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-gpio/etc/modules.d; ( echo "i2c-gpio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-gpio/etc/modules.d/59-i2c-gpio; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-gpio.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/busses/i2c-gpio.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/i2c-mux.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux/etc/modules.d; ( echo "i2c-mux"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux/etc/modules.d/51-i2c-mux; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-gpio.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-gpio mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-gpio for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/muxes/i2c-mux-gpio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-gpio/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-gpio/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-gpio/etc/modules.d; ( echo "i2c-mux-gpio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-gpio/etc/modules.d/51-i2c-mux-gpio; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-gpio.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca9541.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca9541 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca9541 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/muxes/i2c-mux-pca9541.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca9541/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca9541/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca9541/etc/modules.d; ( echo "i2c-mux-pca9541"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca9541/etc/modules.d/51-i2c-mux-pca9541; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca9541.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca954x.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca954x mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca954x for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/muxes/i2c-mux-pca954x.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca954x/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca954x/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca954x/etc/modules.d; ( echo "i2c-mux-pca954x"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca954x/etc/modules.d/51-i2c-mux-pca954x; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca954x.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-pxa.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-pxa mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-pxa true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-pxa.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-smbus.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-smbus mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-smbus for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/i2c-smbus.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-smbus/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-smbus/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-smbus/etc/modules.d; ( echo "i2c-smbus"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-smbus/etc/modules.d/58-i2c-smbus; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-smbus.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-tiny-usb.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-tiny-usb mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-tiny-usb for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/i2c/busses/i2c-tiny-usb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-tiny-usb/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-tiny-usb/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-tiny-usb/etc/modules.d; ( echo "i2c-tiny-usb"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-tiny-usb/etc/modules.d/59-i2c-tiny-usb; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-tiny-usb.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/industrialio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-core/etc/modules.d; ( echo "industrialio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-core/etc/modules.d/55-iio-core; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-kfifo-buf.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-kfifo-buf mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-kfifo-buf for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/buffer/kfifo_buf.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-kfifo-buf/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-kfifo-buf/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-kfifo-buf/etc/modules.d; ( echo "kfifo_buf"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-kfifo-buf/etc/modules.d/55-iio-kfifo-buf; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-kfifo-buf.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-industrialio-triggered-buffer.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-industrialio-triggered-buffer mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-industrialio-triggered-buffer for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/buffer/industrialio-triggered-buffer.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-industrialio-triggered-buffer/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-industrialio-triggered-buffer/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-industrialio-triggered-buffer/etc/modules.d; ( echo "industrialio-triggered-buffer"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-industrialio-triggered-buffer/etc/modules.d/55-industrialio-triggered-buffer; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-industrialio-triggered-buffer.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ad799x.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ad799x mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ad799x for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/adc/ad799x.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ad799x/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ad799x/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ad799x/etc/modules.d; ( echo "ad799x"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ad799x/etc/modules.d/56-iio-ad799x; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ad799x.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-hmc5843.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-hmc5843 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-hmc5843 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/magnetometer/hmc5843_i2c.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/magnetometer/hmc5843_core.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-hmc5843/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-hmc5843/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-hmc5843/etc/modules.d; ( echo "hmc5843"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-hmc5843/etc/modules.d/56-iio-hmc5843; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-hmc5843.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bh1750.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bh1750 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bh1750 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/light/bh1750.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bh1750/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bh1750/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bh1750/etc/modules.d; ( echo "bh1750"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bh1750/etc/modules.d/56-iio-bh1750; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bh1750.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-am2315.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-am2315 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-am2315 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/humidity/am2315.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-am2315/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-am2315/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-am2315/etc/modules.d; ( echo "am2315"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-am2315/etc/modules.d/56-iio-am2315; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-am2315.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-dht11.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-dht11 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-dht11 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/humidity/dht11.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-dht11/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-dht11/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-dht11/etc/modules.d; ( echo "dht11"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-dht11/etc/modules.d/56-iio-dht11; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-dht11.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/chemical/bme680_core.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-i2c.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-i2c mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-i2c for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/chemical/bme680_i2c.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-i2c/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-i2c/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-i2c/etc/modules.d; ( echo "bme680-i2c"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-i2c/etc/modules.d/iio-bme680-i2c; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-i2c.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-spi.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-spi mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-spi for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/chemical/bme680_spi.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-spi/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-spi/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-spi/etc/modules.d; ( echo "bme680-spi"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-spi/etc/modules.d/iio-bme680-spi; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-spi.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/pressure/bmp280.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-i2c.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-i2c mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-i2c for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/pressure/bmp280-i2c.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-i2c/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-i2c/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-i2c/etc/modules.d; ( echo "bmp280-i2c"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-i2c/etc/modules.d/iio-bmp280-i2c; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-i2c.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-spi.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-spi mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-spi for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/pressure/bmp280-spi.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-spi/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-spi/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-spi/etc/modules.d; ( echo "bmp280-spi"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-spi/etc/modules.d/iio-bmp280-spi; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-spi.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-htu21.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-htu21 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-htu21 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/humidity/htu21.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/common/ms_sensors/ms_sensors_i2c.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-htu21/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-htu21/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-htu21/etc/modules.d; ( echo "htu21"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-htu21/etc/modules.d/56-iio-htu21; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-htu21.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ccs811.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ccs811 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ccs811 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/chemical/ccs811.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ccs811/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ccs811/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ccs811/etc/modules.d; ( echo "ccs811"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ccs811/etc/modules.d/56-iio-ccs811; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ccs811.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-si7020.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-si7020 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-si7020 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/humidity/si7020.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-si7020/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-si7020/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-si7020/etc/modules.d; ( echo "si7020"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-si7020/etc/modules.d/56-iio-si7020; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-si7020.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/accel/st_accel.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/common/st_sensors/st_sensors.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-i2c.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-i2c mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-i2c for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/accel/st_accel_i2c.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/common/st_sensors/st_sensors_i2c.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-i2c/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-i2c/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-i2c/etc/modules.d; ( echo "st_accel_i2c"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-i2c/etc/modules.d/56-iio-st_accel-i2c; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-i2c.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-spi.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-spi mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-spi for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/accel/st_accel_spi.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/common/st_sensors/st_sensors_spi.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-spi/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-spi/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-spi/etc/modules.d; ( echo "st_accel_spi"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-spi/etc/modules.d/56-iio-st_accel-spi; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-spi.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/imu/st_lsm6dsx/st_lsm6dsx.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx/etc/modules.d; ( echo "st_lsm6dsx"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx/etc/modules.d/iio-lsm6dsx; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-i2c.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-i2c mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-i2c for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/imu/st_lsm6dsx/st_lsm6dsx_i2c.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-i2c/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-i2c/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-i2c/etc/modules.d; ( echo "st_lsm6dsx-i2c"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-i2c/etc/modules.d/iio-lsm6dsx-i2c; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-i2c.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-spi.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-spi mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-spi for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/imu/st_lsm6dsx/st_lsm6dsx_spi.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-spi/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-spi/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-spi/etc/modules.d; ( echo "st_lsm6dsx-spi"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-spi/etc/modules.d/iio-lsm6dsx-spi; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-spi.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-sps30.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-sps30 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-sps30 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/chemical/sps30.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-sps30/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-sps30/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-sps30/etc/modules.d; ( echo "sps30"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-sps30/etc/modules.d/iio-sps30; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-sps30.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-tsl4531.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-tsl4531 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-tsl4531 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/light/tsl4531.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-tsl4531/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-tsl4531/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-tsl4531/etc/modules.d; ( echo "tsl4531"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-tsl4531/etc/modules.d/56-iio-tsl4531; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-tsl4531.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/imu/fxos8700_core.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700/etc/modules.d; ( echo "fxos8700"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700/etc/modules.d/56-iio-fxos8700; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-i2c.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-i2c mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-i2c for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/imu/fxos8700_i2c.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-i2c/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-i2c/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-i2c/etc/modules.d; ( echo "fxos8700_i2c"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-i2c/etc/modules.d/56-iio-fxos8700-i2c; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-i2c.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-spi.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-spi mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-spi for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/iio/imu/fxos8700_spi.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-spi/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-spi/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-spi/etc/modules.d; ( echo "fxos8700_spi"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-spi/etc/modules.d/56-iio-fxos8700-spi; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-spi.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hid/hid.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid/etc/modules.d; ( echo "hid"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid/etc/modules.d/61-hid; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid-generic.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid-generic mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid-generic for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hid/hid-generic.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid-generic/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid-generic/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid-generic/etc/modules.d; ( echo "hid-generic"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid-generic/etc/modules.d/hid-generic; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid-generic.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/input-core.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-evdev.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-evdev mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-evdev for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/evdev.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-evdev/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-evdev/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-evdev/etc/modules.d; ( echo "evdev"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-evdev/etc/modules.d/60-input-evdev; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-evdev.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/keyboard/gpio_keys.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys/etc/modules.d; ( echo "gpio_keys"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys/etc/modules.d/input-gpio-keys; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys/etc/modules-boot.d; ln -sf ../modules.d/input-gpio-keys /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys-polled.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys-polled mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys-polled for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/keyboard/gpio_keys_polled.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys-polled/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys-polled/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys-polled/etc/modules.d; ( echo "gpio_keys_polled"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys-polled/etc/modules.d/input-gpio-keys-polled; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys-polled/etc/modules-boot.d; ln -sf ../modules.d/input-gpio-keys-polled /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys-polled/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys-polled.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-encoder.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-encoder mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-encoder for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/misc/rotary_encoder.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-encoder/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-encoder/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-encoder/etc/modules.d; ( echo "rotary_encoder"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-encoder/etc/modules.d/input-gpio-encoder; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-encoder.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-joydev.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-joydev mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-joydev for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/joydev.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-joydev/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-joydev/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-joydev/etc/modules.d; ( echo "joydev"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-joydev/etc/modules.d/input-joydev; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-joydev.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-polldev.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-polldev mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-polldev for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/input-polldev.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-polldev/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-polldev/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-polldev.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-matrixkmap.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-matrixkmap mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-matrixkmap for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/matrix-keymap.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-matrixkmap/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-matrixkmap/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-matrixkmap/etc/modules.d; ( echo "matrix-keymap"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-matrixkmap/etc/modules.d/input-matrixkmap; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-matrixkmap.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-touchscreen-ads7846.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-touchscreen-ads7846 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-touchscreen-ads7846 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/touchscreen/ads7846.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/touchscreen/of_touchscreen.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-touchscreen-ads7846/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-touchscreen-ads7846/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-touchscreen-ads7846/etc/modules.d; ( echo "ads7846"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-touchscreen-ads7846/etc/modules.d/input-touchscreen-ads7846; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-touchscreen-ads7846.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-uinput.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-uinput mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-uinput for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/misc/uinput.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-uinput/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-uinput/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-uinput/etc/modules.d; ( echo "uinput"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-uinput/etc/modules.d/input-uinput; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-uinput.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-gpio.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-gpio mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-gpio for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/leds-gpio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-gpio/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-gpio/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-gpio/etc/modules.d; ( echo "leds-gpio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-gpio/etc/modules.d/60-leds-gpio; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-gpio/etc/modules-boot.d; ln -sf ../modules.d/60-leds-gpio /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-gpio/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-gpio.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/leds-gpio.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-activity.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-activity mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-activity for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-activity.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-activity/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-activity/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-activity/etc/modules.d; ( echo "ledtrig-activity"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-activity/etc/modules.d/50-ledtrig-activity; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-activity.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-heartbeat.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-heartbeat mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-heartbeat for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-heartbeat.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-heartbeat/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-heartbeat/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-heartbeat/etc/modules.d; ( echo "ledtrig-heartbeat"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-heartbeat/etc/modules.d/50-ledtrig-heartbeat; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-heartbeat.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-gpio.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-gpio mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-gpio for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-gpio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-gpio/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-gpio/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-gpio/etc/modules.d; ( echo "ledtrig-gpio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-gpio/etc/modules.d/50-ledtrig-gpio; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-gpio.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-netdev.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-netdev mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-netdev for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-netdev.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-netdev/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-netdev/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-netdev/etc/modules.d; ( echo "ledtrig-netdev"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-netdev/etc/modules.d/50-ledtrig-netdev; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-netdev.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-netdev.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-default-on.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-default-on mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-default-on for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-default-on.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-default-on/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-default-on/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-default-on/etc/modules.d; ( echo "ledtrig-default-on"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-default-on/etc/modules.d/50-ledtrig-default-on; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-default-on/etc/modules-boot.d; ln -sf ../modules.d/50-ledtrig-default-on /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-default-on/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-default-on.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-default-on.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-timer.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-timer mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-timer for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-timer.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-timer/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-timer/lib/modules/5.4.63/ 
; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-timer/etc/modules.d; ( echo "ledtrig-timer"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-timer/etc/modules.d/50-ledtrig-timer; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-timer/etc/modules-boot.d; ln -sf ../modules.d/50-ledtrig-timer /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-timer/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-timer.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-timer.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-transient.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-transient mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-transient for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-transient.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-transient/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-transient/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-transient/etc/modules.d; ( echo "ledtrig-transient"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-transient/etc/modules.d/50-ledtrig-transient; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-transient/etc/modules-boot.d; ln -sf ../modules.d/50-ledtrig-transient /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-transient/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-transient.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-oneshot.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-oneshot mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-oneshot for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/trigger/ledtrig-oneshot.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-oneshot/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-oneshot/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-oneshot/etc/modules.d; ( echo "ledtrig-oneshot"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-oneshot/etc/modules.d/50-ledtrig-oneshot; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-oneshot.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-pca963x.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-pca963x mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-pca963x for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/leds-pca963x.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-pca963x/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-pca963x/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-pca963x/etc/modules.d; ( echo "leds-pca963x"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-pca963x/etc/modules.d/60-leds-pca963x; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-pca963x/etc/modules-boot.d; ln -sf ../modules.d/60-leds-pca963x /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-pca963x/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-pca963x.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-ccitt.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-ccitt mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-ccitt for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/crc-ccitt.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-ccitt/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-ccitt/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-ccitt/etc/modules.d; ( echo "crc-ccitt"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-ccitt/etc/modules.d/lib-crc-ccitt; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-ccitt.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-itu-t.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-itu-t mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-itu-t for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/crc-itu-t.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-itu-t/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-itu-t/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-itu-t/etc/modules.d; ( echo "crc-itu-t"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-itu-t/etc/modules.d/lib-crc-itu-t; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-itu-t.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc7.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc7 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc7 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/crc7.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc7/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc7/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc7/etc/modules.d; ( echo "crc7"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc7/etc/modules.d/lib-crc7; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc7.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc8.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc8 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc8 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/crc8.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc8/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc8/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc8/etc/modules.d; ( echo "crc8"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc8/etc/modules.d/lib-crc8; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc8.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc16.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc16 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc16 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/crc16.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc16/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc16/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc16/etc/modules.d; ( echo "crc16"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc16/etc/modules.d/20-lib-crc16; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc16/etc/modules-boot.d; ln -sf ../modules.d/20-lib-crc16 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc16/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc16.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc32c.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc32c mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc32c for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/libcrc32c.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc32c/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc32c/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc32c/etc/modules.d; ( echo "libcrc32c"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc32c/etc/modules.d/lib-crc32c; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc32c.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lzo.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lzo mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lzo for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/lzo.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/lzo/lzo_compress.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/lzo/lzo_decompress.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lzo/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lzo/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lzo/etc/modules.d; ( echo "lzo"; echo "lzo_compress"; echo "lzo_decompress"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lzo/etc/modules.d/lib-lzo; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lzo.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zstd.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zstd mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zstd for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/xxhash.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/zstd/zstd_compress.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/zstd/zstd_decompress.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zstd/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zstd/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zstd/etc/modules.d; ( echo "xxhash"; echo "zstd_compress"; echo "zstd_decompress"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zstd/etc/modules.d/lib-zstd; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zstd.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lz4.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lz4 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lz4 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/lz4.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/lz4/lz4_compress.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/lz4/lz4_decompress.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lz4/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lz4/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lz4/etc/modules.d; ( echo "lz4"; echo "lz4_compress"; echo "lz4_decompress"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lz4/etc/modules.d/lib-lz4; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lz4.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-raid6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-raid6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-raid6 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/raid6/raid6_pq.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-raid6/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-raid6/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-raid6/etc/modules.d; ( echo "raid6_pq"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-raid6/etc/modules.d/lib-raid6; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-raid6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-xor.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-xor mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-xor for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/crypto/xor.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-xor/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-xor/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-xor/etc/modules.d; ( echo "xor"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-xor/etc/modules.d/lib-xor; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-xor.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-textsearch.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-textsearch mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-textsearch for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/ts_kmp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/ts_bm.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/ts_fsm.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-textsearch/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-textsearch/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-textsearch/etc/modules.d; ( echo "ts_bm"; echo "ts_fsm"; echo "ts_kmp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-textsearch/etc/modules.d/lib-textsearch; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-textsearch.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-inflate.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-inflate mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-inflate for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/zlib_inflate/zlib_inflate.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-inflate/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-inflate/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-inflate/etc/modules.d; ( echo "zlib_inflate"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-inflate/etc/modules.d/lib-zlib-inflate; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-inflate.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-deflate.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-deflate mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-deflate for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/zlib_deflate/zlib_deflate.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-deflate/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-deflate/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-deflate/etc/modules.d; ( echo "zlib_deflate"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-deflate/etc/modules.d/lib-zlib-deflate; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-deflate.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-cordic.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-cordic mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-cordic for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/math/cordic.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-cordic/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-cordic/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-cordic/etc/modules.d; ( echo "cordic"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-cordic/etc/modules.d/lib-cordic; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-cordic.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-asn1-decoder.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-asn1-decoder mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-asn1-decoder for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/asn1_decoder.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-asn1-decoder/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-asn1-decoder/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-asn1-decoder.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis190.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis190 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis190 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/sis/sis190.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis190/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis190/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis190/etc/modules.d; ( echo "sis190"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis190/etc/modules.d/sis190; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis190.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-skge.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-skge mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-skge for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/marvell/skge.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-skge/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-skge/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-skge/etc/modules.d; ( echo "skge"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-skge/etc/modules.d/skge; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-skge.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-alx.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-alx mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-alx for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/atheros/alx/alx.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-alx/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-alx/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-alx/etc/modules.d; ( echo "alx"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-alx/etc/modules.d/alx; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-alx.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl2.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl2 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl2 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/atheros/atlx/atl2.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl2/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl2/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl2/etc/modules.d; ( echo "atl2"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl2/etc/modules.d/atl2; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl2.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/atheros/atlx/atl1.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1/etc/modules.d; ( echo "atl1"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1/etc/modules.d/atl1; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1c.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1c mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1c for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/atheros/atl1c/atl1c.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1c/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1c/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1c/etc/modules.d; ( echo "atl1c"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1c/etc/modules.d/atl1c; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1c.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1e.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1e mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1e for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/atheros/atl1e/atl1e.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1e/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1e/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1e/etc/modules.d; ( echo "atl1e"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1e/etc/modules.d/atl1e; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1e.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-libphy.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-libphy mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-libphy for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/libphy.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-libphy/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-libphy/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-libphy/etc/modules.d; ( echo "libphy"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-libphy/etc/modules.d/15-libphy; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-libphy/etc/modules-boot.d; ln -sf ../modules.d/15-libphy /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-libphy/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-libphy.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/libphy.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylink.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylink mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylink true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylink.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mii.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mii mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mii for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/mii.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e 
$mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mii/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mii/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mii/etc/modules.d; ( echo "mii"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mii/etc/modules.d/15-mii; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mii/etc/modules-boot.d; ln -sf ../modules.d/15-mii /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mii/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mii.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio-gpio.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio-gpio mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio-gpio for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/mdio-gpio.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/mdio-bitbang.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio-gpio/lib/modules/5.4.63 ; cp -fpR 
-L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio-gpio/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio-gpio/etc/modules.d; ( echo "mdio-gpio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio-gpio/etc/modules.d/mdio-gpio; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio-gpio.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/mdio-gpio.ko' is built-in. NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/mdio-bitbang.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-et131x.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-et131x mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-et131x for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/agere/et131x.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-et131x/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-et131x/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-et131x/etc/modules.d; ( echo "et131x"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-et131x/etc/modules.d/et131x; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-et131x.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylib-broadcom.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylib-broadcom mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylib-broadcom for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/bcm-phy-lib.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylib-broadcom/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylib-broadcom/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylib-broadcom/etc/modules.d; ( echo "bcm-phy-lib"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylib-broadcom/etc/modules.d/17-phylib-broadcom; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylib-broadcom.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/bcm-phy-lib.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-broadcom.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-broadcom mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-broadcom for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/broadcom.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-broadcom/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-broadcom/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-broadcom/etc/modules.d; ( echo "broadcom"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-broadcom/etc/modules.d/18-phy-broadcom; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-broadcom/etc/modules-boot.d; ln -sf ../modules.d/18-phy-broadcom /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-broadcom/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-broadcom.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/broadcom.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-bcm84881.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-bcm84881 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-bcm84881 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/bcm84881.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-bcm84881/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-bcm84881/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-bcm84881/etc/modules.d; ( echo "bcm84881"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-bcm84881/etc/modules.d/18-phy-bcm84881; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-bcm84881/etc/modules-boot.d; ln -sf ../modules.d/18-phy-bcm84881 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-bcm84881/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-bcm84881.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-realtek.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-realtek mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-realtek for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/realtek.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-realtek/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-realtek/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-realtek/etc/modules.d; ( echo "realtek"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-realtek/etc/modules.d/18-phy-realtek; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-realtek/etc/modules-boot.d; ln -sf ../modules.d/18-phy-realtek /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-realtek/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-realtek.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-swconfig.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-swconfig mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-swconfig for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/swconfig.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-swconfig/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-swconfig/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-swconfig/etc/modules.d; ( echo "swconfig"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-swconfig/etc/modules.d/41-swconfig; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-swconfig.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/swconfig.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/b53/b53_common.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx/etc/modules.d; ( echo "b53_common"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx/etc/modules.d/42-switch-bcm53xx; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx-mdio.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx-mdio mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx-mdio for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/b53/b53_mdio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx-mdio/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx-mdio/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx-mdio/etc/modules.d; ( echo "b53_mdio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx-mdio/etc/modules.d/42-switch-bcm53xx-mdio; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx-mdio.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-mvsw61xx.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-mvsw61xx mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-mvsw61xx for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/mvsw61xx.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-mvsw61xx/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-mvsw61xx/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-mvsw61xx/etc/modules.d; ( echo "mvsw61xx"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-mvsw61xx/etc/modules.d/42-switch-mvsw61xx; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-mvsw61xx.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-ip17xx.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-ip17xx mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-ip17xx for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/ip17xx.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-ip17xx/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-ip17xx/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-ip17xx/etc/modules.d; ( echo "ip17xx"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-ip17xx/etc/modules.d/42-switch-ip17xx; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-ip17xx.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/ip17xx.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8306.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8306 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8306 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/rtl8306.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8306/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8306/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8306/etc/modules.d; ( echo "rtl8306"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8306/etc/modules.d/43-switch-rtl8306; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8306.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366-smi.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366-smi mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366-smi for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/rtl8366_smi.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366-smi/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366-smi/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366-smi/etc/modules.d; ( echo "rtl8366_smi"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366-smi/etc/modules.d/42-switch-rtl8366-smi; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366-smi/etc/modules-boot.d; ln -sf ../modules.d/42-switch-rtl8366-smi /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366-smi/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366-smi.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/rtl8366_smi.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366rb.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366rb mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366rb for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/rtl8366rb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366rb/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366rb/lib/modules/5.4.63/ ; else echo 
"ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366rb/etc/modules.d; ( echo "rtl8366rb"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366rb/etc/modules.d/43-switch-rtl8366rb; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366rb.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/rtl8366rb.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366s.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366s mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366s for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/rtl8366s.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366s/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366s/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366s/etc/modules.d; ( echo "rtl8366s"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366s/etc/modules.d/43-switch-rtl8366s; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366s.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/rtl8366s.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8367b.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8367b mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8367b for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/rtl8367b.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8367b/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8367b/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8367b/etc/modules.d; ( echo "rtl8367b"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8367b/etc/modules.d/43-switch-rtl8367b; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8367b/etc/modules-boot.d; ln -sf ../modules.d/43-switch-rtl8367b /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8367b/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8367b.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-natsemi.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-natsemi mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-natsemi for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/natsemi/natsemi.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-natsemi/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-natsemi/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-natsemi/etc/modules.d; ( echo "natsemi"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-natsemi/etc/modules.d/20-natsemi; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-natsemi.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r6040.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r6040 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r6040 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/rdc/r6040.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r6040/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r6040/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r6040/etc/modules.d; ( echo "r6040"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r6040/etc/modules.d/r6040; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r6040.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-niu.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-niu mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-niu for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/sun/niu.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-niu/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-niu/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-niu/etc/modules.d; ( echo "niu"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-niu/etc/modules.d/niu; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-niu.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis900.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis900 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis900 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/sis/sis900.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis900/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis900/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis900/etc/modules.d; ( echo "sis900"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis900/etc/modules.d/sis900; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis900.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sky2.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sky2 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sky2 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/marvell/sky2.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sky2/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sky2/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sky2/etc/modules.d; ( echo "sky2"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sky2/etc/modules.d/sky2; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sky2.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-rhine.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-rhine mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-rhine for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/via/via-rhine.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-rhine/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-rhine/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-rhine/etc/modules.d; ( echo "via-rhine"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-rhine/etc/modules.d/via-rhine; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-rhine.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-velocity.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-velocity mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-velocity for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/via/via-velocity.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-velocity/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-velocity/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-velocity/etc/modules.d; ( echo "via-velocity"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-velocity/etc/modules.d/via-velocity; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-velocity.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139too.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139too mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139too for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/realtek/8139too.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139too/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139too/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139too/etc/modules.d; ( echo "8139too"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139too/etc/modules.d/8139too; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139too.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139cp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139cp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139cp for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/realtek/8139cp.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139cp/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139cp/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139cp/etc/modules.d; ( echo "8139cp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139cp/etc/modules.d/8139cp; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139cp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r8169.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r8169 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r8169 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/realtek/r8169.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r8169/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r8169/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r8169/etc/modules.d; ( echo "r8169"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r8169/etc/modules.d/r8169; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r8169.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ne2k-pci.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ne2k-pci mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ne2k-pci for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/8390/ne2k-pci.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/8390/8390.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ne2k-pci/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ne2k-pci/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ne2k-pci/etc/modules.d; ( echo "8390"; echo "ne2k-pci"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ne2k-pci/etc/modules.d/ne2k-pci; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ne2k-pci.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e100.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e100 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e100 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/intel/e100.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e100/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e100/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e100/etc/modules.d; ( echo "e100"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e100/etc/modules.d/e100; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e100.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e1000.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e1000 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e1000 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/intel/e1000/e1000.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e1000/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e1000/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e1000/etc/modules.d; ( echo "e1000"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e1000/etc/modules.d/35-e1000; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e1000.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-igb.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-igb mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-igb for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/intel/igb/igb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-igb/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-igb/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-igb/etc/modules.d; ( echo "igb"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-igb/etc/modules.d/35-igb; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-igb.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbe.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbe mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbe for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/intel/ixgbe/ixgbe.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbe/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbe/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbe/etc/modules.d; ( echo "ixgbe"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbe/etc/modules.d/35-ixgbe; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbe.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbevf.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbevf mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbevf true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbevf.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i40e.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i40e mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i40e for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/intel/i40e/i40e.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i40e/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i40e/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i40e/etc/modules.d; ( echo "i40e"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i40e/etc/modules.d/i40e; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i40e.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iavf.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iavf mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iavf true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iavf.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-b44.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-b44 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-b44 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/broadcom/b44.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-b44/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-b44/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-b44/etc/modules.d; ( echo "b44"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-b44/etc/modules.d/19-b44; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-b44/etc/modules-boot.d; ln -sf ../modules.d/19-b44 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-b44/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-b44.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-3c59x.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-3c59x mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-3c59x for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/3com/3c59x.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-3c59x/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-3c59x/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-3c59x/etc/modules.d; ( echo "3c59x"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-3c59x/etc/modules.d/3c59x; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-3c59x.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pcnet32.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pcnet32 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pcnet32 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/amd/pcnet32.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pcnet32/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pcnet32/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pcnet32/etc/modules.d; ( echo "pcnet32"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pcnet32/etc/modules.d/pcnet32; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pcnet32.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tg3.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tg3 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tg3 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/broadcom/tg3.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tg3/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tg3/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tg3/etc/modules.d; ( echo "tg3"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tg3/etc/modules.d/19-tg3; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tg3/etc/modules-boot.d; ln -sf ../modules.d/19-tg3 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tg3/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tg3.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcpci.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcpci mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcpci for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/isdn/hardware/mISDN/hfcpci.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcpci/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcpci/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcpci/etc/modules.d; ( echo "hfcpci"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcpci/etc/modules.d/31-hfcpci; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcpci.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcmulti.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcmulti mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcmulti for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/isdn/hardware/mISDN/hfcmulti.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcmulti/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcmulti/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcmulti/etc/modules.d; ( echo "hfcmulti"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcmulti/etc/modules.d/31-hfcmulti; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcmulti.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macvlan.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macvlan mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macvlan for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/macvlan.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macvlan/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macvlan/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macvlan/etc/modules.d; ( echo "macvlan"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macvlan/etc/modules.d/macvlan; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macvlan.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tulip.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tulip mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tulip for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/dec/tulip/tulip.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/dec/tulip/de2104x.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/dec/tulip/dmfe.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/dec/tulip/uli526x.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/dec/tulip/winbond-840.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tulip/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tulip/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tulip/etc/modules.d; ( echo "tulip"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tulip/etc/modules.d/tulip; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tulip.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-solos-pci.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-solos-pci mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-solos-pci for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/atm/solos-pci.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-solos-pci/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-solos-pci/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-solos-pci/etc/modules.d; ( echo "solos-pci"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-solos-pci/etc/modules.d/solos-pci; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-solos-pci.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dummy.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dummy mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dummy for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/dummy.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dummy/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dummy/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dummy/etc/modules.d; ( echo "dummy"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dummy/etc/modules.d/34-dummy; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dummy.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ifb.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ifb mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ifb for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ifb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ifb/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ifb/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ifb/etc/modules.d; ( echo "ifb numifbs=0"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ifb/etc/modules.d/34-ifb; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ifb.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm9000.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm9000 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm9000 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/davicom/dm9000.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm9000/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm9000/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm9000/etc/modules.d; ( echo "dm9000"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm9000/etc/modules.d/34-dm9000; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm9000.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-forcedeth.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-forcedeth mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-forcedeth for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/nvidia/forcedeth.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-forcedeth/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-forcedeth/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-forcedeth/etc/modules.d; ( echo "forcedeth"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-forcedeth/etc/modules.d/forcedeth; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-forcedeth.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-of-mdio.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-of-mdio mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-of-mdio for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/fixed_phy.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/of/of_mdio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-of-mdio/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-of-mdio/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-of-mdio/etc/modules.d; ( echo "of_mdio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-of-mdio/etc/modules.d/41-of-mdio; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-of-mdio.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/fixed_phy.ko' is built-in. NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/of/of_mdio.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vmxnet3.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vmxnet3 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vmxnet3 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/vmxnet3/vmxnet3.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vmxnet3/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vmxnet3/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vmxnet3/etc/modules.d; ( echo "vmxnet3"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vmxnet3/etc/modules.d/35-vmxnet3; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vmxnet3.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-ks8995.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-ks8995 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-ks8995 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/phy/spi_ks8995.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-ks8995/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-ks8995/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-ks8995/etc/modules.d; ( echo "spi_ks8995"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-ks8995/etc/modules.d/50-spi-ks8995; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-ks8995.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ethoc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ethoc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ethoc for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/ethoc.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ethoc/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ethoc/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ethoc/etc/modules.d; ( echo "ethoc"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ethoc/etc/modules.d/ethoc; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ethoc.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/broadcom/bnx2.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2/etc/modules.d; ( echo "bnx2"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2/etc/modules.d/bnx2; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2x.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2x mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2x for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/broadcom/bnx2x/bnx2x.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2x/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2x/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2x/etc/modules.d; ( echo "bnx2x"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2x/etc/modules.d/bnx2x; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2x.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-be2net.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-be2net mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-be2net for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/emulex/benet/be2net.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-be2net/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-be2net/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-be2net/etc/modules.d; ( echo "be2net"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-be2net/etc/modules.d/be2net; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-be2net.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx4-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx4-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx4-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/mellanox/mlx4/mlx4_core.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/mellanox/mlx4/mlx4_en.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx4-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx4-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx4-core/etc/modules.d; ( echo "mlx4_core"; echo "mlx4_en"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx4-core/etc/modules.d/mlx4-core; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx4-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx5-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx5-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx5-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ethernet/mellanox/mlx5/core/mlx5_core.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx5-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx5-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx5-core/etc/modules.d; ( echo "mlx5_core"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx5-core/etc/modules.d/mlx5-core; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx5-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sfp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sfp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sfp true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sfp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/nf_reject_ipv4.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject/etc/modules.d; ( echo "nf_reject_ipv4"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject/etc/modules.d/nf-reject; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject6 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/nf_reject_ipv6.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject6/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject6/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject6/etc/modules.d; ( echo "nf_reject_ipv6"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject6/etc/modules.d/nf-reject6; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/ip_tables.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/x_tables.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt/etc/modules.d; ( echo "ip_tables"; echo "x_tables"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt/etc/modules.d/nf-ipt; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt6 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6_tables.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt6/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt6/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt6/etc/modules.d; ( echo "ip6_tables"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt6/etc/modules.d/nf-ipt6; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_tcpudp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/iptable_filter.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/iptable_mangle.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_limit.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_mac.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_multiport.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_comment.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_LOG.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_log_common.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/nf_log_ipv4.ko 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_TCPMSS.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/ipt_REJECT.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_time.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_mark.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-core/etc/modules.d; ( echo "ipt_REJECT"; echo "iptable_filter"; echo "iptable_mangle"; echo "nf_log_common"; echo "nf_log_ipv4"; echo "xt_LOG"; echo "xt_TCPMSS"; echo "xt_comment"; echo "xt_limit"; echo "xt_mac"; echo "xt_mark"; echo "xt_multiport"; echo "xt_tcpudp"; echo "xt_time"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-core/etc/modules.d/ipt-core; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_conntrack.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_conntrack_rtcache.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/nf_defrag_ipv4.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/nf_defrag_ipv6.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack/etc/modules.d; ( echo "nf_conntrack"; echo "nf_conntrack_rtcache"; echo "nf_defrag_ipv4"; echo "nf_defrag_ipv6"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack/etc/modules.d/nf-conntrack; install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack/etc/sysctl.d install -m0644 ./files/sysctl-nf-conntrack.conf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack/etc/sysctl.d/11-nf-conntrack.conf touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack6 true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_nat.ko; do if grep -q 
"${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat/etc/modules.d; ( echo "nf_nat"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat/etc/modules.d/nf-nat; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat6 true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-flow.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-flow mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-flow for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_flow_table.ko 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_flow_table_hw.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-flow/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-flow/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-flow/etc/modules.d; ( echo "nf_flow_table"; echo "nf_flow_table_hw"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-flow/etc/modules.d/nf-flow; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-flow.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_state.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_CT.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_conntrack.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" 
"/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack/etc/modules.d; ( echo "xt_CT"; echo "xt_conntrack"; echo "xt_state"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack/etc/modules.d/ipt-conntrack; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-extra.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-extra mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-extra for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_connbytes.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_connlimit.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_conncount.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_connmark.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_helper.ko 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_recent.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-extra/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-extra/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-extra/etc/modules.d; ( echo "nf_conncount"; echo "xt_connbytes"; echo "xt_connlimit"; echo "xt_connmark"; echo "xt_helper"; echo "xt_recent"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-extra/etc/modules.d/ipt-conntrack-extra; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-extra.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-label.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-label mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-label for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_connlabel.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" 
"/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-label/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-label/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-label/etc/modules.d; ( echo "xt_connlabel"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-label/etc/modules.d/ipt-conntrack-label; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-label.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-filter.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-filter mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-filter for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_string.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_bpf.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-filter/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-filter/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-filter/etc/modules.d; ( echo "xt_bpf"; echo "xt_string"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-filter/etc/modules.d/ipt-filter; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-filter.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-offload.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-offload mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-offload for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_FLOWOFFLOAD.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-offload/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-offload/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-offload/etc/modules.d; ( echo "xt_FLOWOFFLOAD"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-offload/etc/modules.d/ipt-offload; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-offload.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipopt.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipopt mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipopt for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_dscp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_DSCP.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_length.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_statistic.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_tcpmss.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_CLASSIFY.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/ipt_ECN.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_ecn.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_hl.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_HL.ko; do if grep -q 
"${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipopt/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipopt/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipopt/etc/modules.d; ( echo "ipt_ECN"; echo "xt_CLASSIFY"; echo "xt_DSCP"; echo "xt_HL"; echo "xt_dscp"; echo "xt_ecn"; echo "xt_hl"; echo "xt_length"; echo "xt_statistic"; echo "xt_tcpmss"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipopt/etc/modules.d/ipt-ipopt; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipopt.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipsec.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipsec mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipsec for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/ipt_ah.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_esp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_policy.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" 
"/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipsec/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipsec/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipsec/etc/modules.d; ( echo "ipt_ah"; echo "xt_esp"; echo "xt_policy"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipsec/etc/modules.d/ipt-ipsec; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipsec.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipset.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipset mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipset for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_bitmap_ip.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_bitmap_ipmac.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_bitmap_port.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_hash_ip.ko 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_hash_ipmark.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_hash_ipport.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_hash_ipportip.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_hash_ipportnet.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_hash_mac.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_hash_netportnet.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_hash_net.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_hash_netnet.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_hash_netport.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_hash_netiface.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipset/ip_set_list_set.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_set.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipset/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipset/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipset/etc/modules.d; ( echo "ip_set"; echo "ip_set_bitmap_ip"; echo "ip_set_bitmap_ipmac"; echo "ip_set_bitmap_port"; echo "ip_set_hash_ip"; echo "ip_set_hash_ipmark"; echo "ip_set_hash_ipport"; echo "ip_set_hash_ipportip"; echo "ip_set_hash_ipportnet"; echo "ip_set_hash_mac"; echo "ip_set_hash_net"; echo "ip_set_hash_netiface"; echo "ip_set_hash_netnet"; echo "ip_set_hash_netport"; echo "ip_set_hash_netportnet"; echo "ip_set_list_set"; echo "xt_set"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipset/etc/modules.d/49-ipt-ipset; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipset.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_lc.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_wlc.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_rr.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_wrr.ko 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_lblc.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_lblcr.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_dh.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_sh.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_fo.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_ovf.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_nq.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_sed.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_ipvs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-ftp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-ftp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-ftp for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_ftp.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-ftp/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-ftp/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-ftp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-sip.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-sip mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-sip for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/ipvs/ip_vs_pe_sip.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-sip/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-sip/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-sip.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_nat.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/iptable_nat.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_MASQUERADE.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_REDIRECT.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat/etc/modules.d; ( echo "iptable_nat"; echo "xt_MASQUERADE"; echo "xt_REDIRECT"; echo "xt_nat"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat/etc/modules.d/ipt-nat; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/iptable_raw.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw/etc/modules.d; ( echo "iptable_raw"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw/etc/modules.d/ipt-raw; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw6 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6table_raw.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw6/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw6/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw6/etc/modules.d; ( echo "ip6table_raw"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw6/etc/modules.d/ipt-raw6; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat6 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6table_nat.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6t_NPT.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat6/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat6/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat6/etc/modules.d; ( echo "ip6t_NPT"; echo "ip6table_nat"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat6/etc/modules.d/43-ipt-nat6; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat-extra.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat-extra mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat-extra for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_NETMAP.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat-extra/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat-extra/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat-extra/etc/modules.d; ( echo "xt_NETMAP"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat-extra/etc/modules.d/ipt-nat-extra; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat-extra.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_conntrack_ftp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_nat_ftp.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper/etc/modules.d; ( echo "nf_conntrack_ftp"; echo "nf_nat_ftp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper/etc/modules.d/nf-nathelper; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper-extra.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper-extra mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper-extra for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_conntrack_broadcast.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_conntrack_amanda.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_nat_amanda.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_conntrack_h323.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/nf_nat_h323.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_conntrack_pptp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/nf_nat_pptp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_conntrack_sip.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_nat_sip.ko 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_conntrack_snmp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/nf_nat_snmp_basic.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_conntrack_tftp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_nat_tftp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_conntrack_irc.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_nat_irc.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper-extra/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper-extra/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper-extra/etc/modules.d; ( echo "nf_conntrack_amanda"; echo "nf_conntrack_broadcast"; echo "nf_conntrack_h323"; echo "nf_conntrack_irc"; echo "nf_conntrack_pptp"; echo "nf_conntrack_sip"; echo "nf_conntrack_snmp"; echo "nf_conntrack_tftp"; echo "nf_nat_amanda"; echo "nf_nat_h323"; echo "nf_nat_irc"; echo "nf_nat_pptp"; echo "nf_nat_sip"; echo "nf_nat_snmp_basic"; echo "nf_nat_tftp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper-extra/etc/modules.d/nf-nathelper-extra; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper-extra.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ulog.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ulog mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ulog true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ulog.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nflog.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nflog mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nflog for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_NFLOG.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod 
]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nflog/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nflog/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nflog/etc/modules.d; ( echo "xt_NFLOG"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nflog/etc/modules.d/ipt-nflog; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nflog.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nfqueue.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nfqueue mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nfqueue for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_NFQUEUE.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nfqueue/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nfqueue/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nfqueue/etc/modules.d; ( echo "xt_NFQUEUE"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nfqueue/etc/modules.d/ipt-nfqueue; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nfqueue.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-debug.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-debug mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-debug for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_TRACE.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-debug/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-debug/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-debug/etc/modules.d; ( echo "xt_TRACE"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-debug/etc/modules.d/ipt-debug; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-debug.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-led.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-led mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-led for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_LED.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-led/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-led/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-led/etc/modules.d; ( echo "xt_LED"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-led/etc/modules.d/ipt-led; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-led.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tproxy.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tproxy mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tproxy for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_socket.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/nf_socket_ipv4.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/nf_socket_ipv6.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_TPROXY.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/nf_tproxy_ipv4.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/nf_tproxy_ipv6.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tproxy/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tproxy/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tproxy/etc/modules.d; ( echo "nf_socket_ipv4"; echo "nf_socket_ipv6"; echo "nf_tproxy_ipv4"; echo "nf_tproxy_ipv6"; echo "xt_TPROXY"; echo "xt_socket"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tproxy/etc/modules.d/ipt-tproxy; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tproxy.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tee.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tee mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tee for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_TEE.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/nf_dup_ipv4.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/nf_dup_ipv6.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tee/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tee/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is 
missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tee/etc/modules.d; ( echo "nf_dup_ipv4"; echo "nf_dup_ipv6"; echo "nf_tee"; echo "xt_TEE"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tee/etc/modules.d/ipt-tee; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tee.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-u32.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-u32 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-u32 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_u32.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-u32/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-u32/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-u32/etc/modules.d; ( echo "nf_tee"; echo "xt_u32"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-u32/etc/modules.d/ipt-u32; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-u32.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-checksum.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-checksum mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-checksum for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_CHECKSUM.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-checksum/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-checksum/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-checksum/etc/modules.d; ( echo "xt_CHECKSUM"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-checksum/etc/modules.d/ipt-checksum; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-checksum.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-iprange.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-iprange mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-iprange for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_iprange.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-iprange/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-iprange/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-iprange/etc/modules.d; ( echo "xt_iprange"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-iprange/etc/modules.d/ipt-iprange; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-iprange.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-cluster.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-cluster mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-cluster for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_cluster.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-cluster/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-cluster/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-cluster/etc/modules.d; ( echo "xt_cluster"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-cluster/etc/modules.d/ipt-cluster; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-cluster.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-clusterip.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-clusterip mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-clusterip for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/ipt_CLUSTERIP.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-clusterip/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-clusterip/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-clusterip/etc/modules.d; ( echo "ipt_CLUSTERIP"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-clusterip/etc/modules.d/ipt-clusterip; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-clusterip.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-extra.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-extra mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-extra for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_addrtype.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_owner.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_pkttype.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_quota.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-extra/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-extra/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-extra/etc/modules.d; ( echo "xt_addrtype"; echo "xt_owner"; echo "xt_pkttype"; echo "xt_quota"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-extra/etc/modules.d/ipt-extra; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-extra.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-physdev.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-physdev mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-physdev for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_physdev.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-physdev/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-physdev/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-physdev/etc/modules.d; ( echo "xt_physdev"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-physdev/etc/modules.d/ipt-physdev; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-physdev.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6table_filter.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6table_mangle.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/nf_log_ipv6.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6t_REJECT.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables/etc/modules.d; ( echo "ip6t_REJECT"; echo "ip6table_filter"; echo "ip6table_mangle"; echo "nf_log_ipv6"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables/etc/modules.d/42-ip6tables; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables-extra.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables-extra mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables-extra for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6t_ipv6header.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6t_ah.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6t_mh.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6t_eui64.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6t_hbh.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6t_frag.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6t_rt.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables-extra/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables-extra/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables-extra/etc/modules.d; ( echo "ip6t_ah"; echo "ip6t_eui64"; echo "ip6t_frag"; echo "ip6t_hbh"; echo "ip6t_ipv6header"; echo "ip6t_mh"; echo "ip6t_rt"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables-extra/etc/modules.d/43-ip6tables-extra; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables-extra.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-arptables.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-arptables mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-arptables for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/arp*.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-arptables/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-arptables/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-arptables/etc/modules.d; ( echo "arp_tables"; echo "arpt_mangle"; echo "arptable_filter"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-arptables/etc/modules.d/arptables; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-arptables.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-br-netfilter.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-br-netfilter mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-br-netfilter for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/br_netfilter.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-br-netfilter/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-br-netfilter/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-br-netfilter/etc/modules.d; ( echo "br_netfilter"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-br-netfilter/etc/modules.d/br-netfilter; install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-br-netfilter/etc/sysctl.d install -m0644 ./files/sysctl-br-netfilter.conf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-br-netfilter/etc/sysctl.d/11-br-netfilter.conf touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-br-netfilter.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebtables.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebtable_broute.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebtable_filter.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebtable_nat.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_802_3.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_among.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_limit.ko 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_mark_m.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_pkttype.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_stp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_vlan.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_mark.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_redirect.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables/etc/modules.d; ( echo "ebt_802_3"; echo "ebt_among"; echo "ebt_limit"; echo "ebt_mark"; echo "ebt_mark_m"; echo "ebt_pkttype"; echo "ebt_redirect"; echo "ebt_stp"; echo "ebt_vlan"; echo "ebtable_broute"; echo "ebtable_filter"; echo "ebtable_nat"; echo "ebtables"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables/etc/modules.d/ebtables; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv4.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv4 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv4 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_arp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_ip.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_arpreply.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_dnat.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_snat.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv4/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv4/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv4/etc/modules.d; ( echo "ebt_arp"; echo "ebt_arpreply"; echo "ebt_dnat"; echo "ebt_ip"; echo "ebt_snat"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv4/etc/modules.d/ebtables-ipv4; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv4.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv6 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_ip6.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv6/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv6/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv6/etc/modules.d; ( echo "ebt_ip6"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv6/etc/modules.d/ebtables-ipv6; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-watchers.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-watchers mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-watchers for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_log.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/ebt_nflog.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-watchers/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-watchers/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-watchers/etc/modules.d; ( echo "ebt_log"; echo "ebt_nflog"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-watchers/etc/modules.d/ebtables-watchers; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-watchers.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nfnetlink.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink/etc/modules.d; ( echo "nfnetlink"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink/etc/modules.d/nfnetlink; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-log.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-log mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-log for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nfnetlink_log.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-log/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-log/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-log/etc/modules.d; ( echo "nfnetlink_log"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-log/etc/modules.d/nfnetlink-log; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-log.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-queue.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-queue mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-queue for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nfnetlink_queue.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-queue/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-queue/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-queue/etc/modules.d; ( echo "nfnetlink_queue"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-queue/etc/modules.d/nfnetlink-queue; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-queue.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack-netlink.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack-netlink mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack-netlink for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_conntrack_netlink.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack-netlink/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack-netlink/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack-netlink/etc/modules.d; ( echo "nf_conntrack_netlink"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack-netlink/etc/modules.d/nf-conntrack-netlink; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack-netlink.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-hashlimit.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-hashlimit mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-hashlimit for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/xt_hashlimit.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-hashlimit/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-hashlimit/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-hashlimit/etc/modules.d; ( echo "xt_hashlimit"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-hashlimit/etc/modules.d/ipt-hashlimit; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-hashlimit.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-rpfilter.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-rpfilter mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-rpfilter for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/ipt_rpfilter.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/ip6t_rpfilter.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-rpfilter/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-rpfilter/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-rpfilter/etc/modules.d; ( echo "ip6t_rpfilter"; echo "ipt_rpfilter"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-rpfilter/etc/modules.d/ipt-rpfilter; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-rpfilter.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_tables.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_tables_set.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_counter.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_ct.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_hash.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_limit.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_log.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_numgen.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_objref.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_quota.ko 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_redir.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_reject.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/nft_reject_ipv4.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/nft_reject_ipv6.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_reject_inet.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-core/etc/modules.d; ( echo "nf_tables"; echo "nf_tables_set"; echo "nft_counter"; echo "nft_ct"; echo "nft_hash"; echo "nft_limit"; echo "nft_log"; echo "nft_numgen"; echo "nft_objref"; echo "nft_quota"; echo "nft_redir"; echo "nft_reject"; echo "nft_reject_inet"; echo "nft_reject_ipv4"; echo "nft_reject_ipv6"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-core/etc/modules.d/nft-core; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-arp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-arp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-arp for mod in ; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-arp/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-arp/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-arp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-bridge.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-bridge mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-bridge for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/nft_meta_bridge.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bridge/netfilter/nft_reject_bridge.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-bridge/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-bridge/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-bridge/etc/modules.d; ( echo "nft_meta_bridge"; echo "nft_reject_bridge"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-bridge/etc/modules.d/nft-bridge; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-bridge.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_nat.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_masq.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat/etc/modules.d; ( echo "nft_masq"; echo "nft_nat"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat/etc/modules.d/nft-nat; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-offload.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-offload mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-offload for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_flow_table_inet.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/nf_flow_table_ipv4.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/nf_flow_table_ipv6.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_flow_offload.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-offload/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-offload/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-offload/etc/modules.d; ( echo "nf_flow_table_inet"; echo "nf_flow_table_ipv4"; echo "nf_flow_table_ipv6"; echo "nft_flow_offload"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-offload/etc/modules.d/nft-offload; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-offload.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat6 true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-netdev.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-netdev mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-netdev for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nf_dup_netdev.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_dup_netdev.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_fwd_netdev.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-netdev/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-netdev/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-netdev/etc/modules.d; ( echo "nf_dup_netdev"; echo "nf_tables_netdev"; echo "nft_dup_netdev"; echo "nft_fwd_netdev"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-netdev/etc/modules.d/nft-netdev; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-netdev.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-fib.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-fib mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-fib for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_fib.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netfilter/nft_fib_inet.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/netfilter/nft_fib_ipv4.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/netfilter/nft_fib_ipv6.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-fib/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-fib/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-fib/etc/modules.d; ( echo "nft_fib"; echo "nft_fib_inet"; echo "nft_fib_ipv4"; echo "nft_fib_ipv6"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-fib/etc/modules.d/nft-fib; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-fib.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atm.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atm mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atm for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/atm/atm.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/atm/br2684.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atm/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atm/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atm/etc/modules.d; ( echo "atm"; echo "br2684"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atm/etc/modules.d/30-atm; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atm.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atmtcp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atmtcp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atmtcp for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/atm/atmtcp.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atmtcp/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atmtcp/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atmtcp/etc/modules.d; ( echo "atmtcp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atmtcp/etc/modules.d/40-atmtcp; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atmtcp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bonding.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bonding mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bonding for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/bonding/bonding.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bonding/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bonding/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bonding/etc/modules.d; ( echo "bonding"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bonding/etc/modules.d/40-bonding; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bonding.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel4.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel4 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel4 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/udp_tunnel.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel4/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel4/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel4/etc/modules.d; ( echo "udp_tunnel"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel4/etc/modules.d/32-udptunnel4; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel4.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel6 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/ip6_udp_tunnel.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel6/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel6/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel6/etc/modules.d; ( echo "ip6_udp_tunnel"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel6/etc/modules.d/32-udptunnel6; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vxlan.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vxlan mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vxlan for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/vxlan.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vxlan/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vxlan/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vxlan/etc/modules.d; ( echo "vxlan"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vxlan/etc/modules.d/13-vxlan; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vxlan.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-geneve.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-geneve mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-geneve for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/geneve.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-geneve/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-geneve/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-geneve/etc/modules.d; ( echo "geneve"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-geneve/etc/modules.d/13-geneve; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-geneve.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nsh.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nsh mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nsh for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/nsh/nsh.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nsh/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nsh/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nsh/etc/modules.d; ( echo "nsh"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nsh/etc/modules.d/13-nsh; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nsh.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-capi.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-capi mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-capi for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/isdn/capi/kernelcapi.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/isdn/capi/capi.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-capi/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-capi/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-capi/etc/modules.d; ( echo "capi"; echo "kernelcapi"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-capi/etc/modules.d/30-capi; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-capi.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-misdn.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-misdn mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-misdn for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/isdn/mISDN/mISDN_core.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/isdn/mISDN/mISDN_dsp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/isdn/mISDN/l1oip.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-misdn/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-misdn/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-misdn/etc/modules.d; ( echo "l1oip"; echo "mISDN_core"; echo "mISDN_dsp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-misdn/etc/modules.d/30-misdn; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-misdn.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-isdn4linux.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-isdn4linux mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-isdn4linux true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-isdn4linux.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipip.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipip mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipip for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/ipip.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipip/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipip/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipip/etc/modules.d; ( echo "ipip"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipip/etc/modules.d/32-ipip; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipip.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/xfrm/xfrm_algo.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/xfrm/xfrm_ipcomp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/xfrm/xfrm_user.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/key/af_key.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec/etc/modules.d; ( echo "af_key"; echo "xfrm_algo"; echo "xfrm_ipcomp"; echo "xfrm_user"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec/etc/modules.d/30-ipsec; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec4.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec4 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec4 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/ah4.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/esp4.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/xfrm4_tunnel.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/ipcomp.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec4/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec4/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec4/etc/modules.d; ( echo "ah4"; echo "esp4"; echo "ipcomp"; echo "xfrm4_tunnel"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec4/etc/modules.d/32-ipsec4; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec4.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec6 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/ah6.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/esp6.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/xfrm6_tunnel.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/ipcomp6.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec6/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec6/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec6/etc/modules.d; ( echo "ah6"; echo "esp6"; echo "ipcomp6"; echo "xfrm6_tunnel"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec6/etc/modules.d/32-ipsec6; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/ip_tunnel.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel/etc/modules.d; ( echo "ip_tunnel"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel/etc/modules.d/31-iptunnel; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip-vti.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip-vti mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip-vti for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/ip_vti.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip-vti/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip-vti/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip-vti/etc/modules.d; ( echo "ip_vti"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip-vti/etc/modules.d/33-ip-vti; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip-vti.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-vti.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-vti mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-vti for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/ip6_vti.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-vti/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-vti/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-vti/etc/modules.d; ( echo "ip6_vti"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-vti/etc/modules.d/33-ip6-vti; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-vti.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-xfrm-interface.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-xfrm-interface mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-xfrm-interface for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/xfrm/xfrm_interface.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-xfrm-interface/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-xfrm-interface/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-xfrm-interface/etc/modules.d; ( echo "xfrm_interface"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-xfrm-interface/etc/modules.d/xfrm-interface; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-xfrm-interface.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel4.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel4 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel4 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/tunnel4.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel4/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel4/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel4/etc/modules.d; ( echo "tunnel4"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel4/etc/modules.d/31-iptunnel4; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel4.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel6 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/tunnel6.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel6/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel6/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel6/etc/modules.d; ( echo "tunnel6"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel6/etc/modules.d/31-iptunnel6; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sit.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sit mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sit for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/sit.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sit/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sit/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sit/etc/modules.d; ( echo "sit"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sit/etc/modules.d/32-sit; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sit.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/fou.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou/etc/modules.d; ( echo "fou"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou/etc/modules.d/fou; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou6 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/fou6.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou6/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou6/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou6/etc/modules.d; ( echo "fou6"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou6/etc/modules.d/fou6; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-tunnel.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-tunnel mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-tunnel for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/ip6_tunnel.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-tunnel/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-tunnel/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-tunnel/etc/modules.d; ( echo "ip6_tunnel"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-tunnel/etc/modules.d/32-ip6-tunnel; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-tunnel.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/ip_gre.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/gre.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre/etc/modules.d; ( echo "gre"; echo "ip_gre"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre/etc/modules.d/39-gre; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre6 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv6/ip6_gre.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre6/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre6/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre6/etc/modules.d; ( echo "ip6_gre"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre6/etc/modules.d/39-gre6; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tun.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tun mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tun for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/tun.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tun/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tun/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tun/etc/modules.d; ( echo "tun"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tun/etc/modules.d/30-tun; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tun.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-veth.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-veth mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-veth for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/veth.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-veth/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-veth/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-veth/etc/modules.d; ( echo "veth"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-veth/etc/modules.d/30-veth; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-veth.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slhc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slhc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slhc for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/slip/slhc.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slhc/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slhc/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slhc.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ppp/ppp_async.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ppp/ppp_generic.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp/etc/modules.d; ( echo "ppp_async"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp/etc/modules.d/ppp; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp-synctty.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp-synctty mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp-synctty for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ppp/ppp_synctty.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp-synctty/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp-synctty/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp-synctty/etc/modules.d; ( echo "ppp_synctty"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp-synctty/etc/modules.d/ppp-synctty; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp-synctty.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppox.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppox mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppox for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ppp/pppox.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppox/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppox/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppox.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoe.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoe mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoe for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ppp/pppoe.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoe/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoe/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoe/etc/modules.d; ( echo "pppoe"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoe/etc/modules.d/pppoe; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoe.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoa.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoa mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoa for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/atm/pppoatm.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoa/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoa/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoa/etc/modules.d; ( echo "pppoatm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoa/etc/modules.d/40-pppoa; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoa.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pptp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pptp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pptp for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ppp/pptp.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pptp/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pptp/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pptp/etc/modules.d; ( echo "pptp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pptp/etc/modules.d/pptp; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pptp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppol2tp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppol2tp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppol2tp for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/l2tp/l2tp_ppp.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppol2tp/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppol2tp/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppol2tp/etc/modules.d; ( echo "l2tp_ppp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppol2tp/etc/modules.d/pppol2tp; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppol2tp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipoa.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipoa mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipoa for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/atm/clip.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipoa/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipoa/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipoa/etc/modules.d; ( echo "clip"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipoa/etc/modules.d/ipoa; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipoa.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mppe.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mppe mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mppe for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ppp/ppp_mppe.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mppe/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mppe/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mppe/etc/modules.d; ( echo "ppp_mppe"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mppe/etc/modules.d/mppe; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mppe.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/cls_fw.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/cls_route.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/cls_u32.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/act_skbedit.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_tbf.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/cls_basic.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_ingress.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_htb.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_hfsc.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/em_u32.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/cls_matchall.ko 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/act_mirred.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/cls_flow.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/cls_tcindex.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-core/etc/modules.d; ( echo "act_mirred"; echo "act_skbedit"; echo "cls_basic"; echo "cls_flow"; echo "cls_fw"; echo "cls_matchall"; echo "cls_route"; echo "cls_tcindex"; echo "cls_u32"; echo "em_u32"; echo "sch_fq_codel"; echo "sch_hfsc"; echo "sch_htb"; echo "sch_ingress"; echo "sch_tbf"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-core/etc/modules.d/70-sched-core; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-cake.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-cake mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-cake for mod in 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_cake.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-cake/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-cake/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-cake/etc/modules.d; ( echo "sch_cake"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-cake/etc/modules.d/sched-cake; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-cake.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-flower.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-flower mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-flower for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/cls_flower.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-flower/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-flower/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-flower/etc/modules.d; ( echo "cls_flower"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-flower/etc/modules.d/sched-flower; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-flower.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-act-vlan.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-act-vlan mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-act-vlan for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/act_vlan.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-act-vlan/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-act-vlan/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-act-vlan/etc/modules.d; ( echo "act_vlan"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-act-vlan/etc/modules.d/sched-act-vlan; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-act-vlan.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-mqprio.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-mqprio mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-mqprio for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_mqprio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-mqprio/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-mqprio/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-mqprio/etc/modules.d; ( echo "sch_mqprio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-mqprio/etc/modules.d/sched-mqprio; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-mqprio.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-connmark.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-connmark mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-connmark for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/act_connmark.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-connmark/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-connmark/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-connmark/etc/modules.d; ( echo "act_connmark"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-connmark/etc/modules.d/71-sched-connmark; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-connmark.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ctinfo.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ctinfo mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ctinfo for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/act_ctinfo.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ctinfo/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ctinfo/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ctinfo/etc/modules.d; ( echo "act_ctinfo"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ctinfo/etc/modules.d/71-sched-ctinfo; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ctinfo.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ipset.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ipset mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ipset for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/em_ipset.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ipset/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ipset/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ipset/etc/modules.d; ( echo "em_ipset"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ipset/etc/modules.d/72-sched-ipset; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ipset.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-bpf.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-bpf mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-bpf for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/cls_bpf.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/act_bpf.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-bpf/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-bpf/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-bpf/etc/modules.d; ( echo "act_bpf"; echo "cls_bpf"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-bpf/etc/modules.d/72-sched-bpf; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-bpf.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bpf-test.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bpf-test mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bpf-test for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/lib/test_bpf.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bpf-test/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bpf-test/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bpf-test.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_red.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_dsmark.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_codel.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/act_pedit.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/act_police.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/em_cmp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/act_gact.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/em_meta.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_teql.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_multiq.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/em_nbyte.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/em_text.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_gred.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_prio.ko 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/act_csum.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_fq.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/act_ipt.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/act_simple.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_sfq.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_pie.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched/etc/modules.d; ( echo "act_csum"; echo "act_gact"; echo "act_ipt"; echo "act_pedit"; echo "act_police"; echo "act_simple"; echo "em_cmp"; echo "em_meta"; echo "em_nbyte"; echo "em_text"; echo "sch_codel"; echo "sch_dsmark"; echo "sch_fq"; echo "sch_gred"; echo "sch_multiq"; echo "sch_pie"; echo "sch_prio"; echo "sch_red"; echo "sch_sfq"; echo "sch_teql"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched/etc/modules.d/73-sched; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tcp-bbr.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tcp-bbr mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tcp-bbr for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ipv4/tcp_bbr.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tcp-bbr/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tcp-bbr/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tcp-bbr/etc/modules.d; ( echo "tcp_bbr"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tcp-bbr/etc/modules.d/74-tcp-bbr; install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tcp-bbr/etc/sysctl.d install -m0644 ./files/sysctl-tcp-bbr.conf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tcp-bbr/etc/sysctl.d/12-tcp-bbr.conf touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tcp-bbr.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ax25.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ax25 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ax25 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ax25/ax25.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/hamradio/mkiss.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ax25/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ax25/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ax25/etc/modules.d; ( echo "ax25"; echo "mkiss"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ax25/etc/modules.d/80-ax25; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ax25.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pktgen.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pktgen mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pktgen for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/core/pktgen.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pktgen/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pktgen/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pktgen/etc/modules.d; ( echo "pktgen"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pktgen/etc/modules.d/99-pktgen; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pktgen.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/l2tp/l2tp_core.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/l2tp/l2tp_netlink.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp/etc/modules.d; ( echo "l2tp_core"; echo "l2tp_netlink"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp/etc/modules.d/32-l2tp; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-eth.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-eth mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-eth for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/l2tp/l2tp_eth.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-eth/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-eth/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-eth/etc/modules.d; ( echo "l2tp_eth"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-eth/etc/modules.d/33-l2tp-eth; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-eth.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-ip.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-ip mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-ip for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/l2tp/l2tp_ip.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/l2tp/l2tp_ip6.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-ip/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-ip/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-ip/etc/modules.d; ( echo "l2tp_ip"; echo "l2tp_ip6"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-ip/etc/modules.d/33-l2tp-ip; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-ip.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sctp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sctp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sctp for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sctp/sctp.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sctp/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sctp/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sctp/etc/modules.d; ( echo "sctp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sctp/etc/modules.d/32-sctp; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sctp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netem.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netem mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netem for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/sched/sch_netem.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netem/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netem/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netem/etc/modules.d; ( echo "netem"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netem/etc/modules.d/99-netem; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netem.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slip.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slip mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slip for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/slip/slip.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slip/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slip/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slip/etc/modules.d; ( echo "slip"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slip/etc/modules.d/30-slip; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slip.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dnsresolver.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dnsresolver mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dnsresolver for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/dns_resolver/dns_resolver.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dnsresolver/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dnsresolver/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dnsresolver/etc/modules.d; ( echo "dns_resolver"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dnsresolver/etc/modules.d/30-dnsresolver; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dnsresolver.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mpls.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mpls mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mpls for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/mpls/mpls_gso.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/mpls/mpls_iptunnel.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/mpls/mpls_router.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mpls/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mpls/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mpls/etc/modules.d; ( echo "mpls_gso"; echo "mpls_iptunnel"; echo "mpls_router"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mpls/etc/modules.d/30-mpls; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mpls.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nlmon.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nlmon mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nlmon for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/nlmon.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nlmon/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nlmon/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nlmon/etc/modules.d; ( echo "nlmon"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nlmon/etc/modules.d/nlmon; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nlmon.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/mdio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio/etc/modules.d; ( echo "mdio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio/etc/modules.d/32-mdio; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macsec.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macsec mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macsec for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/macsec.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macsec/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macsec/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macsec/etc/modules.d; ( echo "macsec"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macsec/etc/modules.d/13-macsec; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macsec.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netlink-diag.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netlink-diag mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netlink-diag for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/netlink/netlink_diag.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netlink-diag/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netlink-diag/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netlink-diag/etc/modules.d; ( echo "netlink-diag"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netlink-diag/etc/modules.d/31-netlink-diag; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netlink-diag.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-base.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-base mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-base for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_base.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-base/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-base/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-base.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp437.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp437 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp437 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_cp437.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp437/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp437/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp437/etc/modules.d; ( echo "nls_cp437"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp437/etc/modules.d/25-nls-cp437; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp437.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp775.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp775 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp775 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_cp775.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp775/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp775/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp775/etc/modules.d; ( echo "nls_cp775"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp775/etc/modules.d/25-nls-cp775; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp775.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp850.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp850 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp850 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_cp850.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp850/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp850/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp850/etc/modules.d; ( echo "nls_cp850"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp850/etc/modules.d/25-nls-cp850; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp850.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp852.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp852 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp852 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_cp852.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp852/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp852/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp852/etc/modules.d; ( echo "nls_cp852"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp852/etc/modules.d/25-nls-cp852; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp852.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp862.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp862 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp862 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_cp862.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp862/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp862/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp862/etc/modules.d; ( echo "nls_cp862"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp862/etc/modules.d/25-nls-cp862; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp862.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp864.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp864 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp864 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_cp864.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp864/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp864/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp864/etc/modules.d; ( echo "nls_cp864"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp864/etc/modules.d/25-nls-cp864; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp864.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp866.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp866 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp866 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_cp866.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp866/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp866/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp866/etc/modules.d; ( echo "nls_cp866"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp866/etc/modules.d/25-nls-cp866; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp866.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp932.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp932 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp932 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_cp932.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp932/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp932/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp932/etc/modules.d; ( echo "nls_cp932"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp932/etc/modules.d/25-nls-cp932; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp932.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp936.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp936 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp936 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_cp936.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp936/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp936/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp936/etc/modules.d; ( echo "nls_cp936"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp936/etc/modules.d/25-nls-cp936; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp936.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp950.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp950 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp950 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_cp950.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp950/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp950/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp950/etc/modules.d; ( echo "nls_cp950"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp950/etc/modules.d/25-nls-cp950; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp950.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1250.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1250 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1250 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_cp1250.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1250/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1250/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1250/etc/modules.d; ( echo "nls_cp1250"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1250/etc/modules.d/25-nls-cp1250; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1250.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1251.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1251 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1251 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_cp1251.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1251/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1251/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1251/etc/modules.d; ( echo "nls_cp1251"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1251/etc/modules.d/25-nls-cp1251; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1251.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-1.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-1 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-1 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_iso8859-1.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-1/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-1/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-1/etc/modules.d; ( echo "nls_iso8859-1"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-1/etc/modules.d/25-nls-iso8859-1; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-1.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-2.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-2 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-2 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_iso8859-2.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-2/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-2/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-2/etc/modules.d; ( echo "nls_iso8859-2"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-2/etc/modules.d/25-nls-iso8859-2; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-2.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-6 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_iso8859-6.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-6/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-6/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-6/etc/modules.d; ( echo "nls_iso8859-6"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-6/etc/modules.d/25-nls-iso8859-6; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-8.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-8 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-8 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_cp1255.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-8/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-8/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-8/etc/modules.d; ( echo "nls_cp1255"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-8/etc/modules.d/25-nls-iso8859-8; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-8.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-13.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-13 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-13 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_iso8859-13.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-13/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-13/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-13/etc/modules.d; ( echo "nls_iso8859-13"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-13/etc/modules.d/25-nls-iso8859-13; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-13.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-15.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-15 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-15 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_iso8859-15.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-15/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-15/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-15/etc/modules.d; ( echo "nls_iso8859-15"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-15/etc/modules.d/25-nls-iso8859-15; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-15.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-koi8r.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-koi8r mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-koi8r for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_koi8-r.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-koi8r/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-koi8r/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-koi8r/etc/modules.d; ( echo "nls_koi8-r"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-koi8r/etc/modules.d/25-nls-koi8r; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-koi8r.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-utf8.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-utf8 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-utf8 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/fs/nls/nls_utf8.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-utf8/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-utf8/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-utf8/etc/modules.d; ( echo "nls_utf8"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-utf8/etc/modules.d/25-nls-utf8; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-utf8.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-6lowpan.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-6lowpan mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-6lowpan for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/6lowpan/6lowpan.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-6lowpan/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-6lowpan/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-6lowpan/etc/modules.d; ( echo "6lowpan"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-6lowpan/etc/modules.d/6lowpan; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-6lowpan.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bluetooth/bluetooth.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bluetooth/rfcomm/rfcomm.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bluetooth/bnep/bnep.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bluetooth/hidp/hidp.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/bluetooth/hci_uart.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/bluetooth/btusb.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/bluetooth/btintel.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth/etc/modules.d; ( echo "bluetooth"; echo "bnep"; echo "btusb"; echo "hci_uart"; echo "hidp"; echo "rfcomm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth/etc/modules.d/bluetooth; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ath3k.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ath3k mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ath3k for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/bluetooth/ath3k.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ath3k/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ath3k/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ath3k/etc/modules.d; ( echo "ath3k"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ath3k/etc/modules.d/ath3k; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ath3k.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth-6lowpan.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth-6lowpan mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth-6lowpan for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/bluetooth/bluetooth_6lowpan.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth-6lowpan/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth-6lowpan/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth-6lowpan/etc/modules.d; ( echo "bluetooth_6lowpan"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth-6lowpan/etc/modules.d/bluetooth-6lowpan; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth-6lowpan.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-btmrvl.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-btmrvl mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-btmrvl for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/bluetooth/btmrvl.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/bluetooth/btmrvl_sdio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-btmrvl/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-btmrvl/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-btmrvl/etc/modules.d; ( echo "btmrvl"; echo "btmrvl_sdio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-btmrvl/etc/modules.d/btmrvl; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-btmrvl.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dma-buf.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dma-buf mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dma-buf for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/dma-buf/dma-shared-buffer.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dma-buf/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dma-buf/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dma-buf/etc/modules.d; ( echo "dma-shared-buffer"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dma-buf/etc/modules.d/20-dma-buf; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dma-buf.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-93cx6.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-93cx6 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-93cx6 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/misc/eeprom/eeprom_93cx6.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-93cx6/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-93cx6/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-93cx6/etc/modules.d; ( echo "eeprom_93cx6"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-93cx6/etc/modules.d/20-eeprom-93cx6; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-93cx6.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at24.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at24 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at24 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/misc/eeprom/at24.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at24/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at24/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at24/etc/modules.d; ( echo "at24"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at24/etc/modules.d/eeprom-at24; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at24.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at25.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at25 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at25 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/misc/eeprom/at25.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at25/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at25/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at25/etc/modules.d; ( echo "at25"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at25/etc/modules.d/eeprom-at25; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at25.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-dev.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-dev mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-dev true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-dev.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-mcp23s08.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-mcp23s08 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-mcp23s08 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/pinctrl/pinctrl-mcp23s08.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-mcp23s08/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-mcp23s08/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-mcp23s08/etc/modules.d; ( echo "pinctrl-mcp23s08"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-mcp23s08/etc/modules.d/40-gpio-mcp23s08; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-mcp23s08.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-nxp-74hc164.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-nxp-74hc164 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-nxp-74hc164 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/gpio/gpio-74x164.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-nxp-74hc164/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-nxp-74hc164/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-nxp-74hc164/etc/modules.d; ( echo "gpio-74x164"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-nxp-74hc164/etc/modules.d/gpio-nxp-74hc164; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-nxp-74hc164.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/gpio/gpio-74x164.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pca953x.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pca953x mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pca953x for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/gpio/gpio-pca953x.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pca953x/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pca953x/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pca953x/etc/modules.d; ( echo "gpio-pca953x"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pca953x/etc/modules.d/55-gpio-pca953x; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pca953x.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/gpio/gpio-pca953x.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pcf857x.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pcf857x mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pcf857x for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/gpio/gpio-pcf857x.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pcf857x/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pcf857x/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pcf857x/etc/modules.d; ( echo "gpio-pcf857x"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pcf857x/etc/modules.d/55-gpio-pcf857x; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pcf857x.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppdev.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppdev mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppdev for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/parport/parport.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/char/ppdev.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppdev/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppdev/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppdev/etc/modules.d; ( echo "parport"; echo "ppdev"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppdev/etc/modules.d/50-ppdev; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppdev.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-parport-pc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-parport-pc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-parport-pc true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-parport-pc.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lp for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/char/lp.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lp/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lp/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lp/etc/modules.d; ( echo "lp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lp/etc/modules.d/52-lp; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mmc/core/mmc_core.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mmc/core/mmc_block.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc/etc/modules.d; ( echo "mmc_block"; echo "mmc_core"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc/etc/modules.d/mmc; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc/etc/modules-boot.d; ln -sf ../modules.d/mmc /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sdhci.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sdhci mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sdhci for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mmc/host/sdhci.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mmc/host/sdhci-pltfm.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sdhci/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sdhci/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sdhci/etc/modules.d; ( echo "sdhci-pltfm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sdhci/etc/modules.d/sdhci; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sdhci/etc/modules-boot.d; ln -sf ../modules.d/sdhci /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sdhci/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sdhci.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-softdog.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-softdog mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-softdog for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/watchdog/softdog.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-softdog/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-softdog/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-softdog/etc/modules.d; ( echo "softdog"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-softdog/etc/modules.d/50-softdog; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-softdog/etc/modules-boot.d; ln -sf ../modules.d/50-softdog /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-softdog/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-softdog.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ssb.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ssb mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ssb for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/ssb/ssb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ssb/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ssb/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ssb/etc/modules.d; ( echo "ssb"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ssb/etc/modules.d/18-ssb; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ssb/etc/modules-boot.d; ln -sf ../modules.d/18-ssb /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ssb/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ssb.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bcma.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bcma mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bcma for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/bcma/bcma.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bcma/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bcma/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bcma/etc/modules.d; ( echo "bcma"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bcma/etc/modules.d/29-bcma; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bcma.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-ds1307.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-ds1307 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-ds1307 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/rtc/rtc-ds1307.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-ds1307/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-ds1307/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-ds1307/etc/modules.d; ( echo "rtc-ds1307"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-ds1307/etc/modules.d/rtc-ds1307; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-ds1307.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-pcf8563.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-pcf8563 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-pcf8563 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/rtc/rtc-pcf8563.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-pcf8563/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-pcf8563/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-pcf8563/etc/modules.d; ( echo "rtc-pcf8563"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-pcf8563/etc/modules.d/rtc-pcf8563; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-pcf8563.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdtests.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdtests mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdtests for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mtd/tests/mtd_nandecctest.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mtd/tests/mtd_oobtest.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mtd/tests/mtd_pagetest.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mtd/tests/mtd_readtest.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mtd/tests/mtd_speedtest.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mtd/tests/mtd_stresstest.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mtd/tests/mtd_subpagetest.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mtd/tests/mtd_torturetest.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is 
built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdtests/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdtests/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdtests.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdoops.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdoops mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdoops for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mtd/mtdoops.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdoops/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdoops/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdoops.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdram.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdram mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdram for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mtd/devices/mtdram.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdram/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdram/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdram.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/tty/serial/8250/8250.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/tty/serial/8250/8250_base.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/tty/serial/8250/8250_pci.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/tty/serial/serial_mctrl_gpio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250/etc/modules.d; ( echo "8250"; echo "8250_base"; echo "8250_pci"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250/etc/modules.d/serial-8250; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/tty/serial/8250/8250.ko' is built-in. NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/tty/serial/8250/8250_base.ko' is built-in. NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/tty/serial/serial_mctrl_gpio.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250-exar.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250-exar mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250-exar for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/tty/serial/8250/8250_exar.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250-exar/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250-exar/lib/modules/5.4.63/ ; else echo 
"ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250-exar/etc/modules.d; ( echo "8250"; echo "8250_base"; echo "8250_exar"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250-exar/etc/modules.d/serial-8250-exar; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250-exar.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-core for mod in ; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-spi.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-spi mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-spi for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/base/regmap/regmap-spi.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-spi/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-spi/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-spi.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-i2c.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-i2c mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-i2c for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/base/regmap/regmap-i2c.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-i2c/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-i2c/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-i2c.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/base/regmap/regmap-i2c.ko' is built-in. 
rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ikconfig.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ikconfig mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ikconfig for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/kernel/configs.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ikconfig/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ikconfig/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ikconfig/etc/modules.d; ( echo "configs"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ikconfig/etc/modules.d/70-ikconfig; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ikconfig.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-zram.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-zram mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-zram for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/mm/zsmalloc.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/block/zram/zram.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-zram/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-zram/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-zram/etc/modules.d; ( echo "zram"; echo "zsmalloc"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-zram/etc/modules.d/20-zram; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-zram.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/pps/pps_core.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps/etc/modules.d; ( echo "pps_core"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps/etc/modules.d/17-pps; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps/etc/modules-boot.d; ln -sf ../modules.d/17-pps /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-gpio.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-gpio mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-gpio for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/pps/clients/pps-gpio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-gpio/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-gpio/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-gpio/etc/modules.d; ( echo "pps-gpio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-gpio/etc/modules.d/18-pps-gpio; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-gpio/etc/modules-boot.d; ln -sf ../modules.d/18-pps-gpio /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-gpio/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-gpio.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-ldisc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-ldisc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-ldisc for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/pps/clients/pps-ldisc.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-ldisc/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-ldisc/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-ldisc/etc/modules.d; ( echo "pps-ldisc"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-ldisc/etc/modules.d/18-pps-ldisc; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-ldisc/etc/modules-boot.d; ln -sf ../modules.d/18-pps-ldisc /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-ldisc/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-ldisc.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ptp.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ptp mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ptp for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/ptp/ptp.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ptp/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ptp/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ptp/etc/modules.d; ( echo "ptp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ptp/etc/modules.d/18-ptp; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ptp/etc/modules-boot.d; ln -sf ../modules.d/18-ptp /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ptp/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ptp.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-random-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-random-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-random-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/char/hw_random/rng-core.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-random-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-random-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-random-core.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/char/hw_random/rng-core.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-beeper.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-beeper mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-beeper for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/misc/gpio-beeper.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-beeper/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-beeper/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-beeper/etc/modules.d; ( echo "gpio-beeper"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-beeper/etc/modules.d/50-gpio-beeper; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-beeper.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-echo.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-echo mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-echo for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/misc/echo/echo.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-echo/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-echo/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-echo/etc/modules.d; ( echo "echo"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-echo/etc/modules.d/50-echo; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-echo.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085 true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085-i2c.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085-i2c mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085-i2c true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085-i2c.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085-spi.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085-spi mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085-spi true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085-spi.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm 
mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/char/tpm/tpm.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm/etc/modules.d; ( echo "tpm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm/etc/modules.d/10-tpm; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm/etc/modules-boot.d; ln -sf ../modules.d/10-tpm /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-atmel.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-atmel mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-atmel for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/char/tpm/tpm_i2c_atmel.ko; do if grep -q 
"${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-atmel/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-atmel/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-atmel/etc/modules.d; ( echo "tpm_i2c_atmel"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-atmel/etc/modules.d/40-tpm-i2c-atmel; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-atmel/etc/modules-boot.d; ln -sf ../modules.d/40-tpm-i2c-atmel /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-atmel/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-atmel.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-infineon.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-infineon mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-infineon for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/char/tpm/tpm_i2c_infineon.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" 
"/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-infineon/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-infineon/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-infineon/etc/modules.d; ( echo "tpm_i2c_infineon"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-infineon/etc/modules.d/40-tpm-i2c-infineon; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-infineon/etc/modules-boot.d; ln -sf ../modules.d/40-tpm-i2c-infineon /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-infineon/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-infineon.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w83627hf-wdt.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w83627hf-wdt mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w83627hf-wdt true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w83627hf-wdt.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-itco-wdt.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-itco-wdt mkdir -p 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-itco-wdt true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-itco-wdt.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-it87-wdt.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-it87-wdt mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-it87-wdt true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-it87-wdt.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/soundcore.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/core/snd.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/core/snd-hwdep.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/core/snd-seq-device.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/core/snd-rawmidi.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/core/snd-timer.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/core/snd-pcm.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/core/oss/snd-mixer-oss.ko 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/core/oss/snd-pcm-oss.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/core/snd-compress.ko ; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-core/etc/modules.d; ( echo "snd"; echo "snd-compress"; echo "snd-hwdep"; echo "snd-mixer-oss"; echo "snd-pcm"; echo "snd-pcm-oss"; echo "snd-rawmidi"; echo "snd-seq-device"; echo "snd-timer"; echo "soundcore"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-core/etc/modules.d/30-sound-core; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ac97.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ac97 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ac97 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/ac97_bus.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/pci/ac97/snd-ac97-codec.ko; do if 
grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ac97/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ac97/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ac97/etc/modules.d; ( echo "ac97_bus"; echo "snd-ac97-codec"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ac97/etc/modules.d/35-ac97; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ac97.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-mpu401.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-mpu401 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-mpu401 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/drivers/mpu401/snd-mpu401-uart.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-mpu401/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-mpu401/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-mpu401/etc/modules.d; ( echo "snd-mpu401-uart"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-mpu401/etc/modules.d/35-sound-mpu401; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-mpu401.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-seq.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-seq mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-seq for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/core/seq/snd-seq.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/core/seq/snd-seq-midi-event.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/core/seq/snd-seq-midi.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-seq/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-seq/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-seq/etc/modules.d; ( echo "snd-seq"; echo "snd-seq-midi"; echo "snd-seq-midi-event"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-seq/etc/modules.d/35-sound-seq; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-seq.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-ens1371.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-ens1371 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-ens1371 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/pci/snd-ens1371.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-ens1371/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-ens1371/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-ens1371/etc/modules.d; ( echo "snd-ens1371"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-ens1371/etc/modules.d/36-sound-ens1371; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-ens1371.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-i8x0.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-i8x0 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-i8x0 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/pci/snd-intel8x0.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-i8x0/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-i8x0/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-i8x0/etc/modules.d; ( echo "snd-intel8x0"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-i8x0/etc/modules.d/36-sound-i8x0; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-i8x0.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-via82xx.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-via82xx mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-via82xx for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/pci/snd-via82xx.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-via82xx/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-via82xx/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-via82xx/etc/modules.d; ( echo "snd-via82xx"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-via82xx/etc/modules.d/36-sound-via82xx; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-via82xx.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/soc/snd-soc-core.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-core/etc/modules.d; ( echo "snd-soc-core"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-core/etc/modules.d/55-sound-soc-core; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-ac97.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-ac97 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-ac97 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/soc/codecs/snd-soc-ac97.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-ac97/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-ac97/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-ac97/etc/modules.d; ( echo "snd-soc-ac97"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-ac97/etc/modules.d/57-sound-soc-ac97; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-ac97.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-dummy.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-dummy mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-dummy for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/drivers/snd-dummy.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-dummy/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-dummy/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-dummy/etc/modules.d; ( echo "snd-dummy"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-dummy/etc/modules.d/32-sound-dummy; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-dummy.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-core true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-realtek.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-realtek mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-realtek true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-realtek.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-cmedia.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-cmedia mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-cmedia true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-cmedia.installed rm -rf 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-analog.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-analog mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-analog true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-analog.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-idt.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-idt mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-idt true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-idt.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-si3054.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-si3054 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-si3054 true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-si3054.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-cirrus.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-cirrus mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-cirrus true touch 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-cirrus.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-ca0110.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-ca0110 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-ca0110 true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-ca0110.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-ca0132.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-ca0132 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-ca0132 true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-ca0132.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-conexant.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-conexant mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-conexant true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-conexant.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-via.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-via mkdir -p 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-via true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-via.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-hdmi.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-hdmi mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-hdmi true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-hdmi.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc-spi.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc-spi mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc-spi for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mmc/host/of_mmc_spi.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/mmc/host/mmc_spi.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc-spi/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc-spi/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc-spi/etc/modules.d; ( echo "mmc_spi"; echo "of_mmc_spi"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc-spi/etc/modules.d/mmc-spi; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc-spi.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-bitbang.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-bitbang mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-bitbang for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/spi/spi-bitbang.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-bitbang/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-bitbang/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-bitbang.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/spi/spi-bitbang.ko' is built-in. 
rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-gpio.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-gpio mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-gpio for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/spi/spi-gpio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-gpio/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-gpio/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-gpio/etc/modules.d; ( echo "spi-gpio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-gpio/etc/modules.d/spi-gpio; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-gpio.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/spi/spi-gpio.ko' is built-in. 
rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-dev.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-dev mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-dev for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/spi/spidev.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-dev/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-dev/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-dev/etc/modules.d; ( echo "spidev"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-dev/etc/modules.d/spi-dev; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-dev.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/core/usbcore.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/common/usb-common.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-core/etc/modules.d; ( echo "usb-common"; echo "usbcore"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-core/etc/modules.d/20-usb-core; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-core/etc/modules-boot.d; ln -sf ../modules.d/20-usb-core /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-core/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ledtrig-usbport.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ledtrig-usbport mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ledtrig-usbport for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/core/ledtrig-usbport.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ledtrig-usbport/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ledtrig-usbport/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ledtrig-usbport/etc/modules.d; ( echo "ledtrig-usbport"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ledtrig-usbport/etc/modules.d/50-usb-ledtrig-usbport; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ledtrig-usbport.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-phy-nop.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-phy-nop mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-phy-nop for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/phy/phy-generic.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-phy-nop/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-phy-nop/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-phy-nop/etc/modules.d; ( echo "phy-generic"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-phy-nop/etc/modules.d/21-usb-phy-nop; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-phy-nop/etc/modules-boot.d; ln -sf ../modules.d/21-usb-phy-nop /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-phy-nop/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-phy-nop.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-ath79-usb.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-ath79-usb mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-ath79-usb for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/phy/phy-ar7100-usb.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/phy/phy-ar7200-usb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-ath79-usb/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-ath79-usb/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-ath79-usb/etc/modules.d; ( echo "phy-ar7100-usb"; echo "phy-ar7200-usb"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-ath79-usb/etc/modules.d/21-phy-ath79-usb; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-ath79-usb/etc/modules-boot.d; ln -sf ../modules.d/21-phy-ath79-usb /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-ath79-usb/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-ath79-usb.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/phy/phy-ar7100-usb.ko' is built-in. NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/phy/phy-ar7200-usb.ko' is built-in. 
rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-uhci.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-uhci mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-uhci for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/host/uhci-hcd.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-uhci/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-uhci/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-uhci/etc/modules.d; ( echo "uhci-hcd"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-uhci/etc/modules.d/50-usb-uhci; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-uhci/etc/modules-boot.d; ln -sf ../modules.d/50-usb-uhci /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-uhci/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-uhci.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/host/ohci-hcd.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/host/ohci-platform.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci/etc/modules.d; ( echo "ohci-at91"; echo "ohci-hcd"; echo "ohci-platform"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci/etc/modules.d/50-usb-ohci; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci/etc/modules-boot.d; ln -sf ../modules.d/50-usb-ohci /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci-pci.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci-pci mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci-pci for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/host/ohci-pci.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci-pci/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci-pci/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci-pci/etc/modules.d; ( echo "ohci-pci"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci-pci/etc/modules.d/51-usb-ohci-pci; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci-pci/etc/modules-boot.d; ln -sf ../modules.d/51-usb-ohci-pci /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci-pci/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci-pci.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ehci.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ehci mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ehci for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/host/ehci-hcd.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ehci/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ehci/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ehci/etc/modules.d; ( echo "ehci-hcd"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ehci/etc/modules.d/35-usb-ehci; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ehci/etc/modules-boot.d; ln -sf ../modules.d/35-usb-ehci /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ehci/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ehci.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/host/ehci-platform.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/host/ehci-fsl.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/host/fsl-mph-dr-of.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2/etc/modules.d; ( echo "ehci-atmel"; echo "ehci-fsl"; echo "ehci-hcd"; echo "ehci-orion"; echo "ehci-platform"; echo "fsl-mph-dr-of"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2/etc/modules.d/40-usb2; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2/etc/modules-boot.d; ln -sf ../modules.d/40-usb2 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2-pci.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2-pci mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2-pci for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/host/ehci-pci.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2-pci/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2-pci/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2-pci/etc/modules.d; ( echo "ehci-pci"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2-pci/etc/modules.d/42-usb2-pci; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2-pci/etc/modules-boot.d; ln -sf ../modules.d/42-usb2-pci /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2-pci/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2-pci.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc2.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc2 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc2 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/dwc2/dwc2.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc2/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc2/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc2/etc/modules.d; ( echo "dwc2"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc2/etc/modules.d/54-usb-dwc2; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc2/etc/modules-boot.d; ln -sf ../modules.d/54-usb-dwc2 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc2/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc2.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc3.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc3 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc3 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/dwc3/dwc3.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc3/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc3/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc3/etc/modules.d; ( echo "dwc3"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc3/etc/modules.d/54-usb-dwc3; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc3/etc/modules-boot.d; ln -sf ../modules.d/54-usb-dwc3 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc3/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc3.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-acm.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-acm mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-acm for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/class/cdc-acm.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-acm/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-acm/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-acm/etc/modules.d; ( echo "cdc-acm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-acm/etc/modules.d/usb-acm; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-acm.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-wdm.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-wdm mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-wdm for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/class/cdc-wdm.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-wdm/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-wdm/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-wdm/etc/modules.d; ( echo "cdc-wdm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-wdm/etc/modules.d/usb-wdm; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-wdm.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-audio.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-audio mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-audio for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/usb/snd-usbmidi-lib.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/sound/usb/snd-usb-audio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-audio/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-audio/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-audio/etc/modules.d; ( echo "snd-usb-audio"; echo "snd-usbmidi-lib"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-audio/etc/modules.d/usb-audio; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-audio.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-printer.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-printer mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-printer for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/class/usblp.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-printer/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-printer/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-printer/etc/modules.d; ( echo "usblp"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-printer/etc/modules.d/usb-printer; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-printer.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/usbserial.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial/etc/modules.d; ( echo "usbserial"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial/etc/modules.d/usb-serial; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-belkin.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-belkin mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-belkin for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/belkin_sa.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-belkin/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-belkin/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-belkin/etc/modules.d; ( echo "belkin_sa"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-belkin/etc/modules.d/usb-serial-belkin; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-belkin.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ch341.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ch341 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ch341 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/ch341.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ch341/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ch341/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ch341/etc/modules.d; ( echo "ch341"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ch341/etc/modules.d/usb-serial-ch341; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ch341.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-edgeport.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-edgeport mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-edgeport for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/io_edgeport.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-edgeport/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-edgeport/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-edgeport/etc/modules.d; ( echo "io_edgeport"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-edgeport/etc/modules.d/usb-serial-edgeport; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-edgeport.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ftdi.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ftdi mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ftdi for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/ftdi_sio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ftdi/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ftdi/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ftdi/etc/modules.d; ( echo "ftdi_sio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ftdi/etc/modules.d/usb-serial-ftdi; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ftdi.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-garmin.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-garmin mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-garmin for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/garmin_gps.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-garmin/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-garmin/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-garmin/etc/modules.d; ( echo "garmin_gps"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-garmin/etc/modules.d/usb-serial-garmin; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-garmin.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-simple.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-simple mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-simple for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/usb-serial-simple.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-simple/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-simple/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-simple/etc/modules.d; ( echo "usb-serial-simple"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-simple/etc/modules.d/usb-serial-simple; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-simple.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ti-usb.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ti-usb mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ti-usb for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/ti_usb_3410_5052.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ti-usb/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ti-usb/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ti-usb/etc/modules.d; ( echo "ti_usb_3410_5052"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ti-usb/etc/modules.d/usb-serial-ti-usb; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ti-usb.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ipw.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ipw mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ipw for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/ipw.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ipw/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ipw/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ipw/etc/modules.d; ( echo "ipw"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ipw/etc/modules.d/usb-serial-ipw; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ipw.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mct.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mct mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mct for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/mct_u232.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mct/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mct/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mct/etc/modules.d; ( echo "mct_u232"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mct/etc/modules.d/usb-serial-mct; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mct.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7720.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7720 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7720 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/mos7720.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7720/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7720/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7720/etc/modules.d; ( echo "mos7720"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7720/etc/modules.d/usb-serial-mos7720; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7720.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7840.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7840 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7840 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/mos7840.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7840/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7840/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7840/etc/modules.d; ( echo "mos7840"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7840/etc/modules.d/usb-serial-mos7840; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7840.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-pl2303.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-pl2303 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-pl2303 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/pl2303.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-pl2303/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-pl2303/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-pl2303/etc/modules.d; ( echo "pl2303"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-pl2303/etc/modules.d/usb-serial-pl2303; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-pl2303.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cp210x.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cp210x mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cp210x for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/cp210x.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cp210x/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cp210x/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cp210x/etc/modules.d; ( echo "cp210x"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cp210x/etc/modules.d/usb-serial-cp210x; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cp210x.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ark3116.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ark3116 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ark3116 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/ark3116.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ark3116/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ark3116/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ark3116/etc/modules.d; ( echo "ark3116"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ark3116/etc/modules.d/usb-serial-ark3116; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ark3116.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-oti6858.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-oti6858 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-oti6858 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/oti6858.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-oti6858/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-oti6858/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-oti6858/etc/modules.d; ( echo "oti6858"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-oti6858/etc/modules.d/usb-serial-oti6858; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-oti6858.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-sierrawireless.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-sierrawireless mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-sierrawireless for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/sierra.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-sierrawireless/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-sierrawireless/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-sierrawireless/etc/modules.d; ( echo "sierra"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-sierrawireless/etc/modules.d/usb-serial-sierrawireless; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-sierrawireless.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-visor.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-visor mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-visor for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/visor.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-visor/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-visor/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-visor/etc/modules.d; ( echo "visor"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-visor/etc/modules.d/usb-serial-visor; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-visor.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cypress-m8.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cypress-m8 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cypress-m8 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/cypress_m8.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cypress-m8/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cypress-m8/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cypress-m8/etc/modules.d; ( echo "cypress_m8"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cypress-m8/etc/modules.d/usb-serial-cypress-m8; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cypress-m8.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-keyspan.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-keyspan mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-keyspan for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/keyspan.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/misc/ezusb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-keyspan/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-keyspan/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-keyspan/etc/modules.d; ( echo "ezusb"; echo "keyspan"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-keyspan/etc/modules.d/usb-serial-keyspan; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-keyspan.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-wwan.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-wwan mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-wwan for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/usb_wwan.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-wwan/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-wwan/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-wwan/etc/modules.d; ( echo "usb_wwan"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-wwan/etc/modules.d/usb-serial-wwan; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-wwan.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-option.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-option mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-option for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/option.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-option/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-option/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-option/etc/modules.d; ( echo "option"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-option/etc/modules.d/usb-serial-option; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-option.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-qualcomm.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-qualcomm mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-qualcomm for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/serial/qcserial.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-qualcomm/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-qualcomm/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-qualcomm/etc/modules.d; ( echo "qcserial"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-qualcomm/etc/modules.d/usb-serial-qualcomm; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-qualcomm.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/storage/usb-storage.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage/etc/modules.d; ( echo "usb-storage"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage/etc/modules.d/usb-storage; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage/etc/modules-boot.d; ln -sf ../modules.d/usb-storage /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-extras.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-extras mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-extras for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/storage/ums-alauda.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/storage/ums-cypress.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/storage/ums-datafab.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/storage/ums-freecom.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/storage/ums-isd200.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/storage/ums-jumpshot.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/storage/ums-karma.ko 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/storage/ums-sddr09.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/storage/ums-sddr55.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/storage/ums-usbat.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-extras/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-extras/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-extras/etc/modules.d; ( echo "ums-alauda"; echo "ums-cypress"; echo "ums-datafab"; echo "ums-freecom"; echo "ums-isd200"; echo "ums-jumpshot"; echo "ums-karma"; echo "ums-sddr09"; echo "ums-sddr55"; echo "ums-usbat"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-extras/etc/modules.d/usb-storage-extras; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-extras.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-uas.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-uas mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-uas for mod in 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/storage/uas.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-uas/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-uas/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-uas/etc/modules.d; ( echo "uas"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-uas/etc/modules.d/usb-storage-uas; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-uas/etc/modules-boot.d; ln -sf ../modules.d/usb-storage-uas /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-uas/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-uas.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/atm/usbatm.ko; do if grep -q 
"${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm/etc/modules.d; ( echo "usbatm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm/etc/modules.d/usb-atm; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-speedtouch.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-speedtouch mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-speedtouch for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/atm/speedtch.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-speedtouch/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-speedtouch/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-speedtouch/etc/modules.d; ( echo "speedtch"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-speedtouch/etc/modules.d/usb-atm-speedtouch; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-speedtouch.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-ueagle.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-ueagle mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-ueagle for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/atm/ueagle-atm.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-ueagle/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-ueagle/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-ueagle/etc/modules.d; ( echo "ueagle-atm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-ueagle/etc/modules.d/usb-atm-ueagle; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-ueagle.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-cxacru.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-cxacru mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-cxacru for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/atm/cxacru.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-cxacru/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-cxacru/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-cxacru/etc/modules.d; ( echo "cxacru"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-cxacru/etc/modules.d/usb-atm-cxacru; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-cxacru.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/usbnet.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net/etc/modules.d; ( echo "usbnet"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net/etc/modules.d/usb-net; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/asix.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix/etc/modules.d; ( echo "asix"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix/etc/modules.d/usb-net-asix; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix-ax88179.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix-ax88179 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix-ax88179 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/ax88179_178a.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix-ax88179/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix-ax88179/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix-ax88179/etc/modules.d; ( echo "ax88179_178a"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix-ax88179/etc/modules.d/usb-net-asix-ax88179; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix-ax88179.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-hso.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-hso mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-hso for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/hso.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-hso/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-hso/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-hso/etc/modules.d; ( echo "hso"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-hso/etc/modules.d/usb-net-hso; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-hso.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kaweth.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kaweth mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kaweth for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/kaweth.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kaweth/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kaweth/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kaweth/etc/modules.d; ( echo "kaweth"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kaweth/etc/modules.d/usb-net-kaweth; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kaweth.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pegasus.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pegasus mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pegasus for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/pegasus.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pegasus/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pegasus/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pegasus/etc/modules.d; ( echo "pegasus"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pegasus/etc/modules.d/usb-net-pegasus; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pegasus.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-mcs7830.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-mcs7830 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-mcs7830 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/mcs7830.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-mcs7830/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-mcs7830/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-mcs7830/etc/modules.d; ( echo "mcs7830"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-mcs7830/etc/modules.d/usb-net-mcs7830; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-mcs7830.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-smsc95xx.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-smsc95xx mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-smsc95xx for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/smsc95xx.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-smsc95xx/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-smsc95xx/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-smsc95xx/etc/modules.d; ( echo "smsc95xx"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-smsc95xx/etc/modules.d/usb-net-smsc95xx; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-smsc95xx.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-dm9601-ether.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-dm9601-ether mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-dm9601-ether for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/dm9601.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-dm9601-ether/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-dm9601-ether/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-dm9601-ether/etc/modules.d; ( echo "dm9601"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-dm9601-ether/etc/modules.d/usb-net-dm9601-ether; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-dm9601-ether.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ether.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ether mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ether for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/cdc_ether.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ether/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ether/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ether/etc/modules.d; ( echo "cdc_ether"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ether/etc/modules.d/usb-net-cdc-ether; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ether.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-eem.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-eem mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-eem for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/cdc_eem.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-eem/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-eem/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-eem/etc/modules.d; ( echo "cdc_eem"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-eem/etc/modules.d/usb-net-cdc-eem; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-eem.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-subset.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-subset mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-subset for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/cdc_subset.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-subset/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-subset/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-subset/etc/modules.d; ( echo "cdc_subset"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-subset/etc/modules.d/usb-net-cdc-subset; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-subset.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-qmi-wwan.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-qmi-wwan mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-qmi-wwan for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/qmi_wwan.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-qmi-wwan/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-qmi-wwan/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-qmi-wwan/etc/modules.d; ( echo "qmi_wwan"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-qmi-wwan/etc/modules.d/usb-net-qmi-wwan; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-qmi-wwan.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8150.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8150 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8150 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/rtl8150.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8150/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8150/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8150/etc/modules.d; ( echo "rtl8150"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8150/etc/modules.d/usb-net-rtl8150; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8150.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8152.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8152 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8152 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/r8152.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8152/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8152/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8152/etc/modules.d; ( echo "r8152"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8152/etc/modules.d/usb-net-rtl8152; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8152.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sr9700.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sr9700 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sr9700 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/sr9700.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sr9700/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sr9700/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sr9700/etc/modules.d; ( echo "sr9700"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sr9700/etc/modules.d/usb-net-sr9700; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sr9700.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rndis.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rndis mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rndis for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/rndis_host.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rndis/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rndis/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rndis/etc/modules.d; ( echo "rndis_host"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rndis/etc/modules.d/usb-net-rndis; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rndis.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-mbim.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-mbim mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-mbim for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/cdc_mbim.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-mbim/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-mbim/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-mbim/etc/modules.d; ( echo "cdc_mbim"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-mbim/etc/modules.d/usb-net-cdc-mbim; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-mbim.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ncm.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ncm mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ncm for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/cdc_ncm.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ncm/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ncm/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ncm/etc/modules.d; ( echo "cdc_ncm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ncm/etc/modules.d/usb-net-cdc-ncm; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ncm.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-huawei-cdc-ncm.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-huawei-cdc-ncm mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-huawei-cdc-ncm for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/huawei_cdc_ncm.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-huawei-cdc-ncm/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-huawei-cdc-ncm/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-huawei-cdc-ncm/etc/modules.d; ( echo "huawei_cdc_ncm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-huawei-cdc-ncm/etc/modules.d/usb-net-huawei-cdc-ncm; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-huawei-cdc-ncm.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sierrawireless.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sierrawireless mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sierrawireless for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/sierra_net.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sierrawireless/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sierrawireless/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sierrawireless/etc/modules.d; ( echo "sierra_net"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sierrawireless/etc/modules.d/usb-net-sierrawireless; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sierrawireless.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-ipheth.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-ipheth mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-ipheth for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/ipheth.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-ipheth/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-ipheth/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-ipheth/etc/modules.d; ( echo "ipheth"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-ipheth/etc/modules.d/usb-net-ipheth; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-ipheth.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kalmia.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kalmia mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kalmia for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/kalmia.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kalmia/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kalmia/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kalmia/etc/modules.d; ( echo "kalmia"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kalmia/etc/modules.d/usb-net-kalmia; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kalmia.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pl.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pl mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pl for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/usb/plusb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pl/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pl/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pl/etc/modules.d; ( echo "plusb"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pl/etc/modules.d/usb-net-pl; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pl.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-hid.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-hid mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-hid for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/hid/usbhid/usbhid.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-hid/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-hid/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-hid/etc/modules.d; ( echo "usbhid"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-hid/etc/modules.d/usb-hid; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-hid.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-yealink.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-yealink mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-yealink for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/misc/yealink.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-yealink/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-yealink/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-yealink/etc/modules.d; ( echo "yealink"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-yealink/etc/modules.d/usb-yealink; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-yealink.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-cm109.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-cm109 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-cm109 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/input/misc/cm109.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-cm109/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-cm109/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-cm109/etc/modules.d; ( echo "cm109"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-cm109/etc/modules.d/usb-cm109; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-cm109.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-test.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-test mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-test for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/misc/usbtest.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-test/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-test/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-test.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/usbip/usbip-core.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip/etc/modules.d; ( echo "usbip-core"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip/etc/modules.d/usbip; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-client.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-client mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-client for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/usbip/vhci-hcd.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-client/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-client/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-client/etc/modules.d; ( echo "vhci-hcd"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-client/etc/modules.d/usbip-client; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-client.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-server.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-server mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-server for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/usbip/usbip-host.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-server/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-server/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-server/etc/modules.d; ( echo "usbip-host"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-server/etc/modules.d/usbip-server; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-server.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/extcon/extcon-core.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/chipidea/ci_hdrc.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/common/ulpi.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/roles/roles.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea/etc/modules.d; ( echo "ci_hdrc"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea/etc/modules.d/39-usb-chipidea; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea/etc/modules-boot.d; ln -sf ../modules.d/39-usb-chipidea /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/extcon/extcon-core.ko' is built-in. rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea2.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea2 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea2 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/extcon/extcon-core.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/chipidea/ci_hdrc_usb2.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea2/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea2/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea2/etc/modules.d; ( echo "ci_hdrc_usb2"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea2/etc/modules.d/39-usb-chipidea2; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea2/etc/modules-boot.d; ln -sf ../modules.d/39-usb-chipidea2 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea2/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea2.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/extcon/extcon-core.ko' is built-in. 
rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbmon.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbmon mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbmon for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/mon/usbmon.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbmon/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbmon/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbmon/etc/modules.d; ( echo "usbmon"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbmon/etc/modules.d/usbmon; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbmon.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb3.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb3 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb3 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/host/xhci-hcd.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/host/xhci-pci.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/host/xhci-plat-hcd.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb3/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb3/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb3/etc/modules.d; ( echo "xhci-hcd"; echo "xhci-pci"; echo "xhci-plat-hcd"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb3/etc/modules.d/54-usb3; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb3/etc/modules-boot.d; ln -sf ../modules.d/54-usb3 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb3/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb3.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-chaoskey.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-chaoskey mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-chaoskey for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/usb/misc/chaoskey.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-chaoskey/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-chaoskey/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-chaoskey/etc/modules.d; ( echo "chaoskey"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-chaoskey/etc/modules.d/chaoskey; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-chaoskey.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/v4l2-core/videodev.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-core/etc/modules.d; ( echo "v4l2-common"; echo "videodev"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-core/etc/modules.d/60-video-core; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-videobuf2.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-videobuf2 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-videobuf2 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/common/videobuf2/videobuf2-common.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/common/videobuf2/videobuf2-v4l2.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/common/videobuf2/videobuf2-memops.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/common/videobuf2/videobuf2-vmalloc.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-videobuf2/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-videobuf2/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-videobuf2/etc/modules.d; ( echo "videobuf-v4l2"; echo "videobuf2-core"; echo "videobuf2-memops"; echo "videobuf2-vmalloc"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-videobuf2/etc/modules.d/65-video-videobuf2; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-videobuf2.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-cpia2.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-cpia2 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-cpia2 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/cpia2/cpia2.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-cpia2/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-cpia2/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-cpia2/etc/modules.d; ( echo "cpia2"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-cpia2/etc/modules.d/video-cpia2; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-cpia2.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-pwc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-pwc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-pwc for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/pwc/pwc.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-pwc/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-pwc/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-pwc/etc/modules.d; ( echo "pwc"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-pwc/etc/modules.d/video-pwc; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-pwc.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-uvc.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-uvc mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-uvc for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/uvc/uvcvideo.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-uvc/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-uvc/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-uvc/etc/modules.d; ( echo "uvcvideo"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-uvc/etc/modules.d/video-uvc; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-uvc.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-core.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-core mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-core for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_main.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-core/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-core/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-core/etc/modules.d; ( echo "gspca_main"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-core/etc/modules.d/video-gspca-core; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-core.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-conex.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-conex mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-conex for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_conex.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-conex/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-conex/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-conex/etc/modules.d; ( echo "gspca_conex"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-conex/etc/modules.d/video-gspca-conex; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-conex.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-etoms.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-etoms mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-etoms for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_etoms.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-etoms/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-etoms/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-etoms/etc/modules.d; ( echo "gspca_etoms"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-etoms/etc/modules.d/video-gspca-etoms; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-etoms.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-finepix.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-finepix mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-finepix for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_finepix.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-finepix/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-finepix/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-finepix/etc/modules.d; ( echo "gspca_finepix"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-finepix/etc/modules.d/video-gspca-finepix; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-finepix.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mars.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mars mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mars for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_mars.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mars/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mars/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mars/etc/modules.d; ( echo "gspca_mars"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mars/etc/modules.d/video-gspca-mars; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mars.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mr97310a.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mr97310a mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mr97310a for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_mr97310a.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mr97310a/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mr97310a/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mr97310a/etc/modules.d; ( echo "gspca_mr97310a"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mr97310a/etc/modules.d/video-gspca-mr97310a; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mr97310a.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov519.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov519 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov519 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_ov519.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov519/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov519/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov519/etc/modules.d; ( echo "gspca_ov519"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov519/etc/modules.d/video-gspca-ov519; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov519.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_ov534.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534/etc/modules.d; ( echo "gspca_ov534"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534/etc/modules.d/video-gspca-ov534; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534-9.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534-9 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534-9 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_ov534_9.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534-9/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534-9/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534-9/etc/modules.d; ( echo "gspca_ov534_9"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534-9/etc/modules.d/video-gspca-ov534-9; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534-9.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac207.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac207 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac207 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_pac207.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac207/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac207/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac207/etc/modules.d; ( echo "gspca_pac207"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac207/etc/modules.d/video-gspca-pac207; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac207.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac7311.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac7311 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac7311 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_pac7311.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac7311/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac7311/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac7311/etc/modules.d; ( echo "gspca_pac7311"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac7311/etc/modules.d/video-gspca-pac7311; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac7311.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-se401.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-se401 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-se401 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_se401.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-se401/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-se401/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-se401/etc/modules.d; ( echo "gspca_se401"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-se401/etc/modules.d/video-gspca-se401; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-se401.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sn9c20x.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sn9c20x mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sn9c20x for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_sn9c20x.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sn9c20x/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sn9c20x/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sn9c20x/etc/modules.d; ( echo "gspca_sn9c20x"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sn9c20x/etc/modules.d/video-gspca-sn9c20x; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sn9c20x.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixb.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixb mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixb for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_sonixb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixb/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixb/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixb/etc/modules.d; ( echo "gspca_sonixb"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixb/etc/modules.d/video-gspca-sonixb; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixb.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixj.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixj mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixj for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_sonixj.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixj/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixj/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixj/etc/modules.d; ( echo "gspca_sonixj"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixj/etc/modules.d/video-gspca-sonixj; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixj.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca500.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca500 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca500 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_spca500.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca500/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca500/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca500/etc/modules.d; ( echo "gspca_spca500"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca500/etc/modules.d/video-gspca-spca500; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca500.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca501.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca501 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca501 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_spca501.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca501/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca501/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca501/etc/modules.d; ( echo "gspca_spca501"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca501/etc/modules.d/video-gspca-spca501; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca501.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca505.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca505 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca505 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_spca505.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca505/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca505/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca505/etc/modules.d; ( echo "gspca_spca505"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca505/etc/modules.d/video-gspca-spca505; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca505.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca506.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca506 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca506 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_spca506.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca506/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca506/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca506/etc/modules.d; ( echo "gspca_spca506"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca506/etc/modules.d/video-gspca-spca506; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca506.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca508.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca508 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca508 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_spca508.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca508/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca508/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca508/etc/modules.d; ( echo "gspca_spca508"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca508/etc/modules.d/video-gspca-spca508; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca508.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca561.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca561 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca561 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_spca561.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca561/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca561/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca561/etc/modules.d; ( echo "gspca_spca561"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca561/etc/modules.d/video-gspca-spca561; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca561.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_sq905.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905/etc/modules.d; ( echo "gspca_sq905"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905/etc/modules.d/video-gspca-sq905; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905c.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905c mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905c for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_sq905c.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905c/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905c/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905c/etc/modules.d; ( echo "gspca_sq905c"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905c/etc/modules.d/video-gspca-sq905c; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905c.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stk014.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stk014 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stk014 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_stk014.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stk014/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stk014/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stk014/etc/modules.d; ( echo "gspca_stk014"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stk014/etc/modules.d/video-gspca-stk014; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stk014.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sunplus.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sunplus mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sunplus for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_sunplus.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sunplus/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sunplus/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sunplus/etc/modules.d; ( echo "gspca_sunplus"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sunplus/etc/modules.d/video-gspca-sunplus; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sunplus.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-t613.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-t613 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-t613 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_t613.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-t613/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-t613/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-t613/etc/modules.d; ( echo "gspca_t613"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-t613/etc/modules.d/video-gspca-t613; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-t613.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-tv8532.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-tv8532 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-tv8532 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_tv8532.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-tv8532/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-tv8532/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-tv8532/etc/modules.d; ( echo "gspca_tv8532"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-tv8532/etc/modules.d/video-gspca-tv8532; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-tv8532.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-vc032x.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-vc032x mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-vc032x for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_vc032x.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-vc032x/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-vc032x/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-vc032x/etc/modules.d; ( echo "gspca_vc032x"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-vc032x/etc/modules.d/video-gspca-vc032x; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-vc032x.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-zc3xx.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-zc3xx mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-zc3xx for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_zc3xx.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-zc3xx/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-zc3xx/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-zc3xx/etc/modules.d; ( echo "gspca_zc3xx"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-zc3xx/etc/modules.d/video-gspca-zc3xx; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-zc3xx.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-m5602.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-m5602 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-m5602 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/m5602/gspca_m5602.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-m5602/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-m5602/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-m5602/etc/modules.d; ( echo "gspca_m5602"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-m5602/etc/modules.d/video-gspca-m5602; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-m5602.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stv06xx.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stv06xx mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stv06xx for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/stv06xx/gspca_stv06xx.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stv06xx/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stv06xx/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stv06xx/etc/modules.d; ( echo "gspca_stv06xx"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stv06xx/etc/modules.d/video-gspca-stv06xx; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stv06xx.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-gl860.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-gl860 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-gl860 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gl860/gspca_gl860.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-gl860/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-gl860/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-gl860/etc/modules.d; ( echo "gspca_gl860"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-gl860/etc/modules.d/video-gspca-gl860; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-gl860.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-jeilinj.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-jeilinj mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-jeilinj for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_jeilinj.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-jeilinj/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-jeilinj/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-jeilinj/etc/modules.d; ( echo "gspca_jeilinj"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-jeilinj/etc/modules.d/video-gspca-jeilinj; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-jeilinj.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-konica.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-konica mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-konica for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/media/usb/gspca/gspca_konica.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-konica/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-konica/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-konica/etc/modules.d; ( echo "gspca_konica"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-konica/etc/modules.d/video-gspca-konica; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-konica.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/w1/wire.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-gpio.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-gpio mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-gpio for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/w1/masters/w1-gpio.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-gpio/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-gpio/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-gpio/etc/modules.d; ( echo "w1-gpio"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-gpio/etc/modules.d/w1-master-gpio; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-gpio.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2482.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2482 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2482 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/w1/masters/ds2482.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2482/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2482/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2482/etc/modules.d; ( echo "ds2482"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2482/etc/modules.d/w1-master-ds2482; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2482.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2490.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2490 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2490 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/w1/masters/ds2490.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2490/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2490/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2490/etc/modules.d; ( echo "ds2490"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2490/etc/modules.d/w1-master-ds2490; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2490.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-therm.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-therm mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-therm for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/w1/slaves/w1_therm.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-therm/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-therm/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-therm/etc/modules.d; ( echo "w1_therm"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-therm/etc/modules.d/w1-slave-therm; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-therm.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-smem.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-smem mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-smem for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/w1/slaves/w1_smem.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-smem/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-smem/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-smem/etc/modules.d; ( echo "w1_smem"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-smem/etc/modules.d/w1-slave-smem; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-smem.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2431.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2431 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2431 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/w1/slaves/w1_ds2431.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2431/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2431/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2431/etc/modules.d; ( echo "w1_ds2431"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2431/etc/modules.d/w1-slave-ds2431; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2431.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2433.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2433 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2433 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/w1/slaves/w1_ds2433.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2433/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2433/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2433/etc/modules.d; ( echo "w1_ds2433"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2433/etc/modules.d/w1-slave-ds2433; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2433.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2760.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2760 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2760 true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2760.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2413.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2413 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2413 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/w1/slaves/w1_ds2413.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2413/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2413/lib/modules/5.4.63/ ; else 
echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2413/etc/modules.d; ( echo "w1_ds2413"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2413/etc/modules.d/w1-slave-ds2413; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2413.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-prism54.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-prism54 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-prism54 true touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-prism54.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-rtl8192su.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-rtl8192su mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-rtl8192su for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/staging/rtl8712/r8712u.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-rtl8192su/lib/modules/5.4.63 ; cp -fpR -L $mod 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-rtl8192su/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." >&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-rtl8192su/etc/modules.d; ( echo "r8712u"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-rtl8192su/etc/modules.d/net-rtl8192su; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-rtl8192su.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ieee802154/ieee802154.ko /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ieee802154/ieee802154_socket.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154/etc/modules.d; ( echo "ieee802154"; echo "ieee802154_socket"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154/etc/modules.d/90-ieee802154; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mac802154.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mac802154 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mac802154 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/mac802154/mac802154.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mac802154/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mac802154/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mac802154/etc/modules.d; ( echo "mac802154"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mac802154/etc/modules.d/91-mac802154; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mac802154.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fakelb.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fakelb mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fakelb for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ieee802154/fakelb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fakelb/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fakelb/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fakelb/etc/modules.d; ( echo "fakelb"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fakelb/etc/modules.d/92-fakelb; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fakelb.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atusb.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atusb mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atusb for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ieee802154/atusb.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atusb/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atusb/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atusb/etc/modules.d; ( echo "atusb"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atusb/etc/modules.d/atusb; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atusb.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-at86rf230.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-at86rf230 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-at86rf230 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ieee802154/at86rf230.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-at86rf230/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-at86rf230/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-at86rf230/etc/modules.d; ( echo "at86rf230"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-at86rf230/etc/modules.d/at86rf230; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-at86rf230.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mrf24j40.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mrf24j40 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mrf24j40 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ieee802154/mrf24j40.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mrf24j40/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mrf24j40/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mrf24j40/etc/modules.d; ( echo "mrf24j40"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mrf24j40/etc/modules.d/mrf24j40; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mrf24j40.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-cc2520.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-cc2520 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-cc2520 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ieee802154/cc2520.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-cc2520/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-cc2520/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-cc2520/etc/modules.d; ( echo "cc2520"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-cc2520/etc/modules.d/cc2520; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-cc2520.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ca8210.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ca8210 mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ca8210 for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/net/ieee802154/ca8210.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ca8210/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ca8210/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ca8210/etc/modules.d; ( echo "ca8210"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ca8210/etc/modules.d/ca8210; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ca8210.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154-6lowpan.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154-6lowpan mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154-6lowpan for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/net/ieee802154/6lowpan/ieee802154_6lowpan.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154-6lowpan/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154-6lowpan/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154-6lowpan/etc/modules.d; ( echo "ieee802154_6lowpan"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154-6lowpan/etc/modules.d/91-ieee802154-6lowpan; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154-6lowpan.installed rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-reset.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-reset mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-reset for mod in /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/leds-reset.ko; do if grep -q "${mod##/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/}" "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/modules.builtin"; then echo "NOTICE: module '$mod' is built-in."; elif [ -e $mod ]; then mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-reset/lib/modules/5.4.63 ; cp -fpR -L $mod /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-reset/lib/modules/5.4.63/ ; else echo "ERROR: module '$mod' is missing." 
>&2; exit 1; fi; done; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-reset/etc/modules.d; ( echo "leds-reset"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-reset/etc/modules.d/60-leds-reset; mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-reset/etc/modules-boot.d; ln -sf ../modules.d/60-leds-reset /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-reset/etc/modules-boot.d/; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-reset.installed NOTICE: module '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63/drivers/leds/leds-reset.ko' is built-in. mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-aoe/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-aoe_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ata-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-ahci/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ata-ahci_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-artop/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ata-artop_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-marvell-sata/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ata-marvell-sata_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-nvidia-sata/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ata-nvidia-sata_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-pdc202xx-old/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ata-pdc202xx-old_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-piix/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ata-piix_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ata-sil_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-sil24/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ata-sil24_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ata-via-sata/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ata-via-sata_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-block2mtd/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-block2mtd_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dax/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-dax_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-dm_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm-raid/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-dm-raid_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iscsi-initiator/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iscsi-initiator_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-mod/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-md-mod_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-linear/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-md-linear_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid0/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-md-raid0_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid1/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-md-raid1_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid10/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-md-raid10_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-raid456/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-md-raid456_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-md-multipath/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-md-multipath_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-loop/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-loop_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nbd/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nbd_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-scsi-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-generic/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-scsi-generic_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-cdrom/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-scsi-cdrom_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-scsi-tape/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-scsi-tape_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iosched-bfq/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iosched-bfq_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-bcm/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-bcm_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-c-can_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-pci/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-c-can-pci_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-c-can-platform/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-c-can-platform_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-gw/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-gw_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-mcp251x/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-mcp251x_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-raw/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-raw_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-slcan/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-slcan_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-8dev/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-usb-8dev_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-ems/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-usb-ems_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-esd/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-usb-esd_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-kvaser/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-usb-kvaser_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-usb-peak/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-usb-peak_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-can-vcan/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-can-vcan_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-acompress/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-acompress_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-aead/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-aead_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-arc4/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-arc4_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-authenc/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-authenc_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cbc/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-cbc_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ccm/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-ccm_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cmac/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-cmac_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-crc32_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-crc32c/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-crc32c_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ctr/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-ctr_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-cts/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-cts_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-deflate/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-deflate_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-des/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-des_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecb/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-ecb_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ecdh/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-ecdh_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-echainiv/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-echainiv_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-fcrypt/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-fcrypt_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gcm/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-gcm_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xcbc/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-xcbc_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-gf128/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-gf128_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-ghash/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-ghash_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hash/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-hash_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hmac/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-hmac_installed WARNING: kmod-crypto-hw-ccp is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-ccp/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-hw-ccp_installed WARNING: kmod-crypto-hw-geode is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-geode/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-hw-geode_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-hifn-795x/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-hw-hifn-795x_installed WARNING: kmod-crypto-hw-padlock is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-padlock/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-hw-padlock_installed WARNING: kmod-crypto-hw-talitos is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-hw-talitos/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-hw-talitos_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-kpp/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-kpp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-manager/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-manager_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md4/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-md4_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-md5/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-md5_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-michael-mic/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-michael-mic_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-misc/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-misc_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-null/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-null_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcbc/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-pcbc_installed WARNING: kmod-crypto-pcompress is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-pcompress/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-pcompress_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rsa/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-rsa_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rmd160/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-rmd160_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-rng/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-rng_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-seqiv/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-seqiv_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha1/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-sha1_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha256/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-sha256_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-sha512/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-sha512_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-test/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-test_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-user/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-user_installed WARNING: kmod-crypto-wq is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-wq/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-wq_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-crypto-xts/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-crypto-xts_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-firewire_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-net/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-firewire-net_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-ohci/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-firewire-ohci_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-firewire-sbp2/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-firewire-sbp2_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-autofs4/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-autofs4_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-btrfs/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-btrfs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cifs/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-cifs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-configfs/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-configfs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-cramfs/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-cramfs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-exportfs/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-exportfs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ext4/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-ext4_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-f2fs/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-f2fs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-fscache/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-fscache_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfs/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-hfs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-hfsplus/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-hfsplus_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-isofs/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-isofs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-jfs/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-jfs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-minix/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-minix_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-msdos/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-msdos_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-nfs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-nfs-common_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-common-rpcsec/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-nfs-common-rpcsec_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v3/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-nfs-v3_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfs-v4/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-nfs-v4_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-nfsd/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-nfsd_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-ntfs/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-ntfs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-reiserfs/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-reiserfs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-squashfs/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-squashfs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-udf/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-udf_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-vfat/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-vfat_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fs-xfs/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fs-xfs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fuse/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fuse_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ad7418/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-ad7418_installed WARNING: kmod-hwmon-ads1015 is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ads1015/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-ads1015_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7410/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-adt7410_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adt7475/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-adt7475_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-dme1737/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-dme1737_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-drivetemp/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-drivetemp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-gpiofan/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-gpiofan_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina209/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-ina209_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ina2xx/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-ina2xx_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-it87/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-it87_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm63/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-lm63_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm75/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-lm75_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm77/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-lm77_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm85/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-lm85_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm90/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-lm90_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm92/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-lm92_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-lm95241/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-lm95241_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-ltc4151/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-ltc4151_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-mcp3021/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-mcp3021_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-pmbus-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pmbus-zl6100/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-pmbus-zl6100_installed WARNING: kmod-hwmon-pwmfan is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-pwmfan/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-pwmfan_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sch5627/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-sch5627_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-sht21/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-sht21_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp102/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-tmp102_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp103/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-tmp103_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-tmp421/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-tmp421_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-vid/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-vid_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-w83793/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-w83793_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hwmon-adcxx/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hwmon-adcxx_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-i2c-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-bit/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-i2c-algo-bit_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pca/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-i2c-algo-pca_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-algo-pcf/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-i2c-algo-pcf_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-gpio/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-i2c-gpio_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-i2c-mux_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-gpio/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-i2c-mux-gpio_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca9541/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-i2c-mux-pca9541_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-mux-pca954x/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-i2c-mux-pca954x_installed WARNING: kmod-i2c-pxa is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-pxa/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-i2c-pxa_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-smbus/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-i2c-smbus_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i2c-tiny-usb/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-i2c-tiny-usb_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-kfifo-buf/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-kfifo-buf_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-industrialio-triggered-buffer/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-industrialio-triggered-buffer_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ad799x/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-ad799x_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-hmc5843/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-hmc5843_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bh1750/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-bh1750_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-am2315/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-am2315_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-dht11/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-dht11_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-bme680_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-i2c/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-bme680-i2c_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bme680-spi/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-bme680-spi_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-bmp280_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-i2c/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-bmp280-i2c_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-bmp280-spi/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-bmp280-spi_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-htu21/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-htu21_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-ccs811/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-ccs811_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-si7020/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-si7020_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-st_accel_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-i2c/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-st_accel-i2c_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-st_accel-spi/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-st_accel-spi_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-lsm6dsx_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-i2c/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-lsm6dsx-i2c_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-lsm6dsx-spi/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-lsm6dsx-spi_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-sps30/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-sps30_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-tsl4531/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-tsl4531_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-fxos8700_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-i2c/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-fxos8700-i2c_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iio-fxos8700-spi/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iio-fxos8700-spi_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hid_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hid-generic/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hid-generic_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-core/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-input-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-evdev/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-input-evdev_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-input-gpio-keys_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-keys-polled/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-input-gpio-keys-polled_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-gpio-encoder/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-input-gpio-encoder_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-joydev/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-input-joydev_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-polldev/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-input-polldev_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-matrixkmap/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-input-matrixkmap_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-touchscreen-ads7846/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-input-touchscreen-ads7846_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-input-uinput/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-input-uinput_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-gpio/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-leds-gpio_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-activity/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ledtrig-activity_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-heartbeat/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ledtrig-heartbeat_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-gpio/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ledtrig-gpio_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-netdev/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ledtrig-netdev_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-default-on/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ledtrig-default-on_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-timer/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ledtrig-timer_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-transient/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ledtrig-transient_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ledtrig-oneshot/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ledtrig-oneshot_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-pca963x/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-leds-pca963x_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-ccitt/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-crc-ccitt_installed echo "kmod-lib-crc-ccitt" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc-itu-t/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-crc-itu-t_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc7/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-crc7_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc8/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-crc8_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc16/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-crc16_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-crc32c/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-crc32c_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lzo/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-lzo_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zstd/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-zstd_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-lz4/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-lz4_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-raid6/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-raid6_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-xor/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-xor_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-textsearch/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-textsearch_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-inflate/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-zlib-inflate_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-zlib-deflate/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-zlib-deflate_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lib-cordic/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lib-cordic_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-asn1-decoder/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-asn1-decoder_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis190/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sis190_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-skge/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-skge_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-alx/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-alx_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl2/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-atl2_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-atl1_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1c/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-atl1c_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atl1e/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-atl1e_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-libphy/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-libphy_installed WARNING: kmod-phylink is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylink/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-phylink_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mii/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mii_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio-gpio/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mdio-gpio_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-et131x/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-et131x_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phylib-broadcom/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-phylib-broadcom_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-broadcom/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-phy-broadcom_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-bcm84881/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-phy-bcm84881_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-realtek/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-phy-realtek_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-swconfig/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-swconfig_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-switch-bcm53xx_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-bcm53xx-mdio/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-switch-bcm53xx-mdio_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-mvsw61xx/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-switch-mvsw61xx_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-ip17xx/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-switch-ip17xx_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8306/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-switch-rtl8306_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366-smi/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-switch-rtl8366-smi_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366rb/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-switch-rtl8366rb_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8366s/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-switch-rtl8366s_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-switch-rtl8367b/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-switch-rtl8367b_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-natsemi/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-natsemi_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r6040/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-r6040_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-niu/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-niu_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sis900/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sis900_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sky2/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sky2_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-rhine/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-via-rhine_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-via-velocity/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-via-velocity_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139too/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-8139too_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-8139cp/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-8139cp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-r8169/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-r8169_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ne2k-pci/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ne2k-pci_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e100/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-e100_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-e1000/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-e1000_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-igb/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-igb_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbe/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ixgbe_installed WARNING: kmod-ixgbevf is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ixgbevf/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ixgbevf_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-i40e/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-i40e_installed WARNING: kmod-iavf is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iavf/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iavf_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-b44/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-b44_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-3c59x/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-3c59x_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pcnet32/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-pcnet32_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tg3/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-tg3_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcpci/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hfcpci_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-hfcmulti/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-hfcmulti_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macvlan/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-macvlan_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tulip/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-tulip_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-solos-pci/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-solos-pci_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dummy/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-dummy_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ifb/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ifb_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dm9000/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-dm9000_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-forcedeth/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-forcedeth_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-of-mdio/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-of-mdio_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vmxnet3/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-vmxnet3_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-ks8995/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-spi-ks8995_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ethoc/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ethoc_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-bnx2_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bnx2x/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-bnx2x_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-be2net/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-be2net_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx4-core/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mlx4-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mlx5-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mlx5-core_installed WARNING: kmod-sfp is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sfp/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sfp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-reject_installed echo "kmod-nf-reject" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-reject6/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-reject6_installed echo "kmod-nf-reject6" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-ipt_installed echo "kmod-nf-ipt" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipt6/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-ipt6_installed echo "kmod-nf-ipt6" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-core/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-core_installed echo "kmod-ipt-core" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-conntrack_installed echo "kmod-nf-conntrack" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install WARNING: kmod-nf-conntrack6 is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack6/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-conntrack6_installed echo "kmod-nf-conntrack6" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-nat_installed echo "kmod-nf-nat" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install WARNING: kmod-nf-nat6 is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nat6/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-nat6_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-flow/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-flow_installed echo "kmod-nf-flow" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-conntrack_installed echo "kmod-ipt-conntrack" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-extra/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-conntrack-extra_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-conntrack-label/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-conntrack-label_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-filter/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-filter_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-offload/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-offload_installed echo "kmod-ipt-offload" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipopt/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-ipopt_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipsec/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-ipsec_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ipset/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-ipset_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-ipvs_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-ftp/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-ipvs-ftp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-ipvs-sip/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-ipvs-sip_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-nat_installed echo "kmod-ipt-nat" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-raw_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-raw6/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-raw6_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat6/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-nat6_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nat-extra/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-nat-extra_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-nathelper_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-nathelper-extra/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-nathelper-extra_installed WARNING: kmod-ipt-ulog is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-ulog/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-ulog_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nflog/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-nflog_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-nfqueue/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-nfqueue_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-debug/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-debug_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-led/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-led_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tproxy/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-tproxy_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-tee/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-tee_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-u32/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-u32_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-checksum/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-checksum_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-iprange/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-iprange_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-cluster/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-cluster_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-clusterip/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-clusterip_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-extra/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-extra_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-physdev/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-physdev_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ip6tables_installed echo "kmod-ip6tables" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6tables-extra/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ip6tables-extra_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-arptables/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-arptables_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-br-netfilter/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-br-netfilter_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ebtables_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv4/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ebtables-ipv4_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-ipv6/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ebtables-ipv6_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ebtables-watchers/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ebtables-watchers_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nfnetlink_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-log/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nfnetlink-log_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nfnetlink-queue/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nfnetlink-queue_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nf-conntrack-netlink/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nf-conntrack-netlink_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-hashlimit/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-hashlimit_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipt-rpfilter/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipt-rpfilter_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nft-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-arp/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nft-arp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-bridge/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nft-bridge_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nft-nat_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-offload/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nft-offload_installed WARNING: kmod-nft-nat6 is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-nat6/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nft-nat6_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-netdev/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nft-netdev_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nft-fib/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nft-fib_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atm/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-atm_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atmtcp/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-atmtcp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bonding/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-bonding_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel4/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-udptunnel4_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-udptunnel6/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-udptunnel6_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-vxlan/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-vxlan_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-geneve/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-geneve_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nsh/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nsh_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-capi/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-capi_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-misdn/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-misdn_installed WARNING: kmod-isdn4linux is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-isdn4linux/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-isdn4linux_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipip/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipip_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipsec_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec4/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipsec4_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipsec6/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipsec6_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iptunnel_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip-vti/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ip-vti_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-vti/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ip6-vti_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-xfrm-interface/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-xfrm-interface_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel4/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iptunnel4_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-iptunnel6/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-iptunnel6_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sit/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sit_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fou_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fou6/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fou6_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ip6-tunnel/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ip6-tunnel_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-gre_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gre6/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-gre6_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tun/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-tun_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-veth/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-veth_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slhc/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-slhc_installed echo "kmod-slhc" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ppp_installed echo "kmod-ppp" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppp-synctty/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ppp-synctty_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppox/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-pppox_installed echo "kmod-pppox" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoe/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-pppoe_installed echo "kmod-pppoe" >> /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/linux.default.install mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppoa/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-pppoa_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pptp/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-pptp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pppol2tp/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-pppol2tp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ipoa/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ipoa_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mppe/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mppe_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sched-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-cake/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sched-cake_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-flower/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sched-flower_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-act-vlan/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sched-act-vlan_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-mqprio/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sched-mqprio_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-connmark/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sched-connmark_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ctinfo/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sched-ctinfo_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-ipset/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sched-ipset_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched-bpf/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sched-bpf_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bpf-test/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-bpf-test_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sched/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sched_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tcp-bbr/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-tcp-bbr_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ax25/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ax25_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pktgen/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-pktgen_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-l2tp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-eth/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-l2tp-eth_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-l2tp-ip/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-l2tp-ip_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sctp/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sctp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netem/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-netem_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-slip/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-slip_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dnsresolver/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-dnsresolver_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mpls/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mpls_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nlmon/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nlmon_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mdio/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mdio_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-macsec/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-macsec_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-netlink-diag/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-netlink-diag_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-base/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-base_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp437/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-cp437_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp775/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-cp775_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp850/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-cp850_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp852/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-cp852_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp862/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-cp862_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp864/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-cp864_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp866/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-cp866_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp932/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-cp932_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp936/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-cp936_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp950/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-cp950_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1250/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-cp1250_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-cp1251/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-cp1251_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-1/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-iso8859-1_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-2/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-iso8859-2_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-6/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-iso8859-6_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-8/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-iso8859-8_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-13/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-iso8859-13_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-iso8859-15/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-iso8859-15_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-koi8r/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-koi8r_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-nls-utf8/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-nls-utf8_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-6lowpan/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-6lowpan_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-bluetooth_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ath3k/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ath3k_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bluetooth-6lowpan/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-bluetooth-6lowpan_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-btmrvl/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-btmrvl_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-dma-buf/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-dma-buf_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-93cx6/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-eeprom-93cx6_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at24/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-eeprom-at24_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-eeprom-at25/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-eeprom-at25_installed WARNING: kmod-gpio-dev is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-dev/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-gpio-dev_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-mcp23s08/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-gpio-mcp23s08_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-nxp-74hc164/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-gpio-nxp-74hc164_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pca953x/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-gpio-pca953x_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-pcf857x/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-gpio-pcf857x_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ppdev/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ppdev_installed WARNING: kmod-parport-pc is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-parport-pc/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-parport-pc_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-lp/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-lp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mmc_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sdhci/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sdhci_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-softdog/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-softdog_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ssb/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ssb_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bcma/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-bcma_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-ds1307/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-rtc-ds1307_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-rtc-pcf8563/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-rtc-pcf8563_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdtests/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mtdtests_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdoops/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mtdoops_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mtdram/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mtdram_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-serial-8250_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-serial-8250-exar/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-serial-8250-exar_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-regmap-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-spi/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-regmap-spi_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-regmap-i2c/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-regmap-i2c_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ikconfig/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ikconfig_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-zram/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-zram_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-pps_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-gpio/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-pps-gpio_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-pps-ldisc/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-pps-ldisc_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ptp/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ptp_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-random-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-random-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-gpio-beeper/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-gpio-beeper_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-echo/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-echo_installed WARNING: kmod-bmp085 is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-bmp085_installed WARNING: kmod-bmp085-i2c is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085-i2c/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-bmp085-i2c_installed WARNING: kmod-bmp085-spi is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-bmp085-spi/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-bmp085-spi_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-tpm_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-atmel/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-tpm-i2c-atmel_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-tpm-i2c-infineon/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-tpm-i2c-infineon_installed WARNING: kmod-w83627hf-wdt is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w83627hf-wdt/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-w83627hf-wdt_installed WARNING: kmod-itco-wdt is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-itco-wdt/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-itco-wdt_installed WARNING: kmod-it87-wdt is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-it87-wdt/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-it87-wdt_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ac97/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ac97_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-mpu401/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-mpu401_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-seq/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-seq_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-ens1371/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-ens1371_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-i8x0/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-i8x0_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-via82xx/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-via82xx_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-soc-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-soc-ac97/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-soc-ac97_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-dummy/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-dummy_installed WARNING: kmod-sound-hda-core is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-core/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-hda-core_installed WARNING: kmod-sound-hda-codec-realtek is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-realtek/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-hda-codec-realtek_installed WARNING: kmod-sound-hda-codec-cmedia is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-cmedia/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-hda-codec-cmedia_installed WARNING: kmod-sound-hda-codec-analog is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-analog/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-hda-codec-analog_installed WARNING: kmod-sound-hda-codec-idt is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-idt/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-hda-codec-idt_installed WARNING: kmod-sound-hda-codec-si3054 is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-si3054/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-hda-codec-si3054_installed WARNING: kmod-sound-hda-codec-cirrus is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-cirrus/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-hda-codec-cirrus_installed WARNING: kmod-sound-hda-codec-ca0110 is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-ca0110/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-hda-codec-ca0110_installed WARNING: kmod-sound-hda-codec-ca0132 is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-ca0132/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-hda-codec-ca0132_installed WARNING: kmod-sound-hda-codec-conexant is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-conexant/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-hda-codec-conexant_installed WARNING: kmod-sound-hda-codec-via is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-via/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-hda-codec-via_installed WARNING: kmod-sound-hda-codec-hdmi is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-sound-hda-codec-hdmi/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-sound-hda-codec-hdmi_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mmc-spi/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mmc-spi_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-bitbang/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-spi-bitbang_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-gpio/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-spi-gpio_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-spi-dev/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-spi-dev_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ledtrig-usbport/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-ledtrig-usbport_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-phy-nop/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-phy-nop_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-phy-ath79-usb/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-phy-ath79-usb_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-uhci/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-uhci_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-ohci_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ohci-pci/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-ohci-pci_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-ehci/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-ehci_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb2_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb2-pci/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb2-pci_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc2/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-dwc2_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-dwc3/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-dwc3_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-acm/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-acm_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-wdm/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-wdm_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-audio/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-audio_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-printer/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-printer_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-belkin/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-belkin_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ch341/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-ch341_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-edgeport/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-edgeport_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ftdi/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-ftdi_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-garmin/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-garmin_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-simple/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-simple_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ti-usb/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-ti-usb_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ipw/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-ipw_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mct/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-mct_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7720/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-mos7720_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-mos7840/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-mos7840_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-pl2303/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-pl2303_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cp210x/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-cp210x_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-ark3116/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-ark3116_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-oti6858/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-oti6858_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-sierrawireless/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-sierrawireless_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-visor/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-visor_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-cypress-m8/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-cypress-m8_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-keyspan/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-keyspan_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-wwan/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-wwan_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-option/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-option_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-serial-qualcomm/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-serial-qualcomm_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-storage_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-extras/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-storage-extras_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-storage-uas/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-storage-uas_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-atm_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-speedtouch/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-atm-speedtouch_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-ueagle/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-atm-ueagle_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-atm-cxacru/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-atm-cxacru_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-asix_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-asix-ax88179/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-asix-ax88179_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-hso/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-hso_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kaweth/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-kaweth_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pegasus/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-pegasus_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-mcs7830/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-mcs7830_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-smsc95xx/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-smsc95xx_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-dm9601-ether/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-dm9601-ether_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ether/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-cdc-ether_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-eem/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-cdc-eem_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-subset/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-cdc-subset_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-qmi-wwan/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-qmi-wwan_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8150/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-rtl8150_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rtl8152/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-rtl8152_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sr9700/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-sr9700_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-rndis/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-rndis_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-mbim/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-cdc-mbim_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-cdc-ncm/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-cdc-ncm_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-huawei-cdc-ncm/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-huawei-cdc-ncm_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-sierrawireless/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-sierrawireless_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-ipheth/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-ipheth_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-kalmia/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-kalmia_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-net-pl/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-net-pl_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-hid/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-hid_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-yealink/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-yealink_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-cm109/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-cm109_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-test/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-test_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usbip_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-client/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usbip-client_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbip-server/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usbip-server_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-chipidea_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb-chipidea2/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb-chipidea2_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usbmon/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usbmon_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-usb3/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-usb3_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-chaoskey/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-chaoskey_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-videobuf2/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-videobuf2_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-cpia2/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-cpia2_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-pwc/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-pwc_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-uvc/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-uvc_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-core/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-core_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-conex/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-conex_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-etoms/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-etoms_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-finepix/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-finepix_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mars/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-mars_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-mr97310a/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-mr97310a_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov519/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-ov519_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-ov534_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-ov534-9/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-ov534-9_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac207/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-pac207_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-pac7311/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-pac7311_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-se401/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-se401_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sn9c20x/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-sn9c20x_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixb/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-sonixb_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sonixj/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-sonixj_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca500/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-spca500_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca501/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-spca501_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca505/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-spca505_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca506/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-spca506_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca508/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-spca508_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-spca561/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-spca561_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-sq905_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sq905c/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-sq905c_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stk014/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-stk014_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-sunplus/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-sunplus_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-t613/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-t613_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-tv8532/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-tv8532_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-vc032x/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-vc032x_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-zc3xx/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-zc3xx_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-m5602/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-m5602_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-stv06xx/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-stv06xx_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-gl860/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-gl860_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-jeilinj/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-jeilinj_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-video-gspca-konica/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-video-gspca-konica_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-w1_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-gpio/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-w1-master-gpio_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2482/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-w1-master-ds2482_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-master-ds2490/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-w1-master-ds2490_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-therm/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-w1-slave-therm_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-smem/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-w1-slave-smem_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2431/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-w1-slave-ds2431_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2433/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-w1-slave-ds2433_installed WARNING: kmod-w1-slave-ds2760 is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2760/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-w1-slave-ds2760_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-w1-slave-ds2413/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-w1-slave-ds2413_installed WARNING: kmod-net-prism54 is not available in the kernel config - generating empty package mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-prism54/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-net-prism54_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-net-rtl8192su/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-net-rtl8192su_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ieee802154_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mac802154/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mac802154_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-fakelb/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-fakelb_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-atusb/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-atusb_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-at86rf230/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-at86rf230_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-mrf24j40/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-mrf24j40_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-cc2520/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-cc2520_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ca8210/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ca8210_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-ieee802154-6lowpan/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-ieee802154-6lowpan_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.pkgdir/kmod-leds-reset/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-leds-reset_installed touch -r /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.built /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages/.autoremove 2>/dev/null >/dev/null find /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/packages -mindepth 1 -maxdepth 1 -not '(' -type f -and -name '.*' -and -size 0 ')' -and -not -name '.pkgdir' | xargs -r rm -rf make[3]: [Makefile:63: compile] Error 1 (ignored) make[3]: Leaving directory '/home/build/openwrt/package/linux' time: package/linux/compile#68.22#67.11#576.62 make[3]: Entering directory '/home/build/openwrt/feeds/base/package/kernel/cryptodev-linux' mkdir -p /home/build/openwrt/dl SHELL= flock /home/build/openwrt/tmp/.cryptodev-linux-1.11.tar.gz.flock -c ' /home/build/openwrt/scripts/download.pl "/home/build/openwrt/dl" "cryptodev-linux-1.11.tar.gz" "d71fd8dafc40147586f5bc6acca8fce5088d9c576d1142fe5aeb7b0813186a11" "" "https://codeload.github.com/cryptodev-linux/cryptodev-linux/tar.gz/cryptodev-linux-1.11?" 
' + curl -f --connect-timeout 20 --retry 5 --location --insecure https://codeload.github.com/cryptodev-linux/cryptodev-linux/tar.gz/cryptodev-linux-1.11?/cryptodev-linux-1.11.tar.gz % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 56876 100 56876 0 0 172k 0 --:--:-- --:--:-- --:--:-- 172k touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.prepared_90e835b6e003bd43b5c7be33bfaba203_18f1e190c5d53547fed41a3eaa76e9e9_check . /home/build/openwrt/include/shell.sh; gzip -dc /home/build/openwrt/dl/cryptodev-linux-1.11.tar.gz | tar -C /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.. -xf - [ ! -d ./src/ ] || cp -fpR ./src/. /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11 Applying ./patches/010-fix-build-for-kernel-v5.9-rc1.patch using plaintext: patching file zc.c touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.prepared_90e835b6e003bd43b5c7be33bfaba203_18f1e190c5d53547fed41a3eaa76e9e9 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.configured_* rm -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.cryptodev-linux_installed touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.configured_68b329da9893e34099c7d8ad5cb9c940 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.built touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.built_check cat /dev/null > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/symvers/cryptodev-linux.symvers; 
for subdir in .; do cat /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/symvers/*.symvers 2>/dev/null > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/$subdir/Module.symvers; done make -C /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11 KCFLAGS="-ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl=target-mips_24kc_musl" HOSTCFLAGS="-O2 -I/home/build/openwrt/staging_dir/host/include -I/home/build/openwrt/staging_dir/hostpkg/include -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/host/include -Wall -Wmissing-prototypes -Wstrict-prototypes" CROSS_COMPILE="mips-openwrt-linux-musl-" ARCH="mips" KBUILD_HAVE_NLS=no KBUILD_BUILD_USER="builder" KBUILD_BUILD_HOST="buildhost" KBUILD_BUILD_TIMESTAMP="Thu Sep 10 16:52:15 2020" KBUILD_BUILD_VERSION="0" HOST_LOADLIBES="-L/home/build/openwrt/staging_dir/host/lib" KBUILD_HOSTLDLIBS="-L/home/build/openwrt/staging_dir/host/lib" CONFIG_SHELL="bash" V='' cmd_syscalls= KERNELRELEASE=5.4.63 KERNEL_DIR="/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63" make[4]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11' make -C /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63 M=/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11 ARCH=mips CROSS_COMPILE=mips-openwrt-linux-musl- modules make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63' CC [M] /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/ioctl.o CC [M] /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/main.o CC [M] 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/cryptlib.o CC [M] /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/authenc.o CC [M] /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/zc.o CC [M] /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/util.o LD [M] /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/cryptodev.o Building modules, stage 2. MODPOST 1 modules CC [M] /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/cryptodev.mod.o LD [M] /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/cryptodev.ko make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/linux-5.4.63' make[4]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11' for subdir in .; do realdir=$(readlink -f /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11); grep -F /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11 /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/$subdir/Module.symvers >> /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/Module.symvers.tmp; [ "/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11" = "$realdir" ] || grep -F $realdir /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/$subdir/Module.symvers >> 
/home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/Module.symvers.tmp; done; sort -u /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/Module.symvers.tmp > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/Module.symvers; mv /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/Module.symvers /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/symvers/cryptodev-linux.symvers touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.built rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.pkgdir/kmod-cryptodev.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.pkgdir/kmod-cryptodev mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.pkgdir/kmod-cryptodev mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.pkgdir/kmod-cryptodev/etc/modules.d; ( echo "cryptodev cryptodev_verbosity=-1"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.pkgdir/kmod-cryptodev/etc/modules.d/50-cryptodev; touch /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.pkgdir/kmod-cryptodev.installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.pkgdir/kmod-cryptodev/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.kmod-cryptodev_installed mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/ipkg-mips_24kc/kmod-cryptodev/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/ipkg-mips_24kc/kmod-cryptodev/etc/modules.d; ( echo "cryptodev cryptodev_verbosity=-1"; ) > /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/ipkg-mips_24kc/kmod-cryptodev/etc/modules.d/50-cryptodev; find /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/ipkg-mips_24kc/kmod-cryptodev -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/ipkg-mips_24kc/kmod-cryptodev rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/ipkg-mips_24kc/kmod-cryptodev/lib/modules/5.4.63/cryptodev.ko: relocatable (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/ipkg-mips_24kc/kmod-cryptodev/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = 
\"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/ipkg-mips_24kc/kmod-cryptodev /home/build/openwrt/bin/targets/ath79/generic/packages Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/ipkg-mips_24kc/kmod-cryptodev into /home/build/openwrt/bin/targets/ath79/generic/packages/kmod-cryptodev_5.4.63+1.11-ath79-1_mips_24kc.ipk rm -rf /home/build/openwrt/tmp/stage-cryptodev-linux mkdir -p /home/build/openwrt/tmp/stage-cryptodev-linux/host /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages install -d -m0755 /home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include/crypto cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/crypto/cryptodev.h /home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include/crypto/ find /home/build/openwrt/tmp/stage-cryptodev-linux -name '*.la' | xargs -r rm -f; if [ -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/cryptodev-linux.list ]; then /home/build/openwrt/scripts/clean-package.sh "/home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/cryptodev-linux.list" "/home/build/openwrt/staging_dir/target-mips_24kc_musl"; fi if [ -d 
/home/build/openwrt/tmp/stage-cryptodev-linux ]; then (cd /home/build/openwrt/tmp/stage-cryptodev-linux; find ./ > /home/build/openwrt/tmp/stage-cryptodev-linux.files); SHELL= flock /home/build/openwrt/tmp/.staging-dir.flock -c ' mv /home/build/openwrt/tmp/stage-cryptodev-linux.files /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/cryptodev-linux.list && cp -fpR /home/build/openwrt/tmp/stage-cryptodev-linux/* /home/build/openwrt/staging_dir/target-mips_24kc_musl/; '; fi rm -rf /home/build/openwrt/tmp/stage-cryptodev-linux touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.cryptodev-linux_installed touch -r /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.built /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11/.autoremove 2>/dev/null >/dev/null find /home/build/openwrt/build_dir/target-mips_24kc_musl/linux-ath79_generic/cryptodev-linux-cryptodev-linux-1.11 -mindepth 1 -maxdepth 1 -not '(' -type f -and -name '.*' -and -size 0 ')' -and -not -name '.pkgdir' | xargs -r rm -rf make[3]: Leaving directory '/home/build/openwrt/feeds/base/package/kernel/cryptodev-linux' time: package/feeds/base/cryptodev-linux/compile#4.00#1.41#6.72 make[3]: Entering directory '/home/build/openwrt/feeds/base/package/libs/openssl' bash: md5: command not found mkdir -p /home/build/openwrt/dl SHELL= flock /home/build/openwrt/tmp/.openssl-1.1.1k.tar.gz.flock -c ' /home/build/openwrt/scripts/download.pl "/home/build/openwrt/dl" "openssl-1.1.1k.tar.gz" "892a0875b9872acd04a9fde79b1f943075d5ea162415de3047c327df33fbaee5" "" "http://www.openssl.org/source/" "http://www.openssl.org/source/old/1.1.1/" "http://ftp.fi.muni.cz/pub/openssl/source/" "http://ftp.fi.muni.cz/pub/openssl/source/old/1.1.1/" "ftp://ftp.pca.dfn.de/pub/tools/net/openssl/source/" "ftp://ftp.pca.dfn.de/pub/tools/net/openssl/source/old/1.1.1/" ' + curl -f --connect-timeout 20 --retry 5 
--location --insecure http://www.openssl.org/source/openssl-1.1.1k.tar.gz % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 341 100 341 0 0 695 0 --:--:-- --:--:-- --:--:-- 694 100 9593k 100 9593k 0 0 12.8M 0 --:--:-- --:--:-- --:--:-- 12.8M touch /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.prepared_803e55f2c41a2085134608352241896d_18f1e190c5d53547fed41a3eaa76e9e9_check . /home/build/openwrt/include/shell.sh; gzip -dc /home/build/openwrt/dl/openssl-1.1.1k.tar.gz | tar -C /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.. -xf - [ ! -d ./src/ ] || cp -fpR ./src/. /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k Applying ./patches/100-Configure-afalg-support.patch using plaintext: patching file Configure Applying ./patches/110-openwrt_targets.patch using plaintext: patching file Configurations/25-openwrt.conf Applying ./patches/120-strip-cflags-from-binary.patch using plaintext: patching file crypto/build.info Applying ./patches/130-dont-build-tests-fuzz.patch using plaintext: patching file Configure Applying ./patches/140-allow-prefer-chacha20.patch using plaintext: patching file include/openssl/ssl.h patching file ssl/ssl_ciph.c Applying ./patches/150-openssl.cnf-add-engines-conf.patch using plaintext: patching file apps/openssl.cnf Applying ./patches/400-eng_devcrypto-save-ioctl-if-EVP_MD_.FLAG_ONESHOT.patch using plaintext: patching file crypto/engine/eng_devcrypto.c Applying ./patches/410-eng_devcrypto-add-configuration-options.patch using plaintext: patching file crypto/engine/eng_devcrypto.c Hunk #13 succeeded at 1122 (offset 13 lines). 
Applying ./patches/420-eng_devcrypto-add-command-to-dump-driver-info.patch using plaintext: patching file crypto/engine/eng_devcrypto.c Applying ./patches/430-e_devcrypto-make-the-dev-crypto-engine-dynamic.patch using plaintext: patching file crypto/engine/build.info patching file crypto/init.c patching file engines/build.info patching file engines/e_devcrypto.c (renamed from crypto/engine/eng_devcrypto.c) Applying ./patches/500-e_devcrypto-default-to-not-use-digests-in-engine.patch using plaintext: patching file engines/e_devcrypto.c Applying ./patches/510-e_devcrypto-ignore-error-when-closing-session.patch using plaintext: patching file engines/e_devcrypto.c touch /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.prepared_803e55f2c41a2085134608352241896d_18f1e190c5d53547fed41a3eaa76e9e9 rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.configured_* rm -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.openssl_installed (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k; ./Configure linux-mips-openwrt --prefix=/usr --libdir=lib --openssldir=/etc/ssl --cross-compile-prefix="mips-openwrt-linux-musl-" -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro -Wl,--gc-sections shared no-blake2 -DOPENSSL_PREFER_CHACHA_OVER_GCM no-async no-ec2m no-aria no-sm2 no-sm3 no-sm4 no-camellia no-idea no-seed no-mdc2 no-whirlpool no-rfc3779 -DOPENSSL_SMALL_FOOTPRINT 
enable-devcryptoeng no-hw-padlock no-dtls no-comp no-nextprotoneg && { [ -f /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.configured_6dce8b6080199944e9a113c94beba996_ ] || make clean; } ) Configuring OpenSSL version 1.1.1k (0x101010bfL) for linux-mips-openwrt Using os-specific seed configuration Creating configdata.pm Creating Makefile ********************************************************************** *** *** *** OpenSSL has been successfully configured *** *** *** *** If you encounter a problem while building, please open an *** *** issue on GitHub *** *** and include the output from the following command: *** *** *** *** perl configdata.pm --dump *** *** *** *** (If you are new to OpenSSL, you might want to consult the *** *** 'Troubleshooting' section in the INSTALL file first) *** *** *** ********************************************************************** make[4]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' rm -f libcrypto.so.1.1 rm -f libcrypto.so rm -f libssl.so.1.1 rm -f libssl.so rm -f apps/libapps.a libcrypto.a libssl.a rm -f *.map rm -f apps/openssl engines/afalg.so engines/capi.so engines/dasync.so engines/devcrypto.so engines/ossltest.so apps/CA.pl apps/tsget.pl tools/c_rehash util/shlib_wrap.sh rm -f include/crypto/bn_conf.h include/crypto/dso_conf.h include/openssl/opensslconf.h apps/CA.pl apps/progs.h apps/tsget.pl crypto/aes/aes-mips.S crypto/bn/bn-mips.S crypto/bn/mips-mont.S crypto/buildinf.h crypto/sha/sha1-mips.S crypto/sha/sha256-mips.S libcrypto.map libssl.map tools/c_rehash util/shlib_wrap.sh rm -f `find . -name '*.d' \! -name '.*' \! -type d -print` rm -f `find . -name '*.o' \! -name '.*' \! -type d -print` rm -f core rm -f tags TAGS doc-nits rm -f -r test/test-runs rm -f openssl.pc libcrypto.pc libssl.pc rm -f `find . -type l \! 
-name '.*' -print` rm -f ../openssl-1.1.1k.tar make[4]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' touch /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.configured_6dce8b6080199944e9a113c94beba996_ rm -f /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.built touch /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.built_check make -C /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k CC="mips-openwrt-linux-musl-gcc" SOURCE_DATE_EPOCH=1599756735 OPENWRT_OPTIMIZATION_FLAGS="-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections" all make[4]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' /usr/bin/perl "-I." -Mconfigdata "util/dofile.pl" \ "-oMakefile" include/crypto/bn_conf.h.in > include/crypto/bn_conf.h /usr/bin/perl "-I." -Mconfigdata "util/dofile.pl" \ "-oMakefile" include/crypto/dso_conf.h.in > include/crypto/dso_conf.h /usr/bin/perl "-I." -Mconfigdata "util/dofile.pl" \ "-oMakefile" include/openssl/opensslconf.h.in > include/openssl/opensslconf.h make depend && make _all make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/app_rand.d.tmp -MT apps/app_rand.o -c -o apps/app_rand.o apps/app_rand.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/apps.d.tmp -MT apps/apps.o -c -o apps/apps.o apps/apps.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/bf_prefix.d.tmp -MT apps/bf_prefix.o -c -o apps/bf_prefix.o apps/bf_prefix.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/opt.d.tmp -MT apps/opt.o -c -o apps/opt.o apps/opt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/s_cb.d.tmp -MT apps/s_cb.o -c -o apps/s_cb.o apps/s_cb.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/s_socket.d.tmp -MT apps/s_socket.o -c -o apps/s_socket.o apps/s_socket.c mips-openwrt-linux-musl-ar r apps/libapps.a apps/app_rand.o apps/apps.o apps/bf_prefix.o apps/opt.o apps/s_cb.o apps/s_socket.o /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-ar: creating apps/libapps.a mips-openwrt-linux-musl-ranlib apps/libapps.a || echo Never mind. CC="mips-openwrt-linux-musl-gcc" /usr/bin/perl crypto/aes/asm/aes-mips.pl o32 crypto/aes/aes-mips.S mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -c -o crypto/aes/aes-mips.o crypto/aes/aes-mips.S mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/aes/aes_cbc.d.tmp -MT crypto/aes/aes_cbc.o -c -o crypto/aes/aes_cbc.o crypto/aes/aes_cbc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/aes/aes_cfb.d.tmp -MT crypto/aes/aes_cfb.o -c -o crypto/aes/aes_cfb.o crypto/aes/aes_cfb.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/aes/aes_ecb.d.tmp -MT crypto/aes/aes_ecb.o -c -o crypto/aes/aes_ecb.o crypto/aes/aes_ecb.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/aes/aes_ige.d.tmp -MT crypto/aes/aes_ige.o -c -o crypto/aes/aes_ige.o crypto/aes/aes_ige.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/aes/aes_misc.d.tmp -MT crypto/aes/aes_misc.o -c -o crypto/aes/aes_misc.o crypto/aes/aes_misc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/aes/aes_ofb.d.tmp -MT crypto/aes/aes_ofb.o -c -o crypto/aes/aes_ofb.o crypto/aes/aes_ofb.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/aes/aes_wrap.d.tmp -MT crypto/aes/aes_wrap.o -c -o crypto/aes/aes_wrap.o crypto/aes/aes_wrap.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_bitstr.d.tmp -MT crypto/asn1/a_bitstr.o -c -o crypto/asn1/a_bitstr.o crypto/asn1/a_bitstr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_d2i_fp.d.tmp -MT crypto/asn1/a_d2i_fp.o -c -o crypto/asn1/a_d2i_fp.o crypto/asn1/a_d2i_fp.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_digest.d.tmp -MT crypto/asn1/a_digest.o -c -o crypto/asn1/a_digest.o crypto/asn1/a_digest.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_dup.d.tmp -MT crypto/asn1/a_dup.o -c -o crypto/asn1/a_dup.o crypto/asn1/a_dup.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_gentm.d.tmp -MT crypto/asn1/a_gentm.o -c -o crypto/asn1/a_gentm.o crypto/asn1/a_gentm.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_i2d_fp.d.tmp -MT crypto/asn1/a_i2d_fp.o -c -o crypto/asn1/a_i2d_fp.o crypto/asn1/a_i2d_fp.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_int.d.tmp -MT crypto/asn1/a_int.o -c -o crypto/asn1/a_int.o crypto/asn1/a_int.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_mbstr.d.tmp -MT crypto/asn1/a_mbstr.o -c -o crypto/asn1/a_mbstr.o crypto/asn1/a_mbstr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_object.d.tmp -MT crypto/asn1/a_object.o -c -o crypto/asn1/a_object.o crypto/asn1/a_object.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_octet.d.tmp -MT crypto/asn1/a_octet.o -c -o crypto/asn1/a_octet.o crypto/asn1/a_octet.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_print.d.tmp -MT crypto/asn1/a_print.o -c -o crypto/asn1/a_print.o crypto/asn1/a_print.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_sign.d.tmp -MT crypto/asn1/a_sign.o -c -o crypto/asn1/a_sign.o crypto/asn1/a_sign.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_strex.d.tmp -MT crypto/asn1/a_strex.o -c -o crypto/asn1/a_strex.o crypto/asn1/a_strex.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_strnid.d.tmp -MT crypto/asn1/a_strnid.o -c -o crypto/asn1/a_strnid.o crypto/asn1/a_strnid.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_time.d.tmp -MT crypto/asn1/a_time.o -c -o crypto/asn1/a_time.o crypto/asn1/a_time.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_type.d.tmp -MT crypto/asn1/a_type.o -c -o crypto/asn1/a_type.o crypto/asn1/a_type.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_utctm.d.tmp -MT crypto/asn1/a_utctm.o -c -o crypto/asn1/a_utctm.o crypto/asn1/a_utctm.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_utf8.d.tmp -MT crypto/asn1/a_utf8.o -c -o crypto/asn1/a_utf8.o crypto/asn1/a_utf8.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/a_verify.d.tmp -MT crypto/asn1/a_verify.o -c -o crypto/asn1/a_verify.o crypto/asn1/a_verify.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/ameth_lib.d.tmp -MT crypto/asn1/ameth_lib.o -c -o crypto/asn1/ameth_lib.o crypto/asn1/ameth_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/asn1_err.d.tmp -MT crypto/asn1/asn1_err.o -c -o crypto/asn1/asn1_err.o crypto/asn1/asn1_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/asn1_gen.d.tmp -MT crypto/asn1/asn1_gen.o -c -o crypto/asn1/asn1_gen.o crypto/asn1/asn1_gen.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/asn1_item_list.d.tmp -MT crypto/asn1/asn1_item_list.o -c -o crypto/asn1/asn1_item_list.o crypto/asn1/asn1_item_list.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/asn1_lib.d.tmp -MT crypto/asn1/asn1_lib.o -c -o crypto/asn1/asn1_lib.o crypto/asn1/asn1_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/asn1_par.d.tmp -MT crypto/asn1/asn1_par.o -c -o crypto/asn1/asn1_par.o crypto/asn1/asn1_par.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/asn_mime.d.tmp -MT crypto/asn1/asn_mime.o -c -o crypto/asn1/asn_mime.o crypto/asn1/asn_mime.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/asn_moid.d.tmp -MT crypto/asn1/asn_moid.o -c -o crypto/asn1/asn_moid.o crypto/asn1/asn_moid.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/asn_mstbl.d.tmp -MT crypto/asn1/asn_mstbl.o -c -o crypto/asn1/asn_mstbl.o crypto/asn1/asn_mstbl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/asn_pack.d.tmp -MT crypto/asn1/asn_pack.o -c -o crypto/asn1/asn_pack.o crypto/asn1/asn_pack.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/bio_asn1.d.tmp -MT crypto/asn1/bio_asn1.o -c -o crypto/asn1/bio_asn1.o crypto/asn1/bio_asn1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/bio_ndef.d.tmp -MT crypto/asn1/bio_ndef.o -c -o crypto/asn1/bio_ndef.o crypto/asn1/bio_ndef.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/d2i_pr.d.tmp -MT crypto/asn1/d2i_pr.o -c -o crypto/asn1/d2i_pr.o crypto/asn1/d2i_pr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/d2i_pu.d.tmp -MT crypto/asn1/d2i_pu.o -c -o crypto/asn1/d2i_pu.o crypto/asn1/d2i_pu.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/evp_asn1.d.tmp -MT crypto/asn1/evp_asn1.o -c -o crypto/asn1/evp_asn1.o crypto/asn1/evp_asn1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/f_int.d.tmp -MT crypto/asn1/f_int.o -c -o crypto/asn1/f_int.o crypto/asn1/f_int.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/f_string.d.tmp -MT crypto/asn1/f_string.o -c -o crypto/asn1/f_string.o crypto/asn1/f_string.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/i2d_pr.d.tmp -MT crypto/asn1/i2d_pr.o -c -o crypto/asn1/i2d_pr.o crypto/asn1/i2d_pr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/i2d_pu.d.tmp -MT crypto/asn1/i2d_pu.o -c -o crypto/asn1/i2d_pu.o crypto/asn1/i2d_pu.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/n_pkey.d.tmp -MT crypto/asn1/n_pkey.o -c -o crypto/asn1/n_pkey.o crypto/asn1/n_pkey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/nsseq.d.tmp -MT crypto/asn1/nsseq.o -c -o crypto/asn1/nsseq.o crypto/asn1/nsseq.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/p5_pbe.d.tmp -MT crypto/asn1/p5_pbe.o -c -o crypto/asn1/p5_pbe.o crypto/asn1/p5_pbe.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/p5_pbev2.d.tmp -MT crypto/asn1/p5_pbev2.o -c -o crypto/asn1/p5_pbev2.o crypto/asn1/p5_pbev2.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/p5_scrypt.d.tmp -MT crypto/asn1/p5_scrypt.o -c -o crypto/asn1/p5_scrypt.o crypto/asn1/p5_scrypt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/p8_pkey.d.tmp -MT crypto/asn1/p8_pkey.o -c -o crypto/asn1/p8_pkey.o crypto/asn1/p8_pkey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/t_bitst.d.tmp -MT crypto/asn1/t_bitst.o -c -o crypto/asn1/t_bitst.o crypto/asn1/t_bitst.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/t_pkey.d.tmp -MT crypto/asn1/t_pkey.o -c -o crypto/asn1/t_pkey.o crypto/asn1/t_pkey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/t_spki.d.tmp -MT crypto/asn1/t_spki.o -c -o crypto/asn1/t_spki.o crypto/asn1/t_spki.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/tasn_dec.d.tmp -MT crypto/asn1/tasn_dec.o -c -o crypto/asn1/tasn_dec.o crypto/asn1/tasn_dec.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/tasn_enc.d.tmp -MT crypto/asn1/tasn_enc.o -c -o crypto/asn1/tasn_enc.o crypto/asn1/tasn_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/tasn_fre.d.tmp -MT crypto/asn1/tasn_fre.o -c -o crypto/asn1/tasn_fre.o crypto/asn1/tasn_fre.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/tasn_new.d.tmp -MT crypto/asn1/tasn_new.o -c -o crypto/asn1/tasn_new.o crypto/asn1/tasn_new.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/tasn_prn.d.tmp -MT crypto/asn1/tasn_prn.o -c -o crypto/asn1/tasn_prn.o crypto/asn1/tasn_prn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/tasn_scn.d.tmp -MT crypto/asn1/tasn_scn.o -c -o crypto/asn1/tasn_scn.o crypto/asn1/tasn_scn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/tasn_typ.d.tmp -MT crypto/asn1/tasn_typ.o -c -o crypto/asn1/tasn_typ.o crypto/asn1/tasn_typ.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/tasn_utl.d.tmp -MT crypto/asn1/tasn_utl.o -c -o crypto/asn1/tasn_utl.o crypto/asn1/tasn_utl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/x_algor.d.tmp -MT crypto/asn1/x_algor.o -c -o crypto/asn1/x_algor.o crypto/asn1/x_algor.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/x_bignum.d.tmp -MT crypto/asn1/x_bignum.o -c -o crypto/asn1/x_bignum.o crypto/asn1/x_bignum.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/x_info.d.tmp -MT crypto/asn1/x_info.o -c -o crypto/asn1/x_info.o crypto/asn1/x_info.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/x_int64.d.tmp -MT crypto/asn1/x_int64.o -c -o crypto/asn1/x_int64.o crypto/asn1/x_int64.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/x_long.d.tmp -MT crypto/asn1/x_long.o -c -o crypto/asn1/x_long.o crypto/asn1/x_long.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/x_pkey.d.tmp -MT crypto/asn1/x_pkey.o -c -o crypto/asn1/x_pkey.o crypto/asn1/x_pkey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/x_sig.d.tmp -MT crypto/asn1/x_sig.o -c -o crypto/asn1/x_sig.o crypto/asn1/x_sig.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/x_spki.d.tmp -MT crypto/asn1/x_spki.o -c -o crypto/asn1/x_spki.o crypto/asn1/x_spki.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/asn1/x_val.d.tmp -MT crypto/asn1/x_val.o -c -o crypto/asn1/x_val.o crypto/asn1/x_val.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/async/arch/async_null.d.tmp -MT crypto/async/arch/async_null.o -c -o crypto/async/arch/async_null.o crypto/async/arch/async_null.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/async/arch/async_posix.d.tmp -MT crypto/async/arch/async_posix.o -c -o crypto/async/arch/async_posix.o crypto/async/arch/async_posix.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/async/arch/async_win.d.tmp -MT crypto/async/arch/async_win.o -c -o crypto/async/arch/async_win.o crypto/async/arch/async_win.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/async/async.d.tmp -MT crypto/async/async.o -c -o crypto/async/async.o crypto/async/async.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/async/async_err.d.tmp -MT crypto/async/async_err.o -c -o crypto/async/async_err.o crypto/async/async_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/async/async_wait.d.tmp -MT crypto/async/async_wait.o -c -o crypto/async/async_wait.o crypto/async/async_wait.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bf/bf_cfb64.d.tmp -MT crypto/bf/bf_cfb64.o -c -o crypto/bf/bf_cfb64.o crypto/bf/bf_cfb64.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bf/bf_ecb.d.tmp -MT crypto/bf/bf_ecb.o -c -o crypto/bf/bf_ecb.o crypto/bf/bf_ecb.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bf/bf_enc.d.tmp -MT crypto/bf/bf_enc.o -c -o crypto/bf/bf_enc.o crypto/bf/bf_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bf/bf_ofb64.d.tmp -MT crypto/bf/bf_ofb64.o -c -o crypto/bf/bf_ofb64.o crypto/bf/bf_ofb64.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bf/bf_skey.d.tmp -MT crypto/bf/bf_skey.o -c -o crypto/bf/bf_skey.o crypto/bf/bf_skey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/b_addr.d.tmp -MT crypto/bio/b_addr.o -c -o crypto/bio/b_addr.o crypto/bio/b_addr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/b_dump.d.tmp -MT crypto/bio/b_dump.o -c -o crypto/bio/b_dump.o crypto/bio/b_dump.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/b_print.d.tmp -MT crypto/bio/b_print.o -c -o crypto/bio/b_print.o crypto/bio/b_print.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/b_sock.d.tmp -MT crypto/bio/b_sock.o -c -o crypto/bio/b_sock.o crypto/bio/b_sock.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/b_sock2.d.tmp -MT crypto/bio/b_sock2.o -c -o crypto/bio/b_sock2.o crypto/bio/b_sock2.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bf_buff.d.tmp -MT crypto/bio/bf_buff.o -c -o crypto/bio/bf_buff.o crypto/bio/bf_buff.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bf_lbuf.d.tmp -MT crypto/bio/bf_lbuf.o -c -o crypto/bio/bf_lbuf.o crypto/bio/bf_lbuf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bf_nbio.d.tmp -MT crypto/bio/bf_nbio.o -c -o crypto/bio/bf_nbio.o crypto/bio/bf_nbio.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bf_null.d.tmp -MT crypto/bio/bf_null.o -c -o crypto/bio/bf_null.o crypto/bio/bf_null.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bio_cb.d.tmp -MT crypto/bio/bio_cb.o -c -o crypto/bio/bio_cb.o crypto/bio/bio_cb.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bio_err.d.tmp -MT crypto/bio/bio_err.o -c -o crypto/bio/bio_err.o crypto/bio/bio_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bio_lib.d.tmp -MT crypto/bio/bio_lib.o -c -o crypto/bio/bio_lib.o crypto/bio/bio_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bio_meth.d.tmp -MT crypto/bio/bio_meth.o -c -o crypto/bio/bio_meth.o crypto/bio/bio_meth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bss_acpt.d.tmp -MT crypto/bio/bss_acpt.o -c -o crypto/bio/bss_acpt.o crypto/bio/bss_acpt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bss_bio.d.tmp -MT crypto/bio/bss_bio.o -c -o crypto/bio/bss_bio.o crypto/bio/bss_bio.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bss_conn.d.tmp -MT crypto/bio/bss_conn.o -c -o crypto/bio/bss_conn.o crypto/bio/bss_conn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bss_dgram.d.tmp -MT crypto/bio/bss_dgram.o -c -o crypto/bio/bss_dgram.o crypto/bio/bss_dgram.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bss_fd.d.tmp -MT crypto/bio/bss_fd.o -c -o crypto/bio/bss_fd.o crypto/bio/bss_fd.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bss_file.d.tmp -MT crypto/bio/bss_file.o -c -o crypto/bio/bss_file.o crypto/bio/bss_file.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bss_log.d.tmp -MT crypto/bio/bss_log.o -c -o crypto/bio/bss_log.o crypto/bio/bss_log.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bss_mem.d.tmp -MT crypto/bio/bss_mem.o -c -o crypto/bio/bss_mem.o crypto/bio/bss_mem.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bss_null.d.tmp -MT crypto/bio/bss_null.o -c -o crypto/bio/bss_null.o crypto/bio/bss_null.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bio/bss_sock.d.tmp -MT crypto/bio/bss_sock.o -c -o crypto/bio/bss_sock.o crypto/bio/bss_sock.c CC="mips-openwrt-linux-musl-gcc" /usr/bin/perl crypto/bn/asm/mips.pl o32 crypto/bn/bn-mips.S mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -c -o crypto/bn/bn-mips.o crypto/bn/bn-mips.S mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_add.d.tmp -MT crypto/bn/bn_add.o -c -o crypto/bn/bn_add.o crypto/bn/bn_add.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_blind.d.tmp -MT crypto/bn/bn_blind.o -c -o crypto/bn/bn_blind.o crypto/bn/bn_blind.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_const.d.tmp -MT crypto/bn/bn_const.o -c -o crypto/bn/bn_const.o crypto/bn/bn_const.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_ctx.d.tmp -MT crypto/bn/bn_ctx.o -c -o crypto/bn/bn_ctx.o crypto/bn/bn_ctx.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_depr.d.tmp -MT crypto/bn/bn_depr.o -c -o crypto/bn/bn_depr.o crypto/bn/bn_depr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_dh.d.tmp -MT crypto/bn/bn_dh.o -c -o crypto/bn/bn_dh.o crypto/bn/bn_dh.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_div.d.tmp -MT crypto/bn/bn_div.o -c -o crypto/bn/bn_div.o crypto/bn/bn_div.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_err.d.tmp -MT crypto/bn/bn_err.o -c -o crypto/bn/bn_err.o crypto/bn/bn_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_exp.d.tmp -MT crypto/bn/bn_exp.o -c -o crypto/bn/bn_exp.o crypto/bn/bn_exp.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_exp2.d.tmp -MT crypto/bn/bn_exp2.o -c -o crypto/bn/bn_exp2.o crypto/bn/bn_exp2.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_gcd.d.tmp -MT crypto/bn/bn_gcd.o -c -o crypto/bn/bn_gcd.o crypto/bn/bn_gcd.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_gf2m.d.tmp -MT crypto/bn/bn_gf2m.o -c -o crypto/bn/bn_gf2m.o crypto/bn/bn_gf2m.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_intern.d.tmp -MT crypto/bn/bn_intern.o -c -o crypto/bn/bn_intern.o crypto/bn/bn_intern.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_kron.d.tmp -MT crypto/bn/bn_kron.o -c -o crypto/bn/bn_kron.o crypto/bn/bn_kron.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_lib.d.tmp -MT crypto/bn/bn_lib.o -c -o crypto/bn/bn_lib.o crypto/bn/bn_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_mod.d.tmp -MT crypto/bn/bn_mod.o -c -o crypto/bn/bn_mod.o crypto/bn/bn_mod.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_mont.d.tmp -MT crypto/bn/bn_mont.o -c -o crypto/bn/bn_mont.o crypto/bn/bn_mont.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_mpi.d.tmp -MT crypto/bn/bn_mpi.o -c -o crypto/bn/bn_mpi.o crypto/bn/bn_mpi.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_mul.d.tmp -MT crypto/bn/bn_mul.o -c -o crypto/bn/bn_mul.o crypto/bn/bn_mul.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_nist.d.tmp -MT crypto/bn/bn_nist.o -c -o crypto/bn/bn_nist.o crypto/bn/bn_nist.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_prime.d.tmp -MT crypto/bn/bn_prime.o -c -o crypto/bn/bn_prime.o crypto/bn/bn_prime.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_print.d.tmp -MT crypto/bn/bn_print.o -c -o crypto/bn/bn_print.o crypto/bn/bn_print.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_rand.d.tmp -MT crypto/bn/bn_rand.o -c -o crypto/bn/bn_rand.o crypto/bn/bn_rand.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_recp.d.tmp -MT crypto/bn/bn_recp.o -c -o crypto/bn/bn_recp.o crypto/bn/bn_recp.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_shift.d.tmp -MT crypto/bn/bn_shift.o -c -o crypto/bn/bn_shift.o crypto/bn/bn_shift.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_sqr.d.tmp -MT crypto/bn/bn_sqr.o -c -o crypto/bn/bn_sqr.o crypto/bn/bn_sqr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_sqrt.d.tmp -MT crypto/bn/bn_sqrt.o -c -o crypto/bn/bn_sqrt.o crypto/bn/bn_sqrt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_srp.d.tmp -MT crypto/bn/bn_srp.o -c -o crypto/bn/bn_srp.o crypto/bn/bn_srp.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_word.d.tmp -MT crypto/bn/bn_word.o -c -o crypto/bn/bn_word.o crypto/bn/bn_word.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/bn/bn_x931p.d.tmp -MT crypto/bn/bn_x931p.o -c -o crypto/bn/bn_x931p.o crypto/bn/bn_x931p.c CC="mips-openwrt-linux-musl-gcc" /usr/bin/perl crypto/bn/asm/mips-mont.pl o32 crypto/bn/mips-mont.S mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -c -o crypto/bn/mips-mont.o crypto/bn/mips-mont.S mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/buffer/buf_err.d.tmp -MT crypto/buffer/buf_err.o -c -o crypto/buffer/buf_err.o crypto/buffer/buf_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/buffer/buffer.d.tmp -MT crypto/buffer/buffer.o -c -o crypto/buffer/buffer.o crypto/buffer/buffer.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cast/c_cfb64.d.tmp -MT crypto/cast/c_cfb64.o -c -o crypto/cast/c_cfb64.o crypto/cast/c_cfb64.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cast/c_ecb.d.tmp -MT crypto/cast/c_ecb.o -c -o crypto/cast/c_ecb.o crypto/cast/c_ecb.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cast/c_enc.d.tmp -MT crypto/cast/c_enc.o -c -o crypto/cast/c_enc.o crypto/cast/c_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cast/c_ofb64.d.tmp -MT crypto/cast/c_ofb64.o -c -o crypto/cast/c_ofb64.o crypto/cast/c_ofb64.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cast/c_skey.d.tmp -MT crypto/cast/c_skey.o -c -o crypto/cast/c_skey.o crypto/cast/c_skey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/chacha/chacha_enc.d.tmp -MT crypto/chacha/chacha_enc.o -c -o crypto/chacha/chacha_enc.o crypto/chacha/chacha_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cmac/cm_ameth.d.tmp -MT crypto/cmac/cm_ameth.o -c -o crypto/cmac/cm_ameth.o crypto/cmac/cm_ameth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cmac/cm_pmeth.d.tmp -MT crypto/cmac/cm_pmeth.o -c -o crypto/cmac/cm_pmeth.o crypto/cmac/cm_pmeth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cmac/cmac.d.tmp -MT crypto/cmac/cmac.o -c -o crypto/cmac/cmac.o crypto/cmac/cmac.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_asn1.d.tmp -MT crypto/cms/cms_asn1.o -c -o crypto/cms/cms_asn1.o crypto/cms/cms_asn1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_att.d.tmp -MT crypto/cms/cms_att.o -c -o crypto/cms/cms_att.o crypto/cms/cms_att.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_cd.d.tmp -MT crypto/cms/cms_cd.o -c -o crypto/cms/cms_cd.o crypto/cms/cms_cd.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_dd.d.tmp -MT crypto/cms/cms_dd.o -c -o crypto/cms/cms_dd.o crypto/cms/cms_dd.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_enc.d.tmp -MT crypto/cms/cms_enc.o -c -o crypto/cms/cms_enc.o crypto/cms/cms_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_env.d.tmp -MT crypto/cms/cms_env.o -c -o crypto/cms/cms_env.o crypto/cms/cms_env.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_err.d.tmp -MT crypto/cms/cms_err.o -c -o crypto/cms/cms_err.o crypto/cms/cms_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_ess.d.tmp -MT crypto/cms/cms_ess.o -c -o crypto/cms/cms_ess.o crypto/cms/cms_ess.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_io.d.tmp -MT crypto/cms/cms_io.o -c -o crypto/cms/cms_io.o crypto/cms/cms_io.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_kari.d.tmp -MT crypto/cms/cms_kari.o -c -o crypto/cms/cms_kari.o crypto/cms/cms_kari.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_lib.d.tmp -MT crypto/cms/cms_lib.o -c -o crypto/cms/cms_lib.o crypto/cms/cms_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_pwri.d.tmp -MT crypto/cms/cms_pwri.o -c -o crypto/cms/cms_pwri.o crypto/cms/cms_pwri.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_sd.d.tmp -MT crypto/cms/cms_sd.o -c -o crypto/cms/cms_sd.o crypto/cms/cms_sd.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cms/cms_smime.d.tmp -MT crypto/cms/cms_smime.o -c -o crypto/cms/cms_smime.o crypto/cms/cms_smime.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/conf/conf_api.d.tmp -MT crypto/conf/conf_api.o -c -o crypto/conf/conf_api.o crypto/conf/conf_api.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/conf/conf_def.d.tmp -MT crypto/conf/conf_def.o -c -o crypto/conf/conf_def.o crypto/conf/conf_def.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/conf/conf_err.d.tmp -MT crypto/conf/conf_err.o -c -o crypto/conf/conf_err.o crypto/conf/conf_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/conf/conf_lib.d.tmp -MT crypto/conf/conf_lib.o -c -o crypto/conf/conf_lib.o crypto/conf/conf_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/conf/conf_mall.d.tmp -MT crypto/conf/conf_mall.o -c -o crypto/conf/conf_mall.o crypto/conf/conf_mall.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/conf/conf_mod.d.tmp -MT crypto/conf/conf_mod.o -c -o crypto/conf/conf_mod.o crypto/conf/conf_mod.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/conf/conf_sap.d.tmp -MT crypto/conf/conf_sap.o -c -o crypto/conf/conf_sap.o crypto/conf/conf_sap.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/conf/conf_ssl.d.tmp -MT crypto/conf/conf_ssl.o -c -o crypto/conf/conf_ssl.o crypto/conf/conf_ssl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cpt_err.d.tmp -MT crypto/cpt_err.o -c -o crypto/cpt_err.o crypto/cpt_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cryptlib.d.tmp -MT crypto/cryptlib.o -c -o crypto/cryptlib.o crypto/cryptlib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ct/ct_b64.d.tmp -MT crypto/ct/ct_b64.o -c -o crypto/ct/ct_b64.o crypto/ct/ct_b64.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ct/ct_err.d.tmp -MT crypto/ct/ct_err.o -c -o crypto/ct/ct_err.o crypto/ct/ct_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ct/ct_log.d.tmp -MT crypto/ct/ct_log.o -c -o crypto/ct/ct_log.o crypto/ct/ct_log.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ct/ct_oct.d.tmp -MT crypto/ct/ct_oct.o -c -o crypto/ct/ct_oct.o crypto/ct/ct_oct.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ct/ct_policy.d.tmp -MT crypto/ct/ct_policy.o -c -o crypto/ct/ct_policy.o crypto/ct/ct_policy.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ct/ct_prn.d.tmp -MT crypto/ct/ct_prn.o -c -o crypto/ct/ct_prn.o crypto/ct/ct_prn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ct/ct_sct.d.tmp -MT crypto/ct/ct_sct.o -c -o crypto/ct/ct_sct.o crypto/ct/ct_sct.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ct/ct_sct_ctx.d.tmp -MT crypto/ct/ct_sct_ctx.o -c -o crypto/ct/ct_sct_ctx.o crypto/ct/ct_sct_ctx.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ct/ct_vfy.d.tmp -MT crypto/ct/ct_vfy.o -c -o crypto/ct/ct_vfy.o crypto/ct/ct_vfy.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ct/ct_x509v3.d.tmp -MT crypto/ct/ct_x509v3.o -c -o crypto/ct/ct_x509v3.o crypto/ct/ct_x509v3.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ctype.d.tmp -MT crypto/ctype.o -c -o crypto/ctype.o crypto/ctype.c /usr/bin/perl util/mkbuildinf.pl "mips-openwrt-linux-musl-gcc -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT" "linux-mips-openwrt" > crypto/buildinf.h mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/cversion.d.tmp -MT crypto/cversion.o -c -o crypto/cversion.o crypto/cversion.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/cbc_cksm.d.tmp -MT crypto/des/cbc_cksm.o -c -o crypto/des/cbc_cksm.o crypto/des/cbc_cksm.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/cbc_enc.d.tmp -MT crypto/des/cbc_enc.o -c -o crypto/des/cbc_enc.o crypto/des/cbc_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/cfb64ede.d.tmp -MT crypto/des/cfb64ede.o -c -o crypto/des/cfb64ede.o crypto/des/cfb64ede.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/cfb64enc.d.tmp -MT crypto/des/cfb64enc.o -c -o crypto/des/cfb64enc.o crypto/des/cfb64enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/cfb_enc.d.tmp -MT crypto/des/cfb_enc.o -c -o crypto/des/cfb_enc.o crypto/des/cfb_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/des_enc.d.tmp -MT crypto/des/des_enc.o -c -o crypto/des/des_enc.o crypto/des/des_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/ecb3_enc.d.tmp -MT crypto/des/ecb3_enc.o -c -o crypto/des/ecb3_enc.o crypto/des/ecb3_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/ecb_enc.d.tmp -MT crypto/des/ecb_enc.o -c -o crypto/des/ecb_enc.o crypto/des/ecb_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/fcrypt.d.tmp -MT crypto/des/fcrypt.o -c -o crypto/des/fcrypt.o crypto/des/fcrypt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/fcrypt_b.d.tmp -MT crypto/des/fcrypt_b.o -c -o crypto/des/fcrypt_b.o crypto/des/fcrypt_b.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/ofb64ede.d.tmp -MT crypto/des/ofb64ede.o -c -o crypto/des/ofb64ede.o crypto/des/ofb64ede.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/ofb64enc.d.tmp -MT crypto/des/ofb64enc.o -c -o crypto/des/ofb64enc.o crypto/des/ofb64enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/ofb_enc.d.tmp -MT crypto/des/ofb_enc.o -c -o crypto/des/ofb_enc.o crypto/des/ofb_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/pcbc_enc.d.tmp -MT crypto/des/pcbc_enc.o -c -o crypto/des/pcbc_enc.o crypto/des/pcbc_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/qud_cksm.d.tmp -MT crypto/des/qud_cksm.o -c -o crypto/des/qud_cksm.o crypto/des/qud_cksm.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/rand_key.d.tmp -MT crypto/des/rand_key.o -c -o crypto/des/rand_key.o crypto/des/rand_key.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/set_key.d.tmp -MT crypto/des/set_key.o -c -o crypto/des/set_key.o crypto/des/set_key.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/str2key.d.tmp -MT crypto/des/str2key.o -c -o crypto/des/str2key.o crypto/des/str2key.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/des/xcbc_enc.d.tmp -MT crypto/des/xcbc_enc.o -c -o crypto/des/xcbc_enc.o crypto/des/xcbc_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_ameth.d.tmp -MT crypto/dh/dh_ameth.o -c -o crypto/dh/dh_ameth.o crypto/dh/dh_ameth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_asn1.d.tmp -MT crypto/dh/dh_asn1.o -c -o crypto/dh/dh_asn1.o crypto/dh/dh_asn1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_check.d.tmp -MT crypto/dh/dh_check.o -c -o crypto/dh/dh_check.o crypto/dh/dh_check.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_depr.d.tmp -MT crypto/dh/dh_depr.o -c -o crypto/dh/dh_depr.o crypto/dh/dh_depr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_err.d.tmp -MT crypto/dh/dh_err.o -c -o crypto/dh/dh_err.o crypto/dh/dh_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_gen.d.tmp -MT crypto/dh/dh_gen.o -c -o crypto/dh/dh_gen.o crypto/dh/dh_gen.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_kdf.d.tmp -MT crypto/dh/dh_kdf.o -c -o crypto/dh/dh_kdf.o crypto/dh/dh_kdf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_key.d.tmp -MT crypto/dh/dh_key.o -c -o crypto/dh/dh_key.o crypto/dh/dh_key.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_lib.d.tmp -MT crypto/dh/dh_lib.o -c -o crypto/dh/dh_lib.o crypto/dh/dh_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_meth.d.tmp -MT crypto/dh/dh_meth.o -c -o crypto/dh/dh_meth.o crypto/dh/dh_meth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_pmeth.d.tmp -MT crypto/dh/dh_pmeth.o -c -o crypto/dh/dh_pmeth.o crypto/dh/dh_pmeth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_prn.d.tmp -MT crypto/dh/dh_prn.o -c -o crypto/dh/dh_prn.o crypto/dh/dh_prn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_rfc5114.d.tmp -MT crypto/dh/dh_rfc5114.o -c -o crypto/dh/dh_rfc5114.o crypto/dh/dh_rfc5114.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dh/dh_rfc7919.d.tmp -MT crypto/dh/dh_rfc7919.o -c -o crypto/dh/dh_rfc7919.o crypto/dh/dh_rfc7919.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dsa/dsa_ameth.d.tmp -MT crypto/dsa/dsa_ameth.o -c -o crypto/dsa/dsa_ameth.o crypto/dsa/dsa_ameth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dsa/dsa_asn1.d.tmp -MT crypto/dsa/dsa_asn1.o -c -o crypto/dsa/dsa_asn1.o crypto/dsa/dsa_asn1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dsa/dsa_depr.d.tmp -MT crypto/dsa/dsa_depr.o -c -o crypto/dsa/dsa_depr.o crypto/dsa/dsa_depr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dsa/dsa_err.d.tmp -MT crypto/dsa/dsa_err.o -c -o crypto/dsa/dsa_err.o crypto/dsa/dsa_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dsa/dsa_gen.d.tmp -MT crypto/dsa/dsa_gen.o -c -o crypto/dsa/dsa_gen.o crypto/dsa/dsa_gen.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dsa/dsa_key.d.tmp -MT crypto/dsa/dsa_key.o -c -o crypto/dsa/dsa_key.o crypto/dsa/dsa_key.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dsa/dsa_lib.d.tmp -MT crypto/dsa/dsa_lib.o -c -o crypto/dsa/dsa_lib.o crypto/dsa/dsa_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dsa/dsa_meth.d.tmp -MT crypto/dsa/dsa_meth.o -c -o crypto/dsa/dsa_meth.o crypto/dsa/dsa_meth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dsa/dsa_ossl.d.tmp -MT crypto/dsa/dsa_ossl.o -c -o crypto/dsa/dsa_ossl.o crypto/dsa/dsa_ossl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dsa/dsa_pmeth.d.tmp -MT crypto/dsa/dsa_pmeth.o -c -o crypto/dsa/dsa_pmeth.o crypto/dsa/dsa_pmeth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dsa/dsa_prn.d.tmp -MT crypto/dsa/dsa_prn.o -c -o crypto/dsa/dsa_prn.o crypto/dsa/dsa_prn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dsa/dsa_sign.d.tmp -MT crypto/dsa/dsa_sign.o -c -o crypto/dsa/dsa_sign.o crypto/dsa/dsa_sign.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dsa/dsa_vrf.d.tmp -MT crypto/dsa/dsa_vrf.o -c -o crypto/dsa/dsa_vrf.o crypto/dsa/dsa_vrf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dso/dso_dl.d.tmp -MT crypto/dso/dso_dl.o -c -o crypto/dso/dso_dl.o crypto/dso/dso_dl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dso/dso_dlfcn.d.tmp -MT crypto/dso/dso_dlfcn.o -c -o crypto/dso/dso_dlfcn.o crypto/dso/dso_dlfcn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dso/dso_err.d.tmp -MT crypto/dso/dso_err.o -c -o crypto/dso/dso_err.o crypto/dso/dso_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dso/dso_lib.d.tmp -MT crypto/dso/dso_lib.o -c -o crypto/dso/dso_lib.o crypto/dso/dso_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dso/dso_openssl.d.tmp -MT crypto/dso/dso_openssl.o -c -o crypto/dso/dso_openssl.o crypto/dso/dso_openssl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dso/dso_vms.d.tmp -MT crypto/dso/dso_vms.o -c -o crypto/dso/dso_vms.o crypto/dso/dso_vms.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/dso/dso_win32.d.tmp -MT crypto/dso/dso_win32.o -c -o crypto/dso/dso_win32.o crypto/dso/dso_win32.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ebcdic.d.tmp -MT crypto/ebcdic.o -c -o crypto/ebcdic.o crypto/ebcdic.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/curve25519.d.tmp -MT crypto/ec/curve25519.o -c -o crypto/ec/curve25519.o crypto/ec/curve25519.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto/ec/curve448/arch_32 -Icrypto/ec/curve448 -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/curve448/arch_32/f_impl.d.tmp -MT crypto/ec/curve448/arch_32/f_impl.o -c -o crypto/ec/curve448/arch_32/f_impl.o crypto/ec/curve448/arch_32/f_impl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto/ec/curve448/arch_32 -Icrypto/ec/curve448 -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/curve448/curve448.d.tmp -MT crypto/ec/curve448/curve448.o -c -o crypto/ec/curve448/curve448.o crypto/ec/curve448/curve448.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto/ec/curve448/arch_32 -Icrypto/ec/curve448 -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/curve448/curve448_tables.d.tmp -MT crypto/ec/curve448/curve448_tables.o -c -o crypto/ec/curve448/curve448_tables.o crypto/ec/curve448/curve448_tables.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto/ec/curve448/arch_32 -Icrypto/ec/curve448 -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/curve448/eddsa.d.tmp -MT crypto/ec/curve448/eddsa.o -c -o crypto/ec/curve448/eddsa.o crypto/ec/curve448/eddsa.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto/ec/curve448/arch_32 -Icrypto/ec/curve448 -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/curve448/f_generic.d.tmp -MT crypto/ec/curve448/f_generic.o -c -o crypto/ec/curve448/f_generic.o crypto/ec/curve448/f_generic.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto/ec/curve448/arch_32 -Icrypto/ec/curve448 -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/curve448/scalar.d.tmp -MT crypto/ec/curve448/scalar.o -c -o crypto/ec/curve448/scalar.o crypto/ec/curve448/scalar.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec2_oct.d.tmp -MT crypto/ec/ec2_oct.o -c -o crypto/ec/ec2_oct.o crypto/ec/ec2_oct.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec2_smpl.d.tmp -MT crypto/ec/ec2_smpl.o -c -o crypto/ec/ec2_smpl.o crypto/ec/ec2_smpl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec_ameth.d.tmp -MT crypto/ec/ec_ameth.o -c -o crypto/ec/ec_ameth.o crypto/ec/ec_ameth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec_asn1.d.tmp -MT crypto/ec/ec_asn1.o -c -o crypto/ec/ec_asn1.o crypto/ec/ec_asn1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec_check.d.tmp -MT crypto/ec/ec_check.o -c -o crypto/ec/ec_check.o crypto/ec/ec_check.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec_curve.d.tmp -MT crypto/ec/ec_curve.o -c -o crypto/ec/ec_curve.o crypto/ec/ec_curve.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec_cvt.d.tmp -MT crypto/ec/ec_cvt.o -c -o crypto/ec/ec_cvt.o crypto/ec/ec_cvt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec_err.d.tmp -MT crypto/ec/ec_err.o -c -o crypto/ec/ec_err.o crypto/ec/ec_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec_key.d.tmp -MT crypto/ec/ec_key.o -c -o crypto/ec/ec_key.o crypto/ec/ec_key.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec_kmeth.d.tmp -MT crypto/ec/ec_kmeth.o -c -o crypto/ec/ec_kmeth.o crypto/ec/ec_kmeth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec_lib.d.tmp -MT crypto/ec/ec_lib.o -c -o crypto/ec/ec_lib.o crypto/ec/ec_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec_mult.d.tmp -MT crypto/ec/ec_mult.o -c -o crypto/ec/ec_mult.o crypto/ec/ec_mult.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec_oct.d.tmp -MT crypto/ec/ec_oct.o -c -o crypto/ec/ec_oct.o crypto/ec/ec_oct.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec_pmeth.d.tmp -MT crypto/ec/ec_pmeth.o -c -o crypto/ec/ec_pmeth.o crypto/ec/ec_pmeth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ec_print.d.tmp -MT crypto/ec/ec_print.o -c -o crypto/ec/ec_print.o crypto/ec/ec_print.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecdh_kdf.d.tmp -MT crypto/ec/ecdh_kdf.o -c -o crypto/ec/ecdh_kdf.o crypto/ec/ecdh_kdf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecdh_ossl.d.tmp -MT crypto/ec/ecdh_ossl.o -c -o crypto/ec/ecdh_ossl.o crypto/ec/ecdh_ossl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecdsa_ossl.d.tmp -MT crypto/ec/ecdsa_ossl.o -c -o crypto/ec/ecdsa_ossl.o crypto/ec/ecdsa_ossl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecdsa_sign.d.tmp -MT crypto/ec/ecdsa_sign.o -c -o crypto/ec/ecdsa_sign.o crypto/ec/ecdsa_sign.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecdsa_vrf.d.tmp -MT crypto/ec/ecdsa_vrf.o -c -o crypto/ec/ecdsa_vrf.o crypto/ec/ecdsa_vrf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/eck_prn.d.tmp -MT crypto/ec/eck_prn.o -c -o crypto/ec/eck_prn.o crypto/ec/eck_prn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecp_mont.d.tmp -MT crypto/ec/ecp_mont.o -c -o crypto/ec/ecp_mont.o crypto/ec/ecp_mont.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecp_nist.d.tmp -MT crypto/ec/ecp_nist.o -c -o crypto/ec/ecp_nist.o crypto/ec/ecp_nist.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecp_nistp224.d.tmp -MT crypto/ec/ecp_nistp224.o -c -o crypto/ec/ecp_nistp224.o crypto/ec/ecp_nistp224.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecp_nistp256.d.tmp -MT crypto/ec/ecp_nistp256.o -c -o crypto/ec/ecp_nistp256.o crypto/ec/ecp_nistp256.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecp_nistp521.d.tmp -MT crypto/ec/ecp_nistp521.o -c -o crypto/ec/ecp_nistp521.o crypto/ec/ecp_nistp521.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecp_nistputil.d.tmp -MT crypto/ec/ecp_nistputil.o -c -o crypto/ec/ecp_nistputil.o crypto/ec/ecp_nistputil.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecp_oct.d.tmp -MT crypto/ec/ecp_oct.o -c -o crypto/ec/ecp_oct.o crypto/ec/ecp_oct.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecp_smpl.d.tmp -MT crypto/ec/ecp_smpl.o -c -o crypto/ec/ecp_smpl.o crypto/ec/ecp_smpl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ec/ecx_meth.d.tmp -MT crypto/ec/ecx_meth.o -c -o crypto/ec/ecx_meth.o crypto/ec/ecx_meth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/eng_all.d.tmp -MT crypto/engine/eng_all.o -c -o crypto/engine/eng_all.o crypto/engine/eng_all.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/eng_cnf.d.tmp -MT crypto/engine/eng_cnf.o -c -o crypto/engine/eng_cnf.o crypto/engine/eng_cnf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/eng_ctrl.d.tmp -MT crypto/engine/eng_ctrl.o -c -o crypto/engine/eng_ctrl.o crypto/engine/eng_ctrl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/eng_dyn.d.tmp -MT crypto/engine/eng_dyn.o -c -o crypto/engine/eng_dyn.o crypto/engine/eng_dyn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/eng_err.d.tmp -MT crypto/engine/eng_err.o -c -o crypto/engine/eng_err.o crypto/engine/eng_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/eng_fat.d.tmp -MT crypto/engine/eng_fat.o -c -o crypto/engine/eng_fat.o crypto/engine/eng_fat.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/eng_init.d.tmp -MT crypto/engine/eng_init.o -c -o crypto/engine/eng_init.o crypto/engine/eng_init.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/eng_lib.d.tmp -MT crypto/engine/eng_lib.o -c -o crypto/engine/eng_lib.o crypto/engine/eng_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/eng_list.d.tmp -MT crypto/engine/eng_list.o -c -o crypto/engine/eng_list.o crypto/engine/eng_list.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/eng_openssl.d.tmp -MT crypto/engine/eng_openssl.o -c -o crypto/engine/eng_openssl.o crypto/engine/eng_openssl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/eng_pkey.d.tmp -MT crypto/engine/eng_pkey.o -c -o crypto/engine/eng_pkey.o crypto/engine/eng_pkey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/eng_rdrand.d.tmp -MT crypto/engine/eng_rdrand.o -c -o crypto/engine/eng_rdrand.o crypto/engine/eng_rdrand.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/eng_table.d.tmp -MT crypto/engine/eng_table.o -c -o crypto/engine/eng_table.o crypto/engine/eng_table.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/tb_asnmth.d.tmp -MT crypto/engine/tb_asnmth.o -c -o crypto/engine/tb_asnmth.o crypto/engine/tb_asnmth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/tb_cipher.d.tmp -MT crypto/engine/tb_cipher.o -c -o crypto/engine/tb_cipher.o crypto/engine/tb_cipher.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/tb_dh.d.tmp -MT crypto/engine/tb_dh.o -c -o crypto/engine/tb_dh.o crypto/engine/tb_dh.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/tb_digest.d.tmp -MT crypto/engine/tb_digest.o -c -o crypto/engine/tb_digest.o crypto/engine/tb_digest.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/tb_dsa.d.tmp -MT crypto/engine/tb_dsa.o -c -o crypto/engine/tb_dsa.o crypto/engine/tb_dsa.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/tb_eckey.d.tmp -MT crypto/engine/tb_eckey.o -c -o crypto/engine/tb_eckey.o crypto/engine/tb_eckey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/tb_pkmeth.d.tmp -MT crypto/engine/tb_pkmeth.o -c -o crypto/engine/tb_pkmeth.o crypto/engine/tb_pkmeth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/tb_rand.d.tmp -MT crypto/engine/tb_rand.o -c -o crypto/engine/tb_rand.o crypto/engine/tb_rand.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/engine/tb_rsa.d.tmp -MT crypto/engine/tb_rsa.o -c -o crypto/engine/tb_rsa.o crypto/engine/tb_rsa.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/err/err.d.tmp -MT crypto/err/err.o -c -o crypto/err/err.o crypto/err/err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/err/err_all.d.tmp -MT crypto/err/err_all.o -c -o crypto/err/err_all.o crypto/err/err_all.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/err/err_prn.d.tmp -MT crypto/err/err_prn.o -c -o crypto/err/err_prn.o crypto/err/err_prn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/bio_b64.d.tmp -MT crypto/evp/bio_b64.o -c -o crypto/evp/bio_b64.o crypto/evp/bio_b64.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/bio_enc.d.tmp -MT crypto/evp/bio_enc.o -c -o crypto/evp/bio_enc.o crypto/evp/bio_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/bio_md.d.tmp -MT crypto/evp/bio_md.o -c -o crypto/evp/bio_md.o crypto/evp/bio_md.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/bio_ok.d.tmp -MT crypto/evp/bio_ok.o -c -o crypto/evp/bio_ok.o crypto/evp/bio_ok.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/c_allc.d.tmp -MT crypto/evp/c_allc.o -c -o crypto/evp/c_allc.o crypto/evp/c_allc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/c_alld.d.tmp -MT crypto/evp/c_alld.o -c -o crypto/evp/c_alld.o crypto/evp/c_alld.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/cmeth_lib.d.tmp -MT crypto/evp/cmeth_lib.o -c -o crypto/evp/cmeth_lib.o crypto/evp/cmeth_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/digest.d.tmp -MT crypto/evp/digest.o -c -o crypto/evp/digest.o crypto/evp/digest.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -Icrypto/modes -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_aes.d.tmp -MT crypto/evp/e_aes.o -c -o crypto/evp/e_aes.o crypto/evp/e_aes.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto/modes -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_aes_cbc_hmac_sha1.d.tmp -MT crypto/evp/e_aes_cbc_hmac_sha1.o -c -o crypto/evp/e_aes_cbc_hmac_sha1.o crypto/evp/e_aes_cbc_hmac_sha1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto/modes -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_aes_cbc_hmac_sha256.d.tmp -MT crypto/evp/e_aes_cbc_hmac_sha256.o -c -o crypto/evp/e_aes_cbc_hmac_sha256.o crypto/evp/e_aes_cbc_hmac_sha256.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -Icrypto/modes -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_aria.d.tmp -MT crypto/evp/e_aria.o -c -o crypto/evp/e_aria.o crypto/evp/e_aria.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_bf.d.tmp -MT crypto/evp/e_bf.o -c -o crypto/evp/e_bf.o crypto/evp/e_bf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -Icrypto/modes -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_camellia.d.tmp -MT crypto/evp/e_camellia.o -c -o crypto/evp/e_camellia.o crypto/evp/e_camellia.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_cast.d.tmp -MT crypto/evp/e_cast.o -c -o crypto/evp/e_cast.o crypto/evp/e_cast.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_chacha20_poly1305.d.tmp -MT crypto/evp/e_chacha20_poly1305.o -c -o crypto/evp/e_chacha20_poly1305.o crypto/evp/e_chacha20_poly1305.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_des.d.tmp -MT crypto/evp/e_des.o -c -o crypto/evp/e_des.o crypto/evp/e_des.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_des3.d.tmp -MT crypto/evp/e_des3.o -c -o crypto/evp/e_des3.o crypto/evp/e_des3.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_idea.d.tmp -MT crypto/evp/e_idea.o -c -o crypto/evp/e_idea.o crypto/evp/e_idea.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_null.d.tmp -MT crypto/evp/e_null.o -c -o crypto/evp/e_null.o crypto/evp/e_null.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_old.d.tmp -MT crypto/evp/e_old.o -c -o crypto/evp/e_old.o crypto/evp/e_old.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_rc2.d.tmp -MT crypto/evp/e_rc2.o -c -o crypto/evp/e_rc2.o crypto/evp/e_rc2.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_rc4.d.tmp -MT crypto/evp/e_rc4.o -c -o crypto/evp/e_rc4.o crypto/evp/e_rc4.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_rc4_hmac_md5.d.tmp -MT crypto/evp/e_rc4_hmac_md5.o -c -o crypto/evp/e_rc4_hmac_md5.o crypto/evp/e_rc4_hmac_md5.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_rc5.d.tmp -MT crypto/evp/e_rc5.o -c -o crypto/evp/e_rc5.o crypto/evp/e_rc5.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_seed.d.tmp -MT crypto/evp/e_seed.o -c -o crypto/evp/e_seed.o crypto/evp/e_seed.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -Icrypto/modes -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_sm4.d.tmp -MT crypto/evp/e_sm4.o -c -o crypto/evp/e_sm4.o crypto/evp/e_sm4.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/e_xcbc_d.d.tmp -MT crypto/evp/e_xcbc_d.o -c -o crypto/evp/e_xcbc_d.o crypto/evp/e_xcbc_d.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/encode.d.tmp -MT crypto/evp/encode.o -c -o crypto/evp/encode.o crypto/evp/encode.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/evp_cnf.d.tmp -MT crypto/evp/evp_cnf.o -c -o crypto/evp/evp_cnf.o crypto/evp/evp_cnf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/evp_enc.d.tmp -MT crypto/evp/evp_enc.o -c -o crypto/evp/evp_enc.o crypto/evp/evp_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/evp_err.d.tmp -MT crypto/evp/evp_err.o -c -o crypto/evp/evp_err.o crypto/evp/evp_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/evp_key.d.tmp -MT crypto/evp/evp_key.o -c -o crypto/evp/evp_key.o crypto/evp/evp_key.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/evp_lib.d.tmp -MT crypto/evp/evp_lib.o -c -o crypto/evp/evp_lib.o crypto/evp/evp_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/evp_pbe.d.tmp -MT crypto/evp/evp_pbe.o -c -o crypto/evp/evp_pbe.o crypto/evp/evp_pbe.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/evp_pkey.d.tmp -MT crypto/evp/evp_pkey.o -c -o crypto/evp/evp_pkey.o crypto/evp/evp_pkey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/m_md2.d.tmp -MT crypto/evp/m_md2.o -c -o crypto/evp/m_md2.o crypto/evp/m_md2.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/m_md4.d.tmp -MT crypto/evp/m_md4.o -c -o crypto/evp/m_md4.o crypto/evp/m_md4.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/m_md5.d.tmp -MT crypto/evp/m_md5.o -c -o crypto/evp/m_md5.o crypto/evp/m_md5.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/m_md5_sha1.d.tmp -MT crypto/evp/m_md5_sha1.o -c -o crypto/evp/m_md5_sha1.o crypto/evp/m_md5_sha1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/m_mdc2.d.tmp -MT crypto/evp/m_mdc2.o -c -o crypto/evp/m_mdc2.o crypto/evp/m_mdc2.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/m_null.d.tmp -MT crypto/evp/m_null.o -c -o crypto/evp/m_null.o crypto/evp/m_null.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/m_ripemd.d.tmp -MT crypto/evp/m_ripemd.o -c -o crypto/evp/m_ripemd.o crypto/evp/m_ripemd.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/m_sha1.d.tmp -MT crypto/evp/m_sha1.o -c -o crypto/evp/m_sha1.o crypto/evp/m_sha1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/m_sha3.d.tmp -MT crypto/evp/m_sha3.o -c -o crypto/evp/m_sha3.o crypto/evp/m_sha3.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/m_sigver.d.tmp -MT crypto/evp/m_sigver.o -c -o crypto/evp/m_sigver.o crypto/evp/m_sigver.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/m_wp.d.tmp -MT crypto/evp/m_wp.o -c -o crypto/evp/m_wp.o crypto/evp/m_wp.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/names.d.tmp -MT crypto/evp/names.o -c -o crypto/evp/names.o crypto/evp/names.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/p5_crpt.d.tmp -MT crypto/evp/p5_crpt.o -c -o crypto/evp/p5_crpt.o crypto/evp/p5_crpt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/p5_crpt2.d.tmp -MT crypto/evp/p5_crpt2.o -c -o crypto/evp/p5_crpt2.o crypto/evp/p5_crpt2.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/p_dec.d.tmp -MT crypto/evp/p_dec.o -c -o crypto/evp/p_dec.o crypto/evp/p_dec.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/p_enc.d.tmp -MT crypto/evp/p_enc.o -c -o crypto/evp/p_enc.o crypto/evp/p_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/p_lib.d.tmp -MT crypto/evp/p_lib.o -c -o crypto/evp/p_lib.o crypto/evp/p_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/p_open.d.tmp -MT crypto/evp/p_open.o -c -o crypto/evp/p_open.o crypto/evp/p_open.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/p_seal.d.tmp -MT crypto/evp/p_seal.o -c -o crypto/evp/p_seal.o crypto/evp/p_seal.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/p_sign.d.tmp -MT crypto/evp/p_sign.o -c -o crypto/evp/p_sign.o crypto/evp/p_sign.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/p_verify.d.tmp -MT crypto/evp/p_verify.o -c -o crypto/evp/p_verify.o crypto/evp/p_verify.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/pbe_scrypt.d.tmp -MT crypto/evp/pbe_scrypt.o -c -o crypto/evp/pbe_scrypt.o crypto/evp/pbe_scrypt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/pmeth_fn.d.tmp -MT crypto/evp/pmeth_fn.o -c -o crypto/evp/pmeth_fn.o crypto/evp/pmeth_fn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/pmeth_gn.d.tmp -MT crypto/evp/pmeth_gn.o -c -o crypto/evp/pmeth_gn.o crypto/evp/pmeth_gn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/evp/pmeth_lib.d.tmp -MT crypto/evp/pmeth_lib.o -c -o crypto/evp/pmeth_lib.o crypto/evp/pmeth_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ex_data.d.tmp -MT crypto/ex_data.o -c -o crypto/ex_data.o crypto/ex_data.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/getenv.d.tmp -MT crypto/getenv.o -c -o crypto/getenv.o crypto/getenv.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/hmac/hm_ameth.d.tmp -MT crypto/hmac/hm_ameth.o -c -o crypto/hmac/hm_ameth.o crypto/hmac/hm_ameth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/hmac/hm_pmeth.d.tmp -MT crypto/hmac/hm_pmeth.o -c -o crypto/hmac/hm_pmeth.o crypto/hmac/hm_pmeth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/hmac/hmac.d.tmp -MT crypto/hmac/hmac.o -c -o crypto/hmac/hmac.o crypto/hmac/hmac.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/init.d.tmp -MT crypto/init.o -c -o crypto/init.o crypto/init.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/kdf/hkdf.d.tmp -MT crypto/kdf/hkdf.o -c -o crypto/kdf/hkdf.o crypto/kdf/hkdf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/kdf/kdf_err.d.tmp -MT crypto/kdf/kdf_err.o -c -o crypto/kdf/kdf_err.o crypto/kdf/kdf_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/kdf/scrypt.d.tmp -MT crypto/kdf/scrypt.o -c -o crypto/kdf/scrypt.o crypto/kdf/scrypt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/kdf/tls1_prf.d.tmp -MT crypto/kdf/tls1_prf.o -c -o crypto/kdf/tls1_prf.o crypto/kdf/tls1_prf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/lhash/lh_stats.d.tmp -MT crypto/lhash/lh_stats.o -c -o crypto/lhash/lh_stats.o crypto/lhash/lh_stats.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/lhash/lhash.d.tmp -MT crypto/lhash/lhash.o -c -o crypto/lhash/lhash.o crypto/lhash/lhash.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/md4/md4_dgst.d.tmp -MT crypto/md4/md4_dgst.o -c -o crypto/md4/md4_dgst.o crypto/md4/md4_dgst.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/md4/md4_one.d.tmp -MT crypto/md4/md4_one.o -c -o crypto/md4/md4_one.o crypto/md4/md4_one.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/md5/md5_dgst.d.tmp -MT crypto/md5/md5_dgst.o -c -o crypto/md5/md5_dgst.o crypto/md5/md5_dgst.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/md5/md5_one.d.tmp -MT crypto/md5/md5_one.o -c -o crypto/md5/md5_one.o crypto/md5/md5_one.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/mem.d.tmp -MT crypto/mem.o -c -o crypto/mem.o crypto/mem.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/mem_clr.d.tmp -MT crypto/mem_clr.o -c -o crypto/mem_clr.o crypto/mem_clr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/mem_dbg.d.tmp -MT crypto/mem_dbg.o -c -o crypto/mem_dbg.o crypto/mem_dbg.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/mem_sec.d.tmp -MT crypto/mem_sec.o -c -o crypto/mem_sec.o crypto/mem_sec.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/modes/cbc128.d.tmp -MT crypto/modes/cbc128.o -c -o crypto/modes/cbc128.o crypto/modes/cbc128.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/modes/ccm128.d.tmp -MT crypto/modes/ccm128.o -c -o crypto/modes/ccm128.o crypto/modes/ccm128.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/modes/cfb128.d.tmp -MT crypto/modes/cfb128.o -c -o crypto/modes/cfb128.o crypto/modes/cfb128.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/modes/ctr128.d.tmp -MT crypto/modes/ctr128.o -c -o crypto/modes/ctr128.o crypto/modes/ctr128.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/modes/cts128.d.tmp -MT crypto/modes/cts128.o -c -o crypto/modes/cts128.o crypto/modes/cts128.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/modes/gcm128.d.tmp -MT crypto/modes/gcm128.o -c -o crypto/modes/gcm128.o crypto/modes/gcm128.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/modes/ocb128.d.tmp -MT crypto/modes/ocb128.o -c -o crypto/modes/ocb128.o crypto/modes/ocb128.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/modes/ofb128.d.tmp -MT crypto/modes/ofb128.o -c -o crypto/modes/ofb128.o crypto/modes/ofb128.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/modes/wrap128.d.tmp -MT crypto/modes/wrap128.o -c -o crypto/modes/wrap128.o crypto/modes/wrap128.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/modes/xts128.d.tmp -MT crypto/modes/xts128.o -c -o crypto/modes/xts128.o crypto/modes/xts128.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/o_dir.d.tmp -MT crypto/o_dir.o -c -o crypto/o_dir.o crypto/o_dir.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/o_fips.d.tmp -MT crypto/o_fips.o -c -o crypto/o_fips.o crypto/o_fips.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/o_fopen.d.tmp -MT crypto/o_fopen.o -c -o crypto/o_fopen.o crypto/o_fopen.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/o_init.d.tmp -MT crypto/o_init.o -c -o crypto/o_init.o crypto/o_init.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/o_str.d.tmp -MT crypto/o_str.o -c -o crypto/o_str.o crypto/o_str.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/o_time.d.tmp -MT crypto/o_time.o -c -o crypto/o_time.o crypto/o_time.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/objects/o_names.d.tmp -MT crypto/objects/o_names.o -c -o crypto/objects/o_names.o crypto/objects/o_names.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/objects/obj_dat.d.tmp -MT crypto/objects/obj_dat.o -c -o crypto/objects/obj_dat.o crypto/objects/obj_dat.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/objects/obj_err.d.tmp -MT crypto/objects/obj_err.o -c -o crypto/objects/obj_err.o crypto/objects/obj_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/objects/obj_lib.d.tmp -MT crypto/objects/obj_lib.o -c -o crypto/objects/obj_lib.o crypto/objects/obj_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/objects/obj_xref.d.tmp -MT crypto/objects/obj_xref.o -c -o crypto/objects/obj_xref.o crypto/objects/obj_xref.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ocsp/ocsp_asn.d.tmp -MT crypto/ocsp/ocsp_asn.o -c -o crypto/ocsp/ocsp_asn.o crypto/ocsp/ocsp_asn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ocsp/ocsp_cl.d.tmp -MT crypto/ocsp/ocsp_cl.o -c -o crypto/ocsp/ocsp_cl.o crypto/ocsp/ocsp_cl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ocsp/ocsp_err.d.tmp -MT crypto/ocsp/ocsp_err.o -c -o crypto/ocsp/ocsp_err.o crypto/ocsp/ocsp_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ocsp/ocsp_ext.d.tmp -MT crypto/ocsp/ocsp_ext.o -c -o crypto/ocsp/ocsp_ext.o crypto/ocsp/ocsp_ext.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ocsp/ocsp_ht.d.tmp -MT crypto/ocsp/ocsp_ht.o -c -o crypto/ocsp/ocsp_ht.o crypto/ocsp/ocsp_ht.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ocsp/ocsp_lib.d.tmp -MT crypto/ocsp/ocsp_lib.o -c -o crypto/ocsp/ocsp_lib.o crypto/ocsp/ocsp_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ocsp/ocsp_prn.d.tmp -MT crypto/ocsp/ocsp_prn.o -c -o crypto/ocsp/ocsp_prn.o crypto/ocsp/ocsp_prn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ocsp/ocsp_srv.d.tmp -MT crypto/ocsp/ocsp_srv.o -c -o crypto/ocsp/ocsp_srv.o crypto/ocsp/ocsp_srv.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ocsp/ocsp_vfy.d.tmp -MT crypto/ocsp/ocsp_vfy.o -c -o crypto/ocsp/ocsp_vfy.o crypto/ocsp/ocsp_vfy.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ocsp/v3_ocsp.d.tmp -MT crypto/ocsp/v3_ocsp.o -c -o crypto/ocsp/v3_ocsp.o crypto/ocsp/v3_ocsp.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pem/pem_all.d.tmp -MT crypto/pem/pem_all.o -c -o crypto/pem/pem_all.o crypto/pem/pem_all.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pem/pem_err.d.tmp -MT crypto/pem/pem_err.o -c -o crypto/pem/pem_err.o crypto/pem/pem_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pem/pem_info.d.tmp -MT crypto/pem/pem_info.o -c -o crypto/pem/pem_info.o crypto/pem/pem_info.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pem/pem_lib.d.tmp -MT crypto/pem/pem_lib.o -c -o crypto/pem/pem_lib.o crypto/pem/pem_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pem/pem_oth.d.tmp -MT crypto/pem/pem_oth.o -c -o crypto/pem/pem_oth.o crypto/pem/pem_oth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pem/pem_pk8.d.tmp -MT crypto/pem/pem_pk8.o -c -o crypto/pem/pem_pk8.o crypto/pem/pem_pk8.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pem/pem_pkey.d.tmp -MT crypto/pem/pem_pkey.o -c -o crypto/pem/pem_pkey.o crypto/pem/pem_pkey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pem/pem_sign.d.tmp -MT crypto/pem/pem_sign.o -c -o crypto/pem/pem_sign.o crypto/pem/pem_sign.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pem/pem_x509.d.tmp -MT crypto/pem/pem_x509.o -c -o crypto/pem/pem_x509.o crypto/pem/pem_x509.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pem/pem_xaux.d.tmp -MT crypto/pem/pem_xaux.o -c -o crypto/pem/pem_xaux.o crypto/pem/pem_xaux.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pem/pvkfmt.d.tmp -MT crypto/pem/pvkfmt.o -c -o crypto/pem/pvkfmt.o crypto/pem/pvkfmt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_add.d.tmp -MT crypto/pkcs12/p12_add.o -c -o crypto/pkcs12/p12_add.o crypto/pkcs12/p12_add.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_asn.d.tmp -MT crypto/pkcs12/p12_asn.o -c -o crypto/pkcs12/p12_asn.o crypto/pkcs12/p12_asn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_attr.d.tmp -MT crypto/pkcs12/p12_attr.o -c -o crypto/pkcs12/p12_attr.o crypto/pkcs12/p12_attr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_crpt.d.tmp -MT crypto/pkcs12/p12_crpt.o -c -o crypto/pkcs12/p12_crpt.o crypto/pkcs12/p12_crpt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_crt.d.tmp -MT crypto/pkcs12/p12_crt.o -c -o crypto/pkcs12/p12_crt.o crypto/pkcs12/p12_crt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_decr.d.tmp -MT crypto/pkcs12/p12_decr.o -c -o crypto/pkcs12/p12_decr.o crypto/pkcs12/p12_decr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_init.d.tmp -MT crypto/pkcs12/p12_init.o -c -o crypto/pkcs12/p12_init.o crypto/pkcs12/p12_init.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_key.d.tmp -MT crypto/pkcs12/p12_key.o -c -o crypto/pkcs12/p12_key.o crypto/pkcs12/p12_key.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_kiss.d.tmp -MT crypto/pkcs12/p12_kiss.o -c -o crypto/pkcs12/p12_kiss.o crypto/pkcs12/p12_kiss.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_mutl.d.tmp -MT crypto/pkcs12/p12_mutl.o -c -o crypto/pkcs12/p12_mutl.o crypto/pkcs12/p12_mutl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_npas.d.tmp -MT crypto/pkcs12/p12_npas.o -c -o crypto/pkcs12/p12_npas.o crypto/pkcs12/p12_npas.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_p8d.d.tmp -MT crypto/pkcs12/p12_p8d.o -c -o crypto/pkcs12/p12_p8d.o crypto/pkcs12/p12_p8d.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_p8e.d.tmp -MT crypto/pkcs12/p12_p8e.o -c -o crypto/pkcs12/p12_p8e.o crypto/pkcs12/p12_p8e.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_sbag.d.tmp -MT crypto/pkcs12/p12_sbag.o -c -o crypto/pkcs12/p12_sbag.o crypto/pkcs12/p12_sbag.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/p12_utl.d.tmp -MT crypto/pkcs12/p12_utl.o -c -o crypto/pkcs12/p12_utl.o crypto/pkcs12/p12_utl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs12/pk12err.d.tmp -MT crypto/pkcs12/pk12err.o -c -o crypto/pkcs12/pk12err.o crypto/pkcs12/pk12err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs7/bio_pk7.d.tmp -MT crypto/pkcs7/bio_pk7.o -c -o crypto/pkcs7/bio_pk7.o crypto/pkcs7/bio_pk7.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs7/pk7_asn1.d.tmp -MT crypto/pkcs7/pk7_asn1.o -c -o crypto/pkcs7/pk7_asn1.o crypto/pkcs7/pk7_asn1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs7/pk7_attr.d.tmp -MT crypto/pkcs7/pk7_attr.o -c -o crypto/pkcs7/pk7_attr.o crypto/pkcs7/pk7_attr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs7/pk7_doit.d.tmp -MT crypto/pkcs7/pk7_doit.o -c -o crypto/pkcs7/pk7_doit.o crypto/pkcs7/pk7_doit.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs7/pk7_lib.d.tmp -MT crypto/pkcs7/pk7_lib.o -c -o crypto/pkcs7/pk7_lib.o crypto/pkcs7/pk7_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs7/pk7_mime.d.tmp -MT crypto/pkcs7/pk7_mime.o -c -o crypto/pkcs7/pk7_mime.o crypto/pkcs7/pk7_mime.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs7/pk7_smime.d.tmp -MT crypto/pkcs7/pk7_smime.o -c -o crypto/pkcs7/pk7_smime.o crypto/pkcs7/pk7_smime.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/pkcs7/pkcs7err.d.tmp -MT crypto/pkcs7/pkcs7err.o -c -o crypto/pkcs7/pkcs7err.o crypto/pkcs7/pkcs7err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/poly1305/poly1305.d.tmp -MT crypto/poly1305/poly1305.o -c -o crypto/poly1305/poly1305.o crypto/poly1305/poly1305.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/poly1305/poly1305_ameth.d.tmp -MT crypto/poly1305/poly1305_ameth.o -c -o crypto/poly1305/poly1305_ameth.o crypto/poly1305/poly1305_ameth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/poly1305/poly1305_pmeth.d.tmp -MT crypto/poly1305/poly1305_pmeth.o -c -o crypto/poly1305/poly1305_pmeth.o crypto/poly1305/poly1305_pmeth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto/modes -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rand/drbg_ctr.d.tmp -MT crypto/rand/drbg_ctr.o -c -o crypto/rand/drbg_ctr.o crypto/rand/drbg_ctr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rand/drbg_lib.d.tmp -MT crypto/rand/drbg_lib.o -c -o crypto/rand/drbg_lib.o crypto/rand/drbg_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rand/rand_egd.d.tmp -MT crypto/rand/rand_egd.o -c -o crypto/rand/rand_egd.o crypto/rand/rand_egd.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rand/rand_err.d.tmp -MT crypto/rand/rand_err.o -c -o crypto/rand/rand_err.o crypto/rand/rand_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rand/rand_lib.d.tmp -MT crypto/rand/rand_lib.o -c -o crypto/rand/rand_lib.o crypto/rand/rand_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rand/rand_unix.d.tmp -MT crypto/rand/rand_unix.o -c -o crypto/rand/rand_unix.o crypto/rand/rand_unix.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rand/rand_vms.d.tmp -MT crypto/rand/rand_vms.o -c -o crypto/rand/rand_vms.o crypto/rand/rand_vms.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rand/rand_win.d.tmp -MT crypto/rand/rand_win.o -c -o crypto/rand/rand_win.o crypto/rand/rand_win.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rand/randfile.d.tmp -MT crypto/rand/randfile.o -c -o crypto/rand/randfile.o crypto/rand/randfile.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rc2/rc2_cbc.d.tmp -MT crypto/rc2/rc2_cbc.o -c -o crypto/rc2/rc2_cbc.o crypto/rc2/rc2_cbc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rc2/rc2_ecb.d.tmp -MT crypto/rc2/rc2_ecb.o -c -o crypto/rc2/rc2_ecb.o crypto/rc2/rc2_ecb.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rc2/rc2_skey.d.tmp -MT crypto/rc2/rc2_skey.o -c -o crypto/rc2/rc2_skey.o crypto/rc2/rc2_skey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rc2/rc2cfb64.d.tmp -MT crypto/rc2/rc2cfb64.o -c -o crypto/rc2/rc2cfb64.o crypto/rc2/rc2cfb64.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rc2/rc2ofb64.d.tmp -MT crypto/rc2/rc2ofb64.o -c -o crypto/rc2/rc2ofb64.o crypto/rc2/rc2ofb64.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rc4/rc4_enc.d.tmp -MT crypto/rc4/rc4_enc.o -c -o crypto/rc4/rc4_enc.o crypto/rc4/rc4_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rc4/rc4_skey.d.tmp -MT crypto/rc4/rc4_skey.o -c -o crypto/rc4/rc4_skey.o crypto/rc4/rc4_skey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ripemd/rmd_dgst.d.tmp -MT crypto/ripemd/rmd_dgst.o -c -o crypto/ripemd/rmd_dgst.o crypto/ripemd/rmd_dgst.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ripemd/rmd_one.d.tmp -MT crypto/ripemd/rmd_one.o -c -o crypto/ripemd/rmd_one.o crypto/ripemd/rmd_one.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_ameth.d.tmp -MT crypto/rsa/rsa_ameth.o -c -o crypto/rsa/rsa_ameth.o crypto/rsa/rsa_ameth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_asn1.d.tmp -MT crypto/rsa/rsa_asn1.o -c -o crypto/rsa/rsa_asn1.o crypto/rsa/rsa_asn1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_chk.d.tmp -MT crypto/rsa/rsa_chk.o -c -o crypto/rsa/rsa_chk.o crypto/rsa/rsa_chk.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_crpt.d.tmp -MT crypto/rsa/rsa_crpt.o -c -o crypto/rsa/rsa_crpt.o crypto/rsa/rsa_crpt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_depr.d.tmp -MT crypto/rsa/rsa_depr.o -c -o crypto/rsa/rsa_depr.o crypto/rsa/rsa_depr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_err.d.tmp -MT crypto/rsa/rsa_err.o -c -o crypto/rsa/rsa_err.o crypto/rsa/rsa_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_gen.d.tmp -MT crypto/rsa/rsa_gen.o -c -o crypto/rsa/rsa_gen.o crypto/rsa/rsa_gen.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_lib.d.tmp -MT crypto/rsa/rsa_lib.o -c -o crypto/rsa/rsa_lib.o crypto/rsa/rsa_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_meth.d.tmp -MT crypto/rsa/rsa_meth.o -c -o crypto/rsa/rsa_meth.o crypto/rsa/rsa_meth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_mp.d.tmp -MT crypto/rsa/rsa_mp.o -c -o crypto/rsa/rsa_mp.o crypto/rsa/rsa_mp.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_none.d.tmp -MT crypto/rsa/rsa_none.o -c -o crypto/rsa/rsa_none.o crypto/rsa/rsa_none.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_oaep.d.tmp -MT crypto/rsa/rsa_oaep.o -c -o crypto/rsa/rsa_oaep.o crypto/rsa/rsa_oaep.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_ossl.d.tmp -MT crypto/rsa/rsa_ossl.o -c -o crypto/rsa/rsa_ossl.o crypto/rsa/rsa_ossl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_pk1.d.tmp -MT crypto/rsa/rsa_pk1.o -c -o crypto/rsa/rsa_pk1.o crypto/rsa/rsa_pk1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_pmeth.d.tmp -MT crypto/rsa/rsa_pmeth.o -c -o crypto/rsa/rsa_pmeth.o crypto/rsa/rsa_pmeth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_prn.d.tmp -MT crypto/rsa/rsa_prn.o -c -o crypto/rsa/rsa_prn.o crypto/rsa/rsa_prn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_pss.d.tmp -MT crypto/rsa/rsa_pss.o -c -o crypto/rsa/rsa_pss.o crypto/rsa/rsa_pss.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_saos.d.tmp -MT crypto/rsa/rsa_saos.o -c -o crypto/rsa/rsa_saos.o crypto/rsa/rsa_saos.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_sign.d.tmp -MT crypto/rsa/rsa_sign.o -c -o crypto/rsa/rsa_sign.o crypto/rsa/rsa_sign.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_ssl.d.tmp -MT crypto/rsa/rsa_ssl.o -c -o crypto/rsa/rsa_ssl.o crypto/rsa/rsa_ssl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_x931.d.tmp -MT crypto/rsa/rsa_x931.o -c -o crypto/rsa/rsa_x931.o crypto/rsa/rsa_x931.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/rsa/rsa_x931g.d.tmp -MT crypto/rsa/rsa_x931g.o -c -o crypto/rsa/rsa_x931g.o crypto/rsa/rsa_x931g.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/sha/keccak1600.d.tmp -MT crypto/sha/keccak1600.o -c -o crypto/sha/keccak1600.o crypto/sha/keccak1600.c CC="mips-openwrt-linux-musl-gcc" /usr/bin/perl crypto/sha/asm/sha1-mips.pl o32 crypto/sha/sha1-mips.S mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -c -o crypto/sha/sha1-mips.o crypto/sha/sha1-mips.S mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/sha/sha1_one.d.tmp -MT crypto/sha/sha1_one.o -c -o crypto/sha/sha1_one.o crypto/sha/sha1_one.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/sha/sha1dgst.d.tmp -MT crypto/sha/sha1dgst.o -c -o crypto/sha/sha1dgst.o crypto/sha/sha1dgst.c CC="mips-openwrt-linux-musl-gcc" /usr/bin/perl crypto/sha/asm/sha512-mips.pl o32 crypto/sha/sha256-mips.S mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Icrypto -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -c -o crypto/sha/sha256-mips.o crypto/sha/sha256-mips.S mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/sha/sha256.d.tmp -MT crypto/sha/sha256.o -c -o crypto/sha/sha256.o crypto/sha/sha256.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/sha/sha512.d.tmp -MT crypto/sha/sha512.o -c -o crypto/sha/sha512.o crypto/sha/sha512.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/siphash/siphash.d.tmp -MT crypto/siphash/siphash.o -c -o crypto/siphash/siphash.o crypto/siphash/siphash.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/siphash/siphash_ameth.d.tmp -MT crypto/siphash/siphash_ameth.o -c -o crypto/siphash/siphash_ameth.o crypto/siphash/siphash_ameth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/siphash/siphash_pmeth.d.tmp -MT crypto/siphash/siphash_pmeth.o -c -o crypto/siphash/siphash_pmeth.o crypto/siphash/siphash_pmeth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/srp/srp_lib.d.tmp -MT crypto/srp/srp_lib.o -c -o crypto/srp/srp_lib.o crypto/srp/srp_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/srp/srp_vfy.d.tmp -MT crypto/srp/srp_vfy.o -c -o crypto/srp/srp_vfy.o crypto/srp/srp_vfy.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/stack/stack.d.tmp -MT crypto/stack/stack.o -c -o crypto/stack/stack.o crypto/stack/stack.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/store/loader_file.d.tmp -MT crypto/store/loader_file.o -c -o crypto/store/loader_file.o crypto/store/loader_file.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/store/store_err.d.tmp -MT crypto/store/store_err.o -c -o crypto/store/store_err.o crypto/store/store_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/store/store_init.d.tmp -MT crypto/store/store_init.o -c -o crypto/store/store_init.o crypto/store/store_init.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/store/store_lib.d.tmp -MT crypto/store/store_lib.o -c -o crypto/store/store_lib.o crypto/store/store_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/store/store_register.d.tmp -MT crypto/store/store_register.o -c -o crypto/store/store_register.o crypto/store/store_register.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/store/store_strings.d.tmp -MT crypto/store/store_strings.o -c -o crypto/store/store_strings.o crypto/store/store_strings.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/threads_none.d.tmp -MT crypto/threads_none.o -c -o crypto/threads_none.o crypto/threads_none.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/threads_pthread.d.tmp -MT crypto/threads_pthread.o -c -o crypto/threads_pthread.o crypto/threads_pthread.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/threads_win.d.tmp -MT crypto/threads_win.o -c -o crypto/threads_win.o crypto/threads_win.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ts/ts_asn1.d.tmp -MT crypto/ts/ts_asn1.o -c -o crypto/ts/ts_asn1.o crypto/ts/ts_asn1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ts/ts_conf.d.tmp -MT crypto/ts/ts_conf.o -c -o crypto/ts/ts_conf.o crypto/ts/ts_conf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ts/ts_err.d.tmp -MT crypto/ts/ts_err.o -c -o crypto/ts/ts_err.o crypto/ts/ts_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ts/ts_lib.d.tmp -MT crypto/ts/ts_lib.o -c -o crypto/ts/ts_lib.o crypto/ts/ts_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ts/ts_req_print.d.tmp -MT crypto/ts/ts_req_print.o -c -o crypto/ts/ts_req_print.o crypto/ts/ts_req_print.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ts/ts_req_utils.d.tmp -MT crypto/ts/ts_req_utils.o -c -o crypto/ts/ts_req_utils.o crypto/ts/ts_req_utils.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ts/ts_rsp_print.d.tmp -MT crypto/ts/ts_rsp_print.o -c -o crypto/ts/ts_rsp_print.o crypto/ts/ts_rsp_print.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ts/ts_rsp_sign.d.tmp -MT crypto/ts/ts_rsp_sign.o -c -o crypto/ts/ts_rsp_sign.o crypto/ts/ts_rsp_sign.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ts/ts_rsp_utils.d.tmp -MT crypto/ts/ts_rsp_utils.o -c -o crypto/ts/ts_rsp_utils.o crypto/ts/ts_rsp_utils.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ts/ts_rsp_verify.d.tmp -MT crypto/ts/ts_rsp_verify.o -c -o crypto/ts/ts_rsp_verify.o crypto/ts/ts_rsp_verify.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ts/ts_verify_ctx.d.tmp -MT crypto/ts/ts_verify_ctx.o -c -o crypto/ts/ts_verify_ctx.o crypto/ts/ts_verify_ctx.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/txt_db/txt_db.d.tmp -MT crypto/txt_db/txt_db.o -c -o crypto/txt_db/txt_db.o crypto/txt_db/txt_db.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ui/ui_err.d.tmp -MT crypto/ui/ui_err.o -c -o crypto/ui/ui_err.o crypto/ui/ui_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ui/ui_lib.d.tmp -MT crypto/ui/ui_lib.o -c -o crypto/ui/ui_lib.o crypto/ui/ui_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ui/ui_null.d.tmp -MT crypto/ui/ui_null.o -c -o crypto/ui/ui_null.o crypto/ui/ui_null.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ui/ui_openssl.d.tmp -MT crypto/ui/ui_openssl.o -c -o crypto/ui/ui_openssl.o crypto/ui/ui_openssl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/ui/ui_util.d.tmp -MT crypto/ui/ui_util.o -c -o crypto/ui/ui_util.o crypto/ui/ui_util.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/uid.d.tmp -MT crypto/uid.o -c -o crypto/uid.o crypto/uid.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/by_dir.d.tmp -MT crypto/x509/by_dir.o -c -o crypto/x509/by_dir.o crypto/x509/by_dir.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/by_file.d.tmp -MT crypto/x509/by_file.o -c -o crypto/x509/by_file.o crypto/x509/by_file.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/t_crl.d.tmp -MT crypto/x509/t_crl.o -c -o crypto/x509/t_crl.o crypto/x509/t_crl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/t_req.d.tmp -MT crypto/x509/t_req.o -c -o crypto/x509/t_req.o crypto/x509/t_req.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/t_x509.d.tmp -MT crypto/x509/t_x509.o -c -o crypto/x509/t_x509.o crypto/x509/t_x509.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_att.d.tmp -MT crypto/x509/x509_att.o -c -o crypto/x509/x509_att.o crypto/x509/x509_att.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_cmp.d.tmp -MT crypto/x509/x509_cmp.o -c -o crypto/x509/x509_cmp.o crypto/x509/x509_cmp.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_d2.d.tmp -MT crypto/x509/x509_d2.o -c -o crypto/x509/x509_d2.o crypto/x509/x509_d2.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_def.d.tmp -MT crypto/x509/x509_def.o -c -o crypto/x509/x509_def.o crypto/x509/x509_def.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_err.d.tmp -MT crypto/x509/x509_err.o -c -o crypto/x509/x509_err.o crypto/x509/x509_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_ext.d.tmp -MT crypto/x509/x509_ext.o -c -o crypto/x509/x509_ext.o crypto/x509/x509_ext.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_lu.d.tmp -MT crypto/x509/x509_lu.o -c -o crypto/x509/x509_lu.o crypto/x509/x509_lu.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_meth.d.tmp -MT crypto/x509/x509_meth.o -c -o crypto/x509/x509_meth.o crypto/x509/x509_meth.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_obj.d.tmp -MT crypto/x509/x509_obj.o -c -o crypto/x509/x509_obj.o crypto/x509/x509_obj.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_r2x.d.tmp -MT crypto/x509/x509_r2x.o -c -o crypto/x509/x509_r2x.o crypto/x509/x509_r2x.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_req.d.tmp -MT crypto/x509/x509_req.o -c -o crypto/x509/x509_req.o crypto/x509/x509_req.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_set.d.tmp -MT crypto/x509/x509_set.o -c -o crypto/x509/x509_set.o crypto/x509/x509_set.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_trs.d.tmp -MT crypto/x509/x509_trs.o -c -o crypto/x509/x509_trs.o crypto/x509/x509_trs.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_txt.d.tmp -MT crypto/x509/x509_txt.o -c -o crypto/x509/x509_txt.o crypto/x509/x509_txt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_v3.d.tmp -MT crypto/x509/x509_v3.o -c -o crypto/x509/x509_v3.o crypto/x509/x509_v3.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_vfy.d.tmp -MT crypto/x509/x509_vfy.o -c -o crypto/x509/x509_vfy.o crypto/x509/x509_vfy.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509_vpm.d.tmp -MT crypto/x509/x509_vpm.o -c -o crypto/x509/x509_vpm.o crypto/x509/x509_vpm.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509cset.d.tmp -MT crypto/x509/x509cset.o -c -o crypto/x509/x509cset.o crypto/x509/x509cset.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509name.d.tmp -MT crypto/x509/x509name.o -c -o crypto/x509/x509name.o crypto/x509/x509name.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509rset.d.tmp -MT crypto/x509/x509rset.o -c -o crypto/x509/x509rset.o crypto/x509/x509rset.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509spki.d.tmp -MT crypto/x509/x509spki.o -c -o crypto/x509/x509spki.o crypto/x509/x509spki.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x509type.d.tmp -MT crypto/x509/x509type.o -c -o crypto/x509/x509type.o crypto/x509/x509type.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x_all.d.tmp -MT crypto/x509/x_all.o -c -o crypto/x509/x_all.o crypto/x509/x_all.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x_attrib.d.tmp -MT crypto/x509/x_attrib.o -c -o crypto/x509/x_attrib.o crypto/x509/x_attrib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x_crl.d.tmp -MT crypto/x509/x_crl.o -c -o crypto/x509/x_crl.o crypto/x509/x_crl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x_exten.d.tmp -MT crypto/x509/x_exten.o -c -o crypto/x509/x_exten.o crypto/x509/x_exten.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x_name.d.tmp -MT crypto/x509/x_name.o -c -o crypto/x509/x_name.o crypto/x509/x_name.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x_pubkey.d.tmp -MT crypto/x509/x_pubkey.o -c -o crypto/x509/x_pubkey.o crypto/x509/x_pubkey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x_req.d.tmp -MT crypto/x509/x_req.o -c -o crypto/x509/x_req.o crypto/x509/x_req.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x_x509.d.tmp -MT crypto/x509/x_x509.o -c -o crypto/x509/x_x509.o crypto/x509/x_x509.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509/x_x509a.d.tmp -MT crypto/x509/x_x509a.o -c -o crypto/x509/x_x509a.o crypto/x509/x_x509a.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/pcy_cache.d.tmp -MT crypto/x509v3/pcy_cache.o -c -o crypto/x509v3/pcy_cache.o crypto/x509v3/pcy_cache.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/pcy_data.d.tmp -MT crypto/x509v3/pcy_data.o -c -o crypto/x509v3/pcy_data.o crypto/x509v3/pcy_data.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/pcy_lib.d.tmp -MT crypto/x509v3/pcy_lib.o -c -o crypto/x509v3/pcy_lib.o crypto/x509v3/pcy_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/pcy_map.d.tmp -MT crypto/x509v3/pcy_map.o -c -o crypto/x509v3/pcy_map.o crypto/x509v3/pcy_map.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/pcy_node.d.tmp -MT crypto/x509v3/pcy_node.o -c -o crypto/x509v3/pcy_node.o crypto/x509v3/pcy_node.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/pcy_tree.d.tmp -MT crypto/x509v3/pcy_tree.o -c -o crypto/x509v3/pcy_tree.o crypto/x509v3/pcy_tree.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_addr.d.tmp -MT crypto/x509v3/v3_addr.o -c -o crypto/x509v3/v3_addr.o crypto/x509v3/v3_addr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_admis.d.tmp -MT crypto/x509v3/v3_admis.o -c -o crypto/x509v3/v3_admis.o crypto/x509v3/v3_admis.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_akey.d.tmp -MT crypto/x509v3/v3_akey.o -c -o crypto/x509v3/v3_akey.o crypto/x509v3/v3_akey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_akeya.d.tmp -MT crypto/x509v3/v3_akeya.o -c -o crypto/x509v3/v3_akeya.o crypto/x509v3/v3_akeya.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_alt.d.tmp -MT crypto/x509v3/v3_alt.o -c -o crypto/x509v3/v3_alt.o crypto/x509v3/v3_alt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_asid.d.tmp -MT crypto/x509v3/v3_asid.o -c -o crypto/x509v3/v3_asid.o crypto/x509v3/v3_asid.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_bcons.d.tmp -MT crypto/x509v3/v3_bcons.o -c -o crypto/x509v3/v3_bcons.o crypto/x509v3/v3_bcons.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_bitst.d.tmp -MT crypto/x509v3/v3_bitst.o -c -o crypto/x509v3/v3_bitst.o crypto/x509v3/v3_bitst.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_conf.d.tmp -MT crypto/x509v3/v3_conf.o -c -o crypto/x509v3/v3_conf.o crypto/x509v3/v3_conf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_cpols.d.tmp -MT crypto/x509v3/v3_cpols.o -c -o crypto/x509v3/v3_cpols.o crypto/x509v3/v3_cpols.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_crld.d.tmp -MT crypto/x509v3/v3_crld.o -c -o crypto/x509v3/v3_crld.o crypto/x509v3/v3_crld.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_enum.d.tmp -MT crypto/x509v3/v3_enum.o -c -o crypto/x509v3/v3_enum.o crypto/x509v3/v3_enum.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_extku.d.tmp -MT crypto/x509v3/v3_extku.o -c -o crypto/x509v3/v3_extku.o crypto/x509v3/v3_extku.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_genn.d.tmp -MT crypto/x509v3/v3_genn.o -c -o crypto/x509v3/v3_genn.o crypto/x509v3/v3_genn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_ia5.d.tmp -MT crypto/x509v3/v3_ia5.o -c -o crypto/x509v3/v3_ia5.o crypto/x509v3/v3_ia5.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_info.d.tmp -MT crypto/x509v3/v3_info.o -c -o crypto/x509v3/v3_info.o crypto/x509v3/v3_info.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_int.d.tmp -MT crypto/x509v3/v3_int.o -c -o crypto/x509v3/v3_int.o crypto/x509v3/v3_int.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_lib.d.tmp -MT crypto/x509v3/v3_lib.o -c -o crypto/x509v3/v3_lib.o crypto/x509v3/v3_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_ncons.d.tmp -MT crypto/x509v3/v3_ncons.o -c -o crypto/x509v3/v3_ncons.o crypto/x509v3/v3_ncons.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_pci.d.tmp -MT crypto/x509v3/v3_pci.o -c -o crypto/x509v3/v3_pci.o crypto/x509v3/v3_pci.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_pcia.d.tmp -MT crypto/x509v3/v3_pcia.o -c -o crypto/x509v3/v3_pcia.o crypto/x509v3/v3_pcia.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_pcons.d.tmp -MT crypto/x509v3/v3_pcons.o -c -o crypto/x509v3/v3_pcons.o crypto/x509v3/v3_pcons.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_pku.d.tmp -MT crypto/x509v3/v3_pku.o -c -o crypto/x509v3/v3_pku.o crypto/x509v3/v3_pku.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_pmaps.d.tmp -MT crypto/x509v3/v3_pmaps.o -c -o crypto/x509v3/v3_pmaps.o crypto/x509v3/v3_pmaps.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_prn.d.tmp -MT crypto/x509v3/v3_prn.o -c -o crypto/x509v3/v3_prn.o crypto/x509v3/v3_prn.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_purp.d.tmp -MT crypto/x509v3/v3_purp.o -c -o crypto/x509v3/v3_purp.o crypto/x509v3/v3_purp.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_skey.d.tmp -MT crypto/x509v3/v3_skey.o -c -o crypto/x509v3/v3_skey.o crypto/x509v3/v3_skey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_sxnet.d.tmp -MT crypto/x509v3/v3_sxnet.o -c -o crypto/x509v3/v3_sxnet.o crypto/x509v3/v3_sxnet.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_tlsf.d.tmp -MT crypto/x509v3/v3_tlsf.o -c -o crypto/x509v3/v3_tlsf.o crypto/x509v3/v3_tlsf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3_utl.d.tmp -MT crypto/x509v3/v3_utl.o -c -o crypto/x509v3/v3_utl.o crypto/x509v3/v3_utl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF crypto/x509v3/v3err.d.tmp -MT crypto/x509v3/v3err.o -c -o crypto/x509v3/v3err.o crypto/x509v3/v3err.c mips-openwrt-linux-musl-ar r libcrypto.a crypto/aes/aes-mips.o crypto/aes/aes_cbc.o crypto/aes/aes_cfb.o crypto/aes/aes_ecb.o crypto/aes/aes_ige.o crypto/aes/aes_misc.o crypto/aes/aes_ofb.o crypto/aes/aes_wrap.o crypto/asn1/a_bitstr.o crypto/asn1/a_d2i_fp.o crypto/asn1/a_digest.o crypto/asn1/a_dup.o crypto/asn1/a_gentm.o crypto/asn1/a_i2d_fp.o crypto/asn1/a_int.o crypto/asn1/a_mbstr.o crypto/asn1/a_object.o crypto/asn1/a_octet.o crypto/asn1/a_print.o crypto/asn1/a_sign.o crypto/asn1/a_strex.o crypto/asn1/a_strnid.o crypto/asn1/a_time.o crypto/asn1/a_type.o crypto/asn1/a_utctm.o crypto/asn1/a_utf8.o crypto/asn1/a_verify.o crypto/asn1/ameth_lib.o crypto/asn1/asn1_err.o crypto/asn1/asn1_gen.o crypto/asn1/asn1_item_list.o crypto/asn1/asn1_lib.o crypto/asn1/asn1_par.o crypto/asn1/asn_mime.o crypto/asn1/asn_moid.o crypto/asn1/asn_mstbl.o crypto/asn1/asn_pack.o 
crypto/asn1/bio_asn1.o crypto/asn1/bio_ndef.o crypto/asn1/d2i_pr.o crypto/asn1/d2i_pu.o crypto/asn1/evp_asn1.o crypto/asn1/f_int.o crypto/asn1/f_string.o crypto/asn1/i2d_pr.o crypto/asn1/i2d_pu.o crypto/asn1/n_pkey.o crypto/asn1/nsseq.o crypto/asn1/p5_pbe.o crypto/asn1/p5_pbev2.o crypto/asn1/p5_scrypt.o crypto/asn1/p8_pkey.o crypto/asn1/t_bitst.o crypto/asn1/t_pkey.o crypto/asn1/t_spki.o crypto/asn1/tasn_dec.o crypto/asn1/tasn_enc.o crypto/asn1/tasn_fre.o crypto/asn1/tasn_new.o crypto/asn1/tasn_prn.o crypto/asn1/tasn_scn.o crypto/asn1/tasn_typ.o crypto/asn1/tasn_utl.o crypto/asn1/x_algor.o crypto/asn1/x_bignum.o crypto/asn1/x_info.o crypto/asn1/x_int64.o crypto/asn1/x_long.o crypto/asn1/x_pkey.o crypto/asn1/x_sig.o crypto/asn1/x_spki.o crypto/asn1/x_val.o crypto/async/arch/async_null.o crypto/async/arch/async_posix.o crypto/async/arch/async_win.o crypto/async/async.o crypto/async/async_err.o crypto/async/async_wait.o crypto/bf/bf_cfb64.o crypto/bf/bf_ecb.o crypto/bf/bf_enc.o crypto/bf/bf_ofb64.o crypto/bf/bf_skey.o crypto/bio/b_addr.o crypto/bio/b_dump.o crypto/bio/b_print.o crypto/bio/b_sock.o crypto/bio/b_sock2.o crypto/bio/bf_buff.o crypto/bio/bf_lbuf.o crypto/bio/bf_nbio.o crypto/bio/bf_null.o crypto/bio/bio_cb.o crypto/bio/bio_err.o crypto/bio/bio_lib.o crypto/bio/bio_meth.o crypto/bio/bss_acpt.o crypto/bio/bss_bio.o crypto/bio/bss_conn.o crypto/bio/bss_dgram.o crypto/bio/bss_fd.o crypto/bio/bss_file.o crypto/bio/bss_log.o crypto/bio/bss_mem.o crypto/bio/bss_null.o crypto/bio/bss_sock.o crypto/bn/bn-mips.o crypto/bn/bn_add.o crypto/bn/bn_blind.o crypto/bn/bn_const.o crypto/bn/bn_ctx.o crypto/bn/bn_depr.o crypto/bn/bn_dh.o crypto/bn/bn_div.o crypto/bn/bn_err.o crypto/bn/bn_exp.o crypto/bn/bn_exp2.o crypto/bn/bn_gcd.o crypto/bn/bn_gf2m.o crypto/bn/bn_intern.o crypto/bn/bn_kron.o crypto/bn/bn_lib.o crypto/bn/bn_mod.o crypto/bn/bn_mont.o crypto/bn/bn_mpi.o crypto/bn/bn_mul.o crypto/bn/bn_nist.o crypto/bn/bn_prime.o crypto/bn/bn_print.o crypto/bn/bn_rand.o 
crypto/bn/bn_recp.o crypto/bn/bn_shift.o crypto/bn/bn_sqr.o crypto/bn/bn_sqrt.o crypto/bn/bn_srp.o crypto/bn/bn_word.o crypto/bn/bn_x931p.o crypto/bn/mips-mont.o crypto/buffer/buf_err.o crypto/buffer/buffer.o crypto/cast/c_cfb64.o crypto/cast/c_ecb.o crypto/cast/c_enc.o crypto/cast/c_ofb64.o crypto/cast/c_skey.o crypto/chacha/chacha_enc.o crypto/cmac/cm_ameth.o crypto/cmac/cm_pmeth.o crypto/cmac/cmac.o crypto/cms/cms_asn1.o crypto/cms/cms_att.o crypto/cms/cms_cd.o crypto/cms/cms_dd.o crypto/cms/cms_enc.o crypto/cms/cms_env.o crypto/cms/cms_err.o crypto/cms/cms_ess.o crypto/cms/cms_io.o crypto/cms/cms_kari.o crypto/cms/cms_lib.o crypto/cms/cms_pwri.o crypto/cms/cms_sd.o crypto/cms/cms_smime.o crypto/conf/conf_api.o crypto/conf/conf_def.o crypto/conf/conf_err.o crypto/conf/conf_lib.o crypto/conf/conf_mall.o crypto/conf/conf_mod.o crypto/conf/conf_sap.o crypto/conf/conf_ssl.o crypto/cpt_err.o crypto/cryptlib.o crypto/ct/ct_b64.o crypto/ct/ct_err.o crypto/ct/ct_log.o crypto/ct/ct_oct.o crypto/ct/ct_policy.o crypto/ct/ct_prn.o crypto/ct/ct_sct.o crypto/ct/ct_sct_ctx.o crypto/ct/ct_vfy.o crypto/ct/ct_x509v3.o crypto/ctype.o crypto/cversion.o crypto/des/cbc_cksm.o crypto/des/cbc_enc.o crypto/des/cfb64ede.o crypto/des/cfb64enc.o crypto/des/cfb_enc.o crypto/des/des_enc.o crypto/des/ecb3_enc.o crypto/des/ecb_enc.o crypto/des/fcrypt.o crypto/des/fcrypt_b.o crypto/des/ofb64ede.o crypto/des/ofb64enc.o crypto/des/ofb_enc.o crypto/des/pcbc_enc.o crypto/des/qud_cksm.o crypto/des/rand_key.o crypto/des/set_key.o crypto/des/str2key.o crypto/des/xcbc_enc.o crypto/dh/dh_ameth.o crypto/dh/dh_asn1.o crypto/dh/dh_check.o crypto/dh/dh_depr.o crypto/dh/dh_err.o crypto/dh/dh_gen.o crypto/dh/dh_kdf.o crypto/dh/dh_key.o crypto/dh/dh_lib.o crypto/dh/dh_meth.o crypto/dh/dh_pmeth.o crypto/dh/dh_prn.o crypto/dh/dh_rfc5114.o crypto/dh/dh_rfc7919.o crypto/dsa/dsa_ameth.o crypto/dsa/dsa_asn1.o crypto/dsa/dsa_depr.o crypto/dsa/dsa_err.o crypto/dsa/dsa_gen.o crypto/dsa/dsa_key.o crypto/dsa/dsa_lib.o 
crypto/dsa/dsa_meth.o crypto/dsa/dsa_ossl.o crypto/dsa/dsa_pmeth.o crypto/dsa/dsa_prn.o crypto/dsa/dsa_sign.o crypto/dsa/dsa_vrf.o crypto/dso/dso_dl.o crypto/dso/dso_dlfcn.o crypto/dso/dso_err.o crypto/dso/dso_lib.o crypto/dso/dso_openssl.o crypto/dso/dso_vms.o crypto/dso/dso_win32.o crypto/ebcdic.o crypto/ec/curve25519.o crypto/ec/curve448/arch_32/f_impl.o crypto/ec/curve448/curve448.o crypto/ec/curve448/curve448_tables.o crypto/ec/curve448/eddsa.o crypto/ec/curve448/f_generic.o crypto/ec/curve448/scalar.o crypto/ec/ec2_oct.o crypto/ec/ec2_smpl.o crypto/ec/ec_ameth.o crypto/ec/ec_asn1.o crypto/ec/ec_check.o crypto/ec/ec_curve.o crypto/ec/ec_cvt.o crypto/ec/ec_err.o crypto/ec/ec_key.o crypto/ec/ec_kmeth.o crypto/ec/ec_lib.o crypto/ec/ec_mult.o crypto/ec/ec_oct.o crypto/ec/ec_pmeth.o crypto/ec/ec_print.o crypto/ec/ecdh_kdf.o crypto/ec/ecdh_ossl.o crypto/ec/ecdsa_ossl.o crypto/ec/ecdsa_sign.o crypto/ec/ecdsa_vrf.o crypto/ec/eck_prn.o crypto/ec/ecp_mont.o crypto/ec/ecp_nist.o crypto/ec/ecp_nistp224.o crypto/ec/ecp_nistp256.o crypto/ec/ecp_nistp521.o crypto/ec/ecp_nistputil.o crypto/ec/ecp_oct.o crypto/ec/ecp_smpl.o crypto/ec/ecx_meth.o crypto/engine/eng_all.o crypto/engine/eng_cnf.o crypto/engine/eng_ctrl.o crypto/engine/eng_dyn.o crypto/engine/eng_err.o crypto/engine/eng_fat.o crypto/engine/eng_init.o crypto/engine/eng_lib.o crypto/engine/eng_list.o crypto/engine/eng_openssl.o crypto/engine/eng_pkey.o crypto/engine/eng_rdrand.o crypto/engine/eng_table.o crypto/engine/tb_asnmth.o crypto/engine/tb_cipher.o crypto/engine/tb_dh.o crypto/engine/tb_digest.o crypto/engine/tb_dsa.o crypto/engine/tb_eckey.o crypto/engine/tb_pkmeth.o crypto/engine/tb_rand.o crypto/engine/tb_rsa.o crypto/err/err.o crypto/err/err_all.o crypto/err/err_prn.o crypto/evp/bio_b64.o crypto/evp/bio_enc.o crypto/evp/bio_md.o crypto/evp/bio_ok.o crypto/evp/c_allc.o crypto/evp/c_alld.o crypto/evp/cmeth_lib.o crypto/evp/digest.o crypto/evp/e_aes.o crypto/evp/e_aes_cbc_hmac_sha1.o 
crypto/evp/e_aes_cbc_hmac_sha256.o crypto/evp/e_aria.o crypto/evp/e_bf.o crypto/evp/e_camellia.o crypto/evp/e_cast.o crypto/evp/e_chacha20_poly1305.o crypto/evp/e_des.o crypto/evp/e_des3.o crypto/evp/e_idea.o crypto/evp/e_null.o crypto/evp/e_old.o crypto/evp/e_rc2.o crypto/evp/e_rc4.o crypto/evp/e_rc4_hmac_md5.o crypto/evp/e_rc5.o crypto/evp/e_seed.o crypto/evp/e_sm4.o crypto/evp/e_xcbc_d.o crypto/evp/encode.o crypto/evp/evp_cnf.o crypto/evp/evp_enc.o crypto/evp/evp_err.o crypto/evp/evp_key.o crypto/evp/evp_lib.o crypto/evp/evp_pbe.o crypto/evp/evp_pkey.o crypto/evp/m_md2.o crypto/evp/m_md4.o crypto/evp/m_md5.o crypto/evp/m_md5_sha1.o crypto/evp/m_mdc2.o crypto/evp/m_null.o crypto/evp/m_ripemd.o crypto/evp/m_sha1.o crypto/evp/m_sha3.o crypto/evp/m_sigver.o crypto/evp/m_wp.o crypto/evp/names.o crypto/evp/p5_crpt.o crypto/evp/p5_crpt2.o crypto/evp/p_dec.o crypto/evp/p_enc.o crypto/evp/p_lib.o crypto/evp/p_open.o crypto/evp/p_seal.o crypto/evp/p_sign.o crypto/evp/p_verify.o crypto/evp/pbe_scrypt.o crypto/evp/pmeth_fn.o crypto/evp/pmeth_gn.o crypto/evp/pmeth_lib.o crypto/ex_data.o crypto/getenv.o crypto/hmac/hm_ameth.o crypto/hmac/hm_pmeth.o crypto/hmac/hmac.o crypto/init.o crypto/kdf/hkdf.o crypto/kdf/kdf_err.o crypto/kdf/scrypt.o crypto/kdf/tls1_prf.o crypto/lhash/lh_stats.o crypto/lhash/lhash.o crypto/md4/md4_dgst.o crypto/md4/md4_one.o crypto/md5/md5_dgst.o crypto/md5/md5_one.o crypto/mem.o crypto/mem_clr.o crypto/mem_dbg.o crypto/mem_sec.o crypto/modes/cbc128.o crypto/modes/ccm128.o crypto/modes/cfb128.o crypto/modes/ctr128.o crypto/modes/cts128.o crypto/modes/gcm128.o crypto/modes/ocb128.o crypto/modes/ofb128.o crypto/modes/wrap128.o crypto/modes/xts128.o crypto/o_dir.o crypto/o_fips.o crypto/o_fopen.o crypto/o_init.o crypto/o_str.o crypto/o_time.o crypto/objects/o_names.o crypto/objects/obj_dat.o crypto/objects/obj_err.o crypto/objects/obj_lib.o crypto/objects/obj_xref.o crypto/ocsp/ocsp_asn.o crypto/ocsp/ocsp_cl.o crypto/ocsp/ocsp_err.o crypto/ocsp/ocsp_ext.o 
crypto/ocsp/ocsp_ht.o crypto/ocsp/ocsp_lib.o crypto/ocsp/ocsp_prn.o crypto/ocsp/ocsp_srv.o crypto/ocsp/ocsp_vfy.o crypto/ocsp/v3_ocsp.o crypto/pem/pem_all.o crypto/pem/pem_err.o crypto/pem/pem_info.o crypto/pem/pem_lib.o crypto/pem/pem_oth.o crypto/pem/pem_pk8.o crypto/pem/pem_pkey.o crypto/pem/pem_sign.o crypto/pem/pem_x509.o crypto/pem/pem_xaux.o crypto/pem/pvkfmt.o crypto/pkcs12/p12_add.o crypto/pkcs12/p12_asn.o crypto/pkcs12/p12_attr.o crypto/pkcs12/p12_crpt.o crypto/pkcs12/p12_crt.o crypto/pkcs12/p12_decr.o crypto/pkcs12/p12_init.o crypto/pkcs12/p12_key.o crypto/pkcs12/p12_kiss.o crypto/pkcs12/p12_mutl.o crypto/pkcs12/p12_npas.o crypto/pkcs12/p12_p8d.o crypto/pkcs12/p12_p8e.o crypto/pkcs12/p12_sbag.o crypto/pkcs12/p12_utl.o crypto/pkcs12/pk12err.o crypto/pkcs7/bio_pk7.o crypto/pkcs7/pk7_asn1.o crypto/pkcs7/pk7_attr.o crypto/pkcs7/pk7_doit.o crypto/pkcs7/pk7_lib.o crypto/pkcs7/pk7_mime.o crypto/pkcs7/pk7_smime.o crypto/pkcs7/pkcs7err.o crypto/poly1305/poly1305.o crypto/poly1305/poly1305_ameth.o crypto/poly1305/poly1305_pmeth.o crypto/rand/drbg_ctr.o crypto/rand/drbg_lib.o crypto/rand/rand_egd.o crypto/rand/rand_err.o crypto/rand/rand_lib.o crypto/rand/rand_unix.o crypto/rand/rand_vms.o crypto/rand/rand_win.o crypto/rand/randfile.o crypto/rc2/rc2_cbc.o crypto/rc2/rc2_ecb.o crypto/rc2/rc2_skey.o crypto/rc2/rc2cfb64.o crypto/rc2/rc2ofb64.o crypto/rc4/rc4_enc.o crypto/rc4/rc4_skey.o crypto/ripemd/rmd_dgst.o crypto/ripemd/rmd_one.o crypto/rsa/rsa_ameth.o crypto/rsa/rsa_asn1.o crypto/rsa/rsa_chk.o crypto/rsa/rsa_crpt.o crypto/rsa/rsa_depr.o crypto/rsa/rsa_err.o crypto/rsa/rsa_gen.o crypto/rsa/rsa_lib.o crypto/rsa/rsa_meth.o crypto/rsa/rsa_mp.o crypto/rsa/rsa_none.o crypto/rsa/rsa_oaep.o crypto/rsa/rsa_ossl.o crypto/rsa/rsa_pk1.o crypto/rsa/rsa_pmeth.o crypto/rsa/rsa_prn.o crypto/rsa/rsa_pss.o crypto/rsa/rsa_saos.o crypto/rsa/rsa_sign.o crypto/rsa/rsa_ssl.o crypto/rsa/rsa_x931.o crypto/rsa/rsa_x931g.o crypto/sha/keccak1600.o crypto/sha/sha1-mips.o 
crypto/sha/sha1_one.o crypto/sha/sha1dgst.o crypto/sha/sha256-mips.o crypto/sha/sha256.o crypto/sha/sha512.o crypto/siphash/siphash.o crypto/siphash/siphash_ameth.o crypto/siphash/siphash_pmeth.o crypto/srp/srp_lib.o crypto/srp/srp_vfy.o crypto/stack/stack.o crypto/store/loader_file.o crypto/store/store_err.o crypto/store/store_init.o crypto/store/store_lib.o crypto/store/store_register.o crypto/store/store_strings.o crypto/threads_none.o crypto/threads_pthread.o crypto/threads_win.o crypto/ts/ts_asn1.o crypto/ts/ts_conf.o crypto/ts/ts_err.o crypto/ts/ts_lib.o crypto/ts/ts_req_print.o crypto/ts/ts_req_utils.o crypto/ts/ts_rsp_print.o crypto/ts/ts_rsp_sign.o crypto/ts/ts_rsp_utils.o crypto/ts/ts_rsp_verify.o crypto/ts/ts_verify_ctx.o crypto/txt_db/txt_db.o crypto/ui/ui_err.o crypto/ui/ui_lib.o crypto/ui/ui_null.o crypto/ui/ui_openssl.o crypto/ui/ui_util.o crypto/uid.o crypto/x509/by_dir.o crypto/x509/by_file.o crypto/x509/t_crl.o crypto/x509/t_req.o crypto/x509/t_x509.o crypto/x509/x509_att.o crypto/x509/x509_cmp.o crypto/x509/x509_d2.o crypto/x509/x509_def.o crypto/x509/x509_err.o crypto/x509/x509_ext.o crypto/x509/x509_lu.o crypto/x509/x509_meth.o crypto/x509/x509_obj.o crypto/x509/x509_r2x.o crypto/x509/x509_req.o crypto/x509/x509_set.o crypto/x509/x509_trs.o crypto/x509/x509_txt.o crypto/x509/x509_v3.o crypto/x509/x509_vfy.o crypto/x509/x509_vpm.o crypto/x509/x509cset.o crypto/x509/x509name.o crypto/x509/x509rset.o crypto/x509/x509spki.o crypto/x509/x509type.o crypto/x509/x_all.o crypto/x509/x_attrib.o crypto/x509/x_crl.o crypto/x509/x_exten.o crypto/x509/x_name.o crypto/x509/x_pubkey.o crypto/x509/x_req.o crypto/x509/x_x509.o crypto/x509/x_x509a.o crypto/x509v3/pcy_cache.o crypto/x509v3/pcy_data.o crypto/x509v3/pcy_lib.o crypto/x509v3/pcy_map.o crypto/x509v3/pcy_node.o crypto/x509v3/pcy_tree.o crypto/x509v3/v3_addr.o crypto/x509v3/v3_admis.o crypto/x509v3/v3_akey.o crypto/x509v3/v3_akeya.o crypto/x509v3/v3_alt.o crypto/x509v3/v3_asid.o crypto/x509v3/v3_bcons.o 
crypto/x509v3/v3_bitst.o crypto/x509v3/v3_conf.o crypto/x509v3/v3_cpols.o crypto/x509v3/v3_crld.o crypto/x509v3/v3_enum.o crypto/x509v3/v3_extku.o crypto/x509v3/v3_genn.o crypto/x509v3/v3_ia5.o crypto/x509v3/v3_info.o crypto/x509v3/v3_int.o crypto/x509v3/v3_lib.o crypto/x509v3/v3_ncons.o crypto/x509v3/v3_pci.o crypto/x509v3/v3_pcia.o crypto/x509v3/v3_pcons.o crypto/x509v3/v3_pku.o crypto/x509v3/v3_pmaps.o crypto/x509v3/v3_prn.o crypto/x509v3/v3_purp.o crypto/x509v3/v3_skey.o crypto/x509v3/v3_sxnet.o crypto/x509v3/v3_tlsf.o crypto/x509v3/v3_utl.o crypto/x509v3/v3err.o /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-ar: creating libcrypto.a mips-openwrt-linux-musl-ranlib libcrypto.a || echo Never mind. mips-openwrt-linux-musl-gcc -I. -Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/bio_ssl.d.tmp -MT ssl/bio_ssl.o -c -o ssl/bio_ssl.o ssl/bio_ssl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/d1_lib.d.tmp -MT ssl/d1_lib.o -c -o ssl/d1_lib.o ssl/d1_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/d1_msg.d.tmp -MT ssl/d1_msg.o -c -o ssl/d1_msg.o ssl/d1_msg.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/d1_srtp.d.tmp -MT ssl/d1_srtp.o -c -o ssl/d1_srtp.o ssl/d1_srtp.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/methods.d.tmp -MT ssl/methods.o -c -o ssl/methods.o ssl/methods.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/packet.d.tmp -MT ssl/packet.o -c -o ssl/packet.o ssl/packet.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/pqueue.d.tmp -MT ssl/pqueue.o -c -o ssl/pqueue.o ssl/pqueue.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/record/dtls1_bitmap.d.tmp -MT ssl/record/dtls1_bitmap.o -c -o ssl/record/dtls1_bitmap.o ssl/record/dtls1_bitmap.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/record/rec_layer_d1.d.tmp -MT ssl/record/rec_layer_d1.o -c -o ssl/record/rec_layer_d1.o ssl/record/rec_layer_d1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/record/rec_layer_s3.d.tmp -MT ssl/record/rec_layer_s3.o -c -o ssl/record/rec_layer_s3.o ssl/record/rec_layer_s3.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/record/ssl3_buffer.d.tmp -MT ssl/record/ssl3_buffer.o -c -o ssl/record/ssl3_buffer.o ssl/record/ssl3_buffer.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/record/ssl3_record.d.tmp -MT ssl/record/ssl3_record.o -c -o ssl/record/ssl3_record.o ssl/record/ssl3_record.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/record/ssl3_record_tls13.d.tmp -MT ssl/record/ssl3_record_tls13.o -c -o ssl/record/ssl3_record_tls13.o ssl/record/ssl3_record_tls13.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/s3_cbc.d.tmp -MT ssl/s3_cbc.o -c -o ssl/s3_cbc.o ssl/s3_cbc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/s3_enc.d.tmp -MT ssl/s3_enc.o -c -o ssl/s3_enc.o ssl/s3_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/s3_lib.d.tmp -MT ssl/s3_lib.o -c -o ssl/s3_lib.o ssl/s3_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/s3_msg.d.tmp -MT ssl/s3_msg.o -c -o ssl/s3_msg.o ssl/s3_msg.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/ssl_asn1.d.tmp -MT ssl/ssl_asn1.o -c -o ssl/ssl_asn1.o ssl/ssl_asn1.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/ssl_cert.d.tmp -MT ssl/ssl_cert.o -c -o ssl/ssl_cert.o ssl/ssl_cert.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/ssl_ciph.d.tmp -MT ssl/ssl_ciph.o -c -o ssl/ssl_ciph.o ssl/ssl_ciph.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/ssl_conf.d.tmp -MT ssl/ssl_conf.o -c -o ssl/ssl_conf.o ssl/ssl_conf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/ssl_err.d.tmp -MT ssl/ssl_err.o -c -o ssl/ssl_err.o ssl/ssl_err.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/ssl_init.d.tmp -MT ssl/ssl_init.o -c -o ssl/ssl_init.o ssl/ssl_init.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/ssl_lib.d.tmp -MT ssl/ssl_lib.o -c -o ssl/ssl_lib.o ssl/ssl_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/ssl_mcnf.d.tmp -MT ssl/ssl_mcnf.o -c -o ssl/ssl_mcnf.o ssl/ssl_mcnf.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/ssl_rsa.d.tmp -MT ssl/ssl_rsa.o -c -o ssl/ssl_rsa.o ssl/ssl_rsa.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/ssl_sess.d.tmp -MT ssl/ssl_sess.o -c -o ssl/ssl_sess.o ssl/ssl_sess.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/ssl_stat.d.tmp -MT ssl/ssl_stat.o -c -o ssl/ssl_stat.o ssl/ssl_stat.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/ssl_txt.d.tmp -MT ssl/ssl_txt.o -c -o ssl/ssl_txt.o ssl/ssl_txt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/ssl_utst.d.tmp -MT ssl/ssl_utst.o -c -o ssl/ssl_utst.o ssl/ssl_utst.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/statem/extensions.d.tmp -MT ssl/statem/extensions.o -c -o ssl/statem/extensions.o ssl/statem/extensions.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/statem/extensions_clnt.d.tmp -MT ssl/statem/extensions_clnt.o -c -o ssl/statem/extensions_clnt.o ssl/statem/extensions_clnt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/statem/extensions_cust.d.tmp -MT ssl/statem/extensions_cust.o -c -o ssl/statem/extensions_cust.o ssl/statem/extensions_cust.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/statem/extensions_srvr.d.tmp -MT ssl/statem/extensions_srvr.o -c -o ssl/statem/extensions_srvr.o ssl/statem/extensions_srvr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/statem/statem.d.tmp -MT ssl/statem/statem.o -c -o ssl/statem/statem.o ssl/statem/statem.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/statem/statem_clnt.d.tmp -MT ssl/statem/statem_clnt.o -c -o ssl/statem/statem_clnt.o ssl/statem/statem_clnt.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/statem/statem_dtls.d.tmp -MT ssl/statem/statem_dtls.o -c -o ssl/statem/statem_dtls.o ssl/statem/statem_dtls.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/statem/statem_lib.d.tmp -MT ssl/statem/statem_lib.o -c -o ssl/statem/statem_lib.o ssl/statem/statem_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/statem/statem_srvr.d.tmp -MT ssl/statem/statem_srvr.o -c -o ssl/statem/statem_srvr.o ssl/statem/statem_srvr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/t1_enc.d.tmp -MT ssl/t1_enc.o -c -o ssl/t1_enc.o ssl/t1_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/t1_lib.d.tmp -MT ssl/t1_lib.o -c -o ssl/t1_lib.o ssl/t1_lib.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/t1_trce.d.tmp -MT ssl/t1_trce.o -c -o ssl/t1_trce.o ssl/t1_trce.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/tls13_enc.d.tmp -MT ssl/tls13_enc.o -c -o ssl/tls13_enc.o ssl/tls13_enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DOPENSSL_USE_NODELETE -DOPENSSL_PIC -DOPENSSL_BN_ASM_MONT -DSHA1_ASM -DSHA256_ASM -DAES_ASM -DOPENSSLDIR="\"/etc/ssl\"" -DENGINESDIR="\"/usr/lib/engines-1.1\"" -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF ssl/tls_srp.d.tmp -MT ssl/tls_srp.o -c -o ssl/tls_srp.o ssl/tls_srp.c mips-openwrt-linux-musl-ar r libssl.a ssl/bio_ssl.o ssl/d1_lib.o ssl/d1_msg.o ssl/d1_srtp.o ssl/methods.o ssl/packet.o ssl/pqueue.o ssl/record/dtls1_bitmap.o ssl/record/rec_layer_d1.o ssl/record/rec_layer_s3.o ssl/record/ssl3_buffer.o ssl/record/ssl3_record.o ssl/record/ssl3_record_tls13.o ssl/s3_cbc.o ssl/s3_enc.o ssl/s3_lib.o ssl/s3_msg.o ssl/ssl_asn1.o ssl/ssl_cert.o ssl/ssl_ciph.o ssl/ssl_conf.o ssl/ssl_err.o ssl/ssl_init.o ssl/ssl_lib.o ssl/ssl_mcnf.o ssl/ssl_rsa.o ssl/ssl_sess.o ssl/ssl_stat.o ssl/ssl_txt.o ssl/ssl_utst.o ssl/statem/extensions.o ssl/statem/extensions_clnt.o ssl/statem/extensions_cust.o ssl/statem/extensions_srvr.o ssl/statem/statem.o ssl/statem/statem_clnt.o ssl/statem/statem_dtls.o ssl/statem/statem_lib.o ssl/statem/statem_srvr.o ssl/t1_enc.o ssl/t1_lib.o ssl/t1_trce.o ssl/tls13_enc.o ssl/tls_srp.o 
/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-ar: creating libssl.a mips-openwrt-linux-musl-ranlib libssl.a || echo Never mind. /usr/bin/perl util/mkdef.pl crypto linux > libcrypto.map mips-openwrt-linux-musl-gcc -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -L. -Wl,-znodelete -shared -Wl,-Bsymbolic -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -Wl,--gc-sections -Wl,-soname=libcrypto.so.1.1 \ -o libcrypto.so.1.1 -Wl,--version-script=libcrypto.map crypto/aes/aes-mips.o crypto/aes/aes_cbc.o crypto/aes/aes_cfb.o crypto/aes/aes_ecb.o crypto/aes/aes_ige.o crypto/aes/aes_misc.o crypto/aes/aes_ofb.o crypto/aes/aes_wrap.o crypto/asn1/a_bitstr.o crypto/asn1/a_d2i_fp.o crypto/asn1/a_digest.o crypto/asn1/a_dup.o crypto/asn1/a_gentm.o crypto/asn1/a_i2d_fp.o crypto/asn1/a_int.o crypto/asn1/a_mbstr.o crypto/asn1/a_object.o crypto/asn1/a_octet.o crypto/asn1/a_print.o crypto/asn1/a_sign.o crypto/asn1/a_strex.o crypto/asn1/a_strnid.o crypto/asn1/a_time.o crypto/asn1/a_type.o crypto/asn1/a_utctm.o crypto/asn1/a_utf8.o crypto/asn1/a_verify.o crypto/asn1/ameth_lib.o crypto/asn1/asn1_err.o crypto/asn1/asn1_gen.o crypto/asn1/asn1_item_list.o crypto/asn1/asn1_lib.o crypto/asn1/asn1_par.o crypto/asn1/asn_mime.o crypto/asn1/asn_moid.o crypto/asn1/asn_mstbl.o crypto/asn1/asn_pack.o crypto/asn1/bio_asn1.o 
crypto/asn1/bio_ndef.o crypto/asn1/d2i_pr.o crypto/asn1/d2i_pu.o crypto/asn1/evp_asn1.o crypto/asn1/f_int.o crypto/asn1/f_string.o crypto/asn1/i2d_pr.o crypto/asn1/i2d_pu.o crypto/asn1/n_pkey.o crypto/asn1/nsseq.o crypto/asn1/p5_pbe.o crypto/asn1/p5_pbev2.o crypto/asn1/p5_scrypt.o crypto/asn1/p8_pkey.o crypto/asn1/t_bitst.o crypto/asn1/t_pkey.o crypto/asn1/t_spki.o crypto/asn1/tasn_dec.o crypto/asn1/tasn_enc.o crypto/asn1/tasn_fre.o crypto/asn1/tasn_new.o crypto/asn1/tasn_prn.o crypto/asn1/tasn_scn.o crypto/asn1/tasn_typ.o crypto/asn1/tasn_utl.o crypto/asn1/x_algor.o crypto/asn1/x_bignum.o crypto/asn1/x_info.o crypto/asn1/x_int64.o crypto/asn1/x_long.o crypto/asn1/x_pkey.o crypto/asn1/x_sig.o crypto/asn1/x_spki.o crypto/asn1/x_val.o crypto/async/arch/async_null.o crypto/async/arch/async_posix.o crypto/async/arch/async_win.o crypto/async/async.o crypto/async/async_err.o crypto/async/async_wait.o crypto/bf/bf_cfb64.o crypto/bf/bf_ecb.o crypto/bf/bf_enc.o crypto/bf/bf_ofb64.o crypto/bf/bf_skey.o crypto/bio/b_addr.o crypto/bio/b_dump.o crypto/bio/b_print.o crypto/bio/b_sock.o crypto/bio/b_sock2.o crypto/bio/bf_buff.o crypto/bio/bf_lbuf.o crypto/bio/bf_nbio.o crypto/bio/bf_null.o crypto/bio/bio_cb.o crypto/bio/bio_err.o crypto/bio/bio_lib.o crypto/bio/bio_meth.o crypto/bio/bss_acpt.o crypto/bio/bss_bio.o crypto/bio/bss_conn.o crypto/bio/bss_dgram.o crypto/bio/bss_fd.o crypto/bio/bss_file.o crypto/bio/bss_log.o crypto/bio/bss_mem.o crypto/bio/bss_null.o crypto/bio/bss_sock.o crypto/bn/bn-mips.o crypto/bn/bn_add.o crypto/bn/bn_blind.o crypto/bn/bn_const.o crypto/bn/bn_ctx.o crypto/bn/bn_depr.o crypto/bn/bn_dh.o crypto/bn/bn_div.o crypto/bn/bn_err.o crypto/bn/bn_exp.o crypto/bn/bn_exp2.o crypto/bn/bn_gcd.o crypto/bn/bn_gf2m.o crypto/bn/bn_intern.o crypto/bn/bn_kron.o crypto/bn/bn_lib.o crypto/bn/bn_mod.o crypto/bn/bn_mont.o crypto/bn/bn_mpi.o crypto/bn/bn_mul.o crypto/bn/bn_nist.o crypto/bn/bn_prime.o crypto/bn/bn_print.o crypto/bn/bn_rand.o crypto/bn/bn_recp.o 
crypto/bn/bn_shift.o crypto/bn/bn_sqr.o crypto/bn/bn_sqrt.o crypto/bn/bn_srp.o crypto/bn/bn_word.o crypto/bn/bn_x931p.o crypto/bn/mips-mont.o crypto/buffer/buf_err.o crypto/buffer/buffer.o crypto/cast/c_cfb64.o crypto/cast/c_ecb.o crypto/cast/c_enc.o crypto/cast/c_ofb64.o crypto/cast/c_skey.o crypto/chacha/chacha_enc.o crypto/cmac/cm_ameth.o crypto/cmac/cm_pmeth.o crypto/cmac/cmac.o crypto/cms/cms_asn1.o crypto/cms/cms_att.o crypto/cms/cms_cd.o crypto/cms/cms_dd.o crypto/cms/cms_enc.o crypto/cms/cms_env.o crypto/cms/cms_err.o crypto/cms/cms_ess.o crypto/cms/cms_io.o crypto/cms/cms_kari.o crypto/cms/cms_lib.o crypto/cms/cms_pwri.o crypto/cms/cms_sd.o crypto/cms/cms_smime.o crypto/conf/conf_api.o crypto/conf/conf_def.o crypto/conf/conf_err.o crypto/conf/conf_lib.o crypto/conf/conf_mall.o crypto/conf/conf_mod.o crypto/conf/conf_sap.o crypto/conf/conf_ssl.o crypto/cpt_err.o crypto/cryptlib.o crypto/ct/ct_b64.o crypto/ct/ct_err.o crypto/ct/ct_log.o crypto/ct/ct_oct.o crypto/ct/ct_policy.o crypto/ct/ct_prn.o crypto/ct/ct_sct.o crypto/ct/ct_sct_ctx.o crypto/ct/ct_vfy.o crypto/ct/ct_x509v3.o crypto/ctype.o crypto/cversion.o crypto/des/cbc_cksm.o crypto/des/cbc_enc.o crypto/des/cfb64ede.o crypto/des/cfb64enc.o crypto/des/cfb_enc.o crypto/des/des_enc.o crypto/des/ecb3_enc.o crypto/des/ecb_enc.o crypto/des/fcrypt.o crypto/des/fcrypt_b.o crypto/des/ofb64ede.o crypto/des/ofb64enc.o crypto/des/ofb_enc.o crypto/des/pcbc_enc.o crypto/des/qud_cksm.o crypto/des/rand_key.o crypto/des/set_key.o crypto/des/str2key.o crypto/des/xcbc_enc.o crypto/dh/dh_ameth.o crypto/dh/dh_asn1.o crypto/dh/dh_check.o crypto/dh/dh_depr.o crypto/dh/dh_err.o crypto/dh/dh_gen.o crypto/dh/dh_kdf.o crypto/dh/dh_key.o crypto/dh/dh_lib.o crypto/dh/dh_meth.o crypto/dh/dh_pmeth.o crypto/dh/dh_prn.o crypto/dh/dh_rfc5114.o crypto/dh/dh_rfc7919.o crypto/dsa/dsa_ameth.o crypto/dsa/dsa_asn1.o crypto/dsa/dsa_depr.o crypto/dsa/dsa_err.o crypto/dsa/dsa_gen.o crypto/dsa/dsa_key.o crypto/dsa/dsa_lib.o crypto/dsa/dsa_meth.o 
crypto/dsa/dsa_ossl.o crypto/dsa/dsa_pmeth.o crypto/dsa/dsa_prn.o crypto/dsa/dsa_sign.o crypto/dsa/dsa_vrf.o crypto/dso/dso_dl.o crypto/dso/dso_dlfcn.o crypto/dso/dso_err.o crypto/dso/dso_lib.o crypto/dso/dso_openssl.o crypto/dso/dso_vms.o crypto/dso/dso_win32.o crypto/ebcdic.o crypto/ec/curve25519.o crypto/ec/curve448/arch_32/f_impl.o crypto/ec/curve448/curve448.o crypto/ec/curve448/curve448_tables.o crypto/ec/curve448/eddsa.o crypto/ec/curve448/f_generic.o crypto/ec/curve448/scalar.o crypto/ec/ec2_oct.o crypto/ec/ec2_smpl.o crypto/ec/ec_ameth.o crypto/ec/ec_asn1.o crypto/ec/ec_check.o crypto/ec/ec_curve.o crypto/ec/ec_cvt.o crypto/ec/ec_err.o crypto/ec/ec_key.o crypto/ec/ec_kmeth.o crypto/ec/ec_lib.o crypto/ec/ec_mult.o crypto/ec/ec_oct.o crypto/ec/ec_pmeth.o crypto/ec/ec_print.o crypto/ec/ecdh_kdf.o crypto/ec/ecdh_ossl.o crypto/ec/ecdsa_ossl.o crypto/ec/ecdsa_sign.o crypto/ec/ecdsa_vrf.o crypto/ec/eck_prn.o crypto/ec/ecp_mont.o crypto/ec/ecp_nist.o crypto/ec/ecp_nistp224.o crypto/ec/ecp_nistp256.o crypto/ec/ecp_nistp521.o crypto/ec/ecp_nistputil.o crypto/ec/ecp_oct.o crypto/ec/ecp_smpl.o crypto/ec/ecx_meth.o crypto/engine/eng_all.o crypto/engine/eng_cnf.o crypto/engine/eng_ctrl.o crypto/engine/eng_dyn.o crypto/engine/eng_err.o crypto/engine/eng_fat.o crypto/engine/eng_init.o crypto/engine/eng_lib.o crypto/engine/eng_list.o crypto/engine/eng_openssl.o crypto/engine/eng_pkey.o crypto/engine/eng_rdrand.o crypto/engine/eng_table.o crypto/engine/tb_asnmth.o crypto/engine/tb_cipher.o crypto/engine/tb_dh.o crypto/engine/tb_digest.o crypto/engine/tb_dsa.o crypto/engine/tb_eckey.o crypto/engine/tb_pkmeth.o crypto/engine/tb_rand.o crypto/engine/tb_rsa.o crypto/err/err.o crypto/err/err_all.o crypto/err/err_prn.o crypto/evp/bio_b64.o crypto/evp/bio_enc.o crypto/evp/bio_md.o crypto/evp/bio_ok.o crypto/evp/c_allc.o crypto/evp/c_alld.o crypto/evp/cmeth_lib.o crypto/evp/digest.o crypto/evp/e_aes.o crypto/evp/e_aes_cbc_hmac_sha1.o crypto/evp/e_aes_cbc_hmac_sha256.o 
crypto/evp/e_aria.o crypto/evp/e_bf.o crypto/evp/e_camellia.o crypto/evp/e_cast.o crypto/evp/e_chacha20_poly1305.o crypto/evp/e_des.o crypto/evp/e_des3.o crypto/evp/e_idea.o crypto/evp/e_null.o crypto/evp/e_old.o crypto/evp/e_rc2.o crypto/evp/e_rc4.o crypto/evp/e_rc4_hmac_md5.o crypto/evp/e_rc5.o crypto/evp/e_seed.o crypto/evp/e_sm4.o crypto/evp/e_xcbc_d.o crypto/evp/encode.o crypto/evp/evp_cnf.o crypto/evp/evp_enc.o crypto/evp/evp_err.o crypto/evp/evp_key.o crypto/evp/evp_lib.o crypto/evp/evp_pbe.o crypto/evp/evp_pkey.o crypto/evp/m_md2.o crypto/evp/m_md4.o crypto/evp/m_md5.o crypto/evp/m_md5_sha1.o crypto/evp/m_mdc2.o crypto/evp/m_null.o crypto/evp/m_ripemd.o crypto/evp/m_sha1.o crypto/evp/m_sha3.o crypto/evp/m_sigver.o crypto/evp/m_wp.o crypto/evp/names.o crypto/evp/p5_crpt.o crypto/evp/p5_crpt2.o crypto/evp/p_dec.o crypto/evp/p_enc.o crypto/evp/p_lib.o crypto/evp/p_open.o crypto/evp/p_seal.o crypto/evp/p_sign.o crypto/evp/p_verify.o crypto/evp/pbe_scrypt.o crypto/evp/pmeth_fn.o crypto/evp/pmeth_gn.o crypto/evp/pmeth_lib.o crypto/ex_data.o crypto/getenv.o crypto/hmac/hm_ameth.o crypto/hmac/hm_pmeth.o crypto/hmac/hmac.o crypto/init.o crypto/kdf/hkdf.o crypto/kdf/kdf_err.o crypto/kdf/scrypt.o crypto/kdf/tls1_prf.o crypto/lhash/lh_stats.o crypto/lhash/lhash.o crypto/md4/md4_dgst.o crypto/md4/md4_one.o crypto/md5/md5_dgst.o crypto/md5/md5_one.o crypto/mem.o crypto/mem_clr.o crypto/mem_dbg.o crypto/mem_sec.o crypto/modes/cbc128.o crypto/modes/ccm128.o crypto/modes/cfb128.o crypto/modes/ctr128.o crypto/modes/cts128.o crypto/modes/gcm128.o crypto/modes/ocb128.o crypto/modes/ofb128.o crypto/modes/wrap128.o crypto/modes/xts128.o crypto/o_dir.o crypto/o_fips.o crypto/o_fopen.o crypto/o_init.o crypto/o_str.o crypto/o_time.o crypto/objects/o_names.o crypto/objects/obj_dat.o crypto/objects/obj_err.o crypto/objects/obj_lib.o crypto/objects/obj_xref.o crypto/ocsp/ocsp_asn.o crypto/ocsp/ocsp_cl.o crypto/ocsp/ocsp_err.o crypto/ocsp/ocsp_ext.o crypto/ocsp/ocsp_ht.o 
crypto/ocsp/ocsp_lib.o crypto/ocsp/ocsp_prn.o crypto/ocsp/ocsp_srv.o crypto/ocsp/ocsp_vfy.o crypto/ocsp/v3_ocsp.o crypto/pem/pem_all.o crypto/pem/pem_err.o crypto/pem/pem_info.o crypto/pem/pem_lib.o crypto/pem/pem_oth.o crypto/pem/pem_pk8.o crypto/pem/pem_pkey.o crypto/pem/pem_sign.o crypto/pem/pem_x509.o crypto/pem/pem_xaux.o crypto/pem/pvkfmt.o crypto/pkcs12/p12_add.o crypto/pkcs12/p12_asn.o crypto/pkcs12/p12_attr.o crypto/pkcs12/p12_crpt.o crypto/pkcs12/p12_crt.o crypto/pkcs12/p12_decr.o crypto/pkcs12/p12_init.o crypto/pkcs12/p12_key.o crypto/pkcs12/p12_kiss.o crypto/pkcs12/p12_mutl.o crypto/pkcs12/p12_npas.o crypto/pkcs12/p12_p8d.o crypto/pkcs12/p12_p8e.o crypto/pkcs12/p12_sbag.o crypto/pkcs12/p12_utl.o crypto/pkcs12/pk12err.o crypto/pkcs7/bio_pk7.o crypto/pkcs7/pk7_asn1.o crypto/pkcs7/pk7_attr.o crypto/pkcs7/pk7_doit.o crypto/pkcs7/pk7_lib.o crypto/pkcs7/pk7_mime.o crypto/pkcs7/pk7_smime.o crypto/pkcs7/pkcs7err.o crypto/poly1305/poly1305.o crypto/poly1305/poly1305_ameth.o crypto/poly1305/poly1305_pmeth.o crypto/rand/drbg_ctr.o crypto/rand/drbg_lib.o crypto/rand/rand_egd.o crypto/rand/rand_err.o crypto/rand/rand_lib.o crypto/rand/rand_unix.o crypto/rand/rand_vms.o crypto/rand/rand_win.o crypto/rand/randfile.o crypto/rc2/rc2_cbc.o crypto/rc2/rc2_ecb.o crypto/rc2/rc2_skey.o crypto/rc2/rc2cfb64.o crypto/rc2/rc2ofb64.o crypto/rc4/rc4_enc.o crypto/rc4/rc4_skey.o crypto/ripemd/rmd_dgst.o crypto/ripemd/rmd_one.o crypto/rsa/rsa_ameth.o crypto/rsa/rsa_asn1.o crypto/rsa/rsa_chk.o crypto/rsa/rsa_crpt.o crypto/rsa/rsa_depr.o crypto/rsa/rsa_err.o crypto/rsa/rsa_gen.o crypto/rsa/rsa_lib.o crypto/rsa/rsa_meth.o crypto/rsa/rsa_mp.o crypto/rsa/rsa_none.o crypto/rsa/rsa_oaep.o crypto/rsa/rsa_ossl.o crypto/rsa/rsa_pk1.o crypto/rsa/rsa_pmeth.o crypto/rsa/rsa_prn.o crypto/rsa/rsa_pss.o crypto/rsa/rsa_saos.o crypto/rsa/rsa_sign.o crypto/rsa/rsa_ssl.o crypto/rsa/rsa_x931.o crypto/rsa/rsa_x931g.o crypto/sha/keccak1600.o crypto/sha/sha1-mips.o crypto/sha/sha1_one.o 
crypto/sha/sha1dgst.o crypto/sha/sha256-mips.o crypto/sha/sha256.o crypto/sha/sha512.o crypto/siphash/siphash.o crypto/siphash/siphash_ameth.o crypto/siphash/siphash_pmeth.o crypto/srp/srp_lib.o crypto/srp/srp_vfy.o crypto/stack/stack.o crypto/store/loader_file.o crypto/store/store_err.o crypto/store/store_init.o crypto/store/store_lib.o crypto/store/store_register.o crypto/store/store_strings.o crypto/threads_none.o crypto/threads_pthread.o crypto/threads_win.o crypto/ts/ts_asn1.o crypto/ts/ts_conf.o crypto/ts/ts_err.o crypto/ts/ts_lib.o crypto/ts/ts_req_print.o crypto/ts/ts_req_utils.o crypto/ts/ts_rsp_print.o crypto/ts/ts_rsp_sign.o crypto/ts/ts_rsp_utils.o crypto/ts/ts_rsp_verify.o crypto/ts/ts_verify_ctx.o crypto/txt_db/txt_db.o crypto/ui/ui_err.o crypto/ui/ui_lib.o crypto/ui/ui_null.o crypto/ui/ui_openssl.o crypto/ui/ui_util.o crypto/uid.o crypto/x509/by_dir.o crypto/x509/by_file.o crypto/x509/t_crl.o crypto/x509/t_req.o crypto/x509/t_x509.o crypto/x509/x509_att.o crypto/x509/x509_cmp.o crypto/x509/x509_d2.o crypto/x509/x509_def.o crypto/x509/x509_err.o crypto/x509/x509_ext.o crypto/x509/x509_lu.o crypto/x509/x509_meth.o crypto/x509/x509_obj.o crypto/x509/x509_r2x.o crypto/x509/x509_req.o crypto/x509/x509_set.o crypto/x509/x509_trs.o crypto/x509/x509_txt.o crypto/x509/x509_v3.o crypto/x509/x509_vfy.o crypto/x509/x509_vpm.o crypto/x509/x509cset.o crypto/x509/x509name.o crypto/x509/x509rset.o crypto/x509/x509spki.o crypto/x509/x509type.o crypto/x509/x_all.o crypto/x509/x_attrib.o crypto/x509/x_crl.o crypto/x509/x_exten.o crypto/x509/x_name.o crypto/x509/x_pubkey.o crypto/x509/x_req.o crypto/x509/x_x509.o crypto/x509/x_x509a.o crypto/x509v3/pcy_cache.o crypto/x509v3/pcy_data.o crypto/x509v3/pcy_lib.o crypto/x509v3/pcy_map.o crypto/x509v3/pcy_node.o crypto/x509v3/pcy_tree.o crypto/x509v3/v3_addr.o crypto/x509v3/v3_admis.o crypto/x509v3/v3_akey.o crypto/x509v3/v3_akeya.o crypto/x509v3/v3_alt.o crypto/x509v3/v3_asid.o crypto/x509v3/v3_bcons.o 
crypto/x509v3/v3_bitst.o crypto/x509v3/v3_conf.o crypto/x509v3/v3_cpols.o crypto/x509v3/v3_crld.o crypto/x509v3/v3_enum.o crypto/x509v3/v3_extku.o crypto/x509v3/v3_genn.o crypto/x509v3/v3_ia5.o crypto/x509v3/v3_info.o crypto/x509v3/v3_int.o crypto/x509v3/v3_lib.o crypto/x509v3/v3_ncons.o crypto/x509v3/v3_pci.o crypto/x509v3/v3_pcia.o crypto/x509v3/v3_pcons.o crypto/x509v3/v3_pku.o crypto/x509v3/v3_pmaps.o crypto/x509v3/v3_prn.o crypto/x509v3/v3_purp.o crypto/x509v3/v3_skey.o crypto/x509v3/v3_sxnet.o crypto/x509v3/v3_tlsf.o crypto/x509v3/v3_utl.o crypto/x509v3/v3err.o \ -ldl -pthread if [ 'libcrypto.so' != 'libcrypto.so.1.1' ]; then \ rm -f libcrypto.so; \ ln -s libcrypto.so.1.1 libcrypto.so; \ fi /usr/bin/perl util/mkdef.pl ssl linux > libssl.map mips-openwrt-linux-musl-gcc -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -L. 
-Wl,-znodelete -shared -Wl,-Bsymbolic -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -Wl,--gc-sections -Wl,-soname=libssl.so.1.1 \ -o libssl.so.1.1 -Wl,--version-script=libssl.map ssl/bio_ssl.o ssl/d1_lib.o ssl/d1_msg.o ssl/d1_srtp.o ssl/methods.o ssl/packet.o ssl/pqueue.o ssl/record/dtls1_bitmap.o ssl/record/rec_layer_d1.o ssl/record/rec_layer_s3.o ssl/record/ssl3_buffer.o ssl/record/ssl3_record.o ssl/record/ssl3_record_tls13.o ssl/s3_cbc.o ssl/s3_enc.o ssl/s3_lib.o ssl/s3_msg.o ssl/ssl_asn1.o ssl/ssl_cert.o ssl/ssl_ciph.o ssl/ssl_conf.o ssl/ssl_err.o ssl/ssl_init.o ssl/ssl_lib.o ssl/ssl_mcnf.o ssl/ssl_rsa.o ssl/ssl_sess.o ssl/ssl_stat.o ssl/ssl_txt.o ssl/ssl_utst.o ssl/statem/extensions.o ssl/statem/extensions_clnt.o ssl/statem/extensions_cust.o ssl/statem/extensions_srvr.o ssl/statem/statem.o ssl/statem/statem_clnt.o ssl/statem/statem_dtls.o ssl/statem/statem_lib.o ssl/statem/statem_srvr.o ssl/t1_enc.o ssl/t1_lib.o ssl/t1_trce.o ssl/tls13_enc.o ssl/tls_srp.o \ -lcrypto -ldl -pthread if [ 'libssl.so' != 'libssl.so.1.1' ]; then \ rm -f libssl.so; \ ln -s libssl.so.1.1 libssl.so; \ fi mips-openwrt-linux-musl-gcc -Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include 
-I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF engines/e_afalg.d.tmp -MT engines/e_afalg.o -c -o engines/e_afalg.o engines/e_afalg.c mips-openwrt-linux-musl-gcc -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -L. -Wl,-znodelete -shared -Wl,-Bsymbolic -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -Wl,--gc-sections \ -o engines/afalg.so engines/e_afalg.o \ -lcrypto -ldl -pthread mips-openwrt-linux-musl-gcc -Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include 
-I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF engines/e_capi.d.tmp -MT engines/e_capi.o -c -o engines/e_capi.o engines/e_capi.c mips-openwrt-linux-musl-gcc -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -L. -Wl,-znodelete -shared -Wl,-Bsymbolic -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -Wl,--gc-sections \ -o engines/capi.so engines/e_capi.o \ -lcrypto -ldl -pthread mips-openwrt-linux-musl-gcc -Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify 
-I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF engines/e_dasync.d.tmp -MT engines/e_dasync.o -c -o engines/e_dasync.o engines/e_dasync.c mips-openwrt-linux-musl-gcc -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -L. -Wl,-znodelete -shared -Wl,-Bsymbolic -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -Wl,--gc-sections \ -o engines/dasync.so engines/e_dasync.o \ -lcrypto -ldl -pthread mips-openwrt-linux-musl-gcc -Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF engines/e_devcrypto.d.tmp -MT 
engines/e_devcrypto.o -c -o engines/e_devcrypto.o engines/e_devcrypto.c mips-openwrt-linux-musl-gcc -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -L. -Wl,-znodelete -shared -Wl,-Bsymbolic -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -Wl,--gc-sections \ -o engines/devcrypto.so engines/e_devcrypto.o \ -lcrypto -ldl -pthread mips-openwrt-linux-musl-gcc -Iinclude -fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF engines/e_ossltest.d.tmp -MT engines/e_ossltest.o -c -o engines/e_ossltest.o engines/e_ossltest.c mips-openwrt-linux-musl-gcc 
-fPIC -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -L. -Wl,-znodelete -shared -Wl,-Bsymbolic -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -Wl,--gc-sections \ -o engines/ossltest.so engines/e_ossltest.o \ -lcrypto -ldl -pthread /usr/bin/perl apps/progs.pl apps/openssl > apps/progs.h mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/asn1pars.d.tmp -MT apps/asn1pars.o -c -o apps/asn1pars.o apps/asn1pars.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/ca.d.tmp -MT apps/ca.o -c -o apps/ca.o apps/ca.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/ciphers.d.tmp -MT apps/ciphers.o -c -o apps/ciphers.o apps/ciphers.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/cms.d.tmp -MT apps/cms.o -c -o apps/cms.o apps/cms.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/crl.d.tmp -MT apps/crl.o -c -o apps/crl.o apps/crl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/crl2p7.d.tmp -MT apps/crl2p7.o -c -o apps/crl2p7.o apps/crl2p7.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/dgst.d.tmp -MT apps/dgst.o -c -o apps/dgst.o apps/dgst.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/dhparam.d.tmp -MT apps/dhparam.o -c -o apps/dhparam.o apps/dhparam.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/dsa.d.tmp -MT apps/dsa.o -c -o apps/dsa.o apps/dsa.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/dsaparam.d.tmp -MT apps/dsaparam.o -c -o apps/dsaparam.o apps/dsaparam.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/ec.d.tmp -MT apps/ec.o -c -o apps/ec.o apps/ec.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/ecparam.d.tmp -MT apps/ecparam.o -c -o apps/ecparam.o apps/ecparam.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/enc.d.tmp -MT apps/enc.o -c -o apps/enc.o apps/enc.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/engine.d.tmp -MT apps/engine.o -c -o apps/engine.o apps/engine.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/errstr.d.tmp -MT apps/errstr.o -c -o apps/errstr.o apps/errstr.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/gendsa.d.tmp -MT apps/gendsa.o -c -o apps/gendsa.o apps/gendsa.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/genpkey.d.tmp -MT apps/genpkey.o -c -o apps/genpkey.o apps/genpkey.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/genrsa.d.tmp -MT apps/genrsa.o -c -o apps/genrsa.o apps/genrsa.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/nseq.d.tmp -MT apps/nseq.o -c -o apps/nseq.o apps/nseq.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/ocsp.d.tmp -MT apps/ocsp.o -c -o apps/ocsp.o apps/ocsp.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/openssl.d.tmp -MT apps/openssl.o -c -o apps/openssl.o apps/openssl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/passwd.d.tmp -MT apps/passwd.o -c -o apps/passwd.o apps/passwd.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/pkcs12.d.tmp -MT apps/pkcs12.o -c -o apps/pkcs12.o apps/pkcs12.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/pkcs7.d.tmp -MT apps/pkcs7.o -c -o apps/pkcs7.o apps/pkcs7.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/pkcs8.d.tmp -MT apps/pkcs8.o -c -o apps/pkcs8.o apps/pkcs8.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/pkey.d.tmp -MT apps/pkey.o -c -o apps/pkey.o apps/pkey.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/pkeyparam.d.tmp -MT apps/pkeyparam.o -c -o apps/pkeyparam.o apps/pkeyparam.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/pkeyutl.d.tmp -MT apps/pkeyutl.o -c -o apps/pkeyutl.o apps/pkeyutl.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/prime.d.tmp -MT apps/prime.o -c -o apps/prime.o apps/prime.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/rand.d.tmp -MT apps/rand.o -c -o apps/rand.o apps/rand.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/rehash.d.tmp -MT apps/rehash.o -c -o apps/rehash.o apps/rehash.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/req.d.tmp -MT apps/req.o -c -o apps/req.o apps/req.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/rsa.d.tmp -MT apps/rsa.o -c -o apps/rsa.o apps/rsa.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/rsautl.d.tmp -MT apps/rsautl.o -c -o apps/rsautl.o apps/rsautl.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/s_client.d.tmp -MT apps/s_client.o -c -o apps/s_client.o apps/s_client.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/s_server.d.tmp -MT apps/s_server.o -c -o apps/s_server.o apps/s_server.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/s_time.d.tmp -MT apps/s_time.o -c -o apps/s_time.o apps/s_time.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/sess_id.d.tmp -MT apps/sess_id.o -c -o apps/sess_id.o apps/sess_id.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/smime.d.tmp -MT apps/smime.o -c -o apps/smime.o apps/smime.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/speed.d.tmp -MT apps/speed.o -c -o apps/speed.o apps/speed.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/spkac.d.tmp -MT apps/spkac.o -c -o apps/spkac.o apps/spkac.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/srp.d.tmp -MT apps/srp.o -c -o apps/srp.o apps/srp.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/storeutl.d.tmp -MT apps/storeutl.o -c -o apps/storeutl.o apps/storeutl.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/ts.d.tmp -MT apps/ts.o -c -o apps/ts.o apps/ts.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/verify.d.tmp -MT apps/verify.o -c -o apps/verify.o apps/verify.c mips-openwrt-linux-musl-gcc -I. 
-Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/version.d.tmp -MT apps/version.o -c -o apps/version.o apps/version.c mips-openwrt-linux-musl-gcc -I. -Iinclude -Iapps -pthread -mabi=32 -Wa,--noexecstack -Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -DNDEBUG -DOPENSSL_PREFER_CHACHA_OVER_GCM -DOPENSSL_SMALL_FOOTPRINT -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/include -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include/fortify -I/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/include -MMD -MF apps/x509.d.tmp -MT apps/x509.o -c -o apps/x509.o apps/x509.c rm -f apps/openssl ${LDCMD:-mips-openwrt-linux-musl-gcc} -pthread -mabi=32 -Wa,--noexecstack 
-Wall -O3 -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -ffile-prefix-map=/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k=openssl-1.1.1k -Wformat -Werror=format-security -fstack-protector -D_FORTIFY_SOURCE=1 -Wl,-z,now -Wl,-z,relro -fpic -ffunction-sections -fdata-sections -znow -zrelro -L. -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -Wl,--gc-sections \ -o apps/openssl apps/asn1pars.o apps/ca.o apps/ciphers.o apps/cms.o apps/crl.o apps/crl2p7.o apps/dgst.o apps/dhparam.o apps/dsa.o apps/dsaparam.o apps/ec.o apps/ecparam.o apps/enc.o apps/engine.o apps/errstr.o apps/gendsa.o apps/genpkey.o apps/genrsa.o apps/nseq.o apps/ocsp.o apps/openssl.o apps/passwd.o apps/pkcs12.o apps/pkcs7.o apps/pkcs8.o apps/pkey.o apps/pkeyparam.o apps/pkeyutl.o apps/prime.o apps/rand.o apps/rehash.o apps/req.o apps/rsa.o apps/rsautl.o apps/s_client.o apps/s_server.o apps/s_time.o apps/sess_id.o apps/smime.o apps/speed.o apps/spkac.o apps/srp.o apps/storeutl.o apps/ts.o apps/verify.o apps/version.o apps/x509.o \ apps/libapps.a -lssl -lcrypto -ldl -pthread /usr/bin/perl "-I." -Mconfigdata "util/dofile.pl" \ "-oMakefile" apps/CA.pl.in > "apps/CA.pl" chmod a+x apps/CA.pl /usr/bin/perl "-I." -Mconfigdata "util/dofile.pl" \ "-oMakefile" apps/tsget.in > "apps/tsget.pl" chmod a+x apps/tsget.pl /usr/bin/perl "-I." -Mconfigdata "util/dofile.pl" \ "-oMakefile" tools/c_rehash.in > "tools/c_rehash" chmod a+x tools/c_rehash /usr/bin/perl "-I." 
-Mconfigdata "util/dofile.pl" \ "-oMakefile" util/shlib_wrap.sh.in > "util/shlib_wrap.sh" chmod a+x util/shlib_wrap.sh make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make[4]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make -C /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k CC="mips-openwrt-linux-musl-gcc" DESTDIR="/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install" install_sw install_ssldirs make[4]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make depend && make _build_libs make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make[5]: Nothing to be done for '_build_libs'. 
make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' created directory `/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install' created directory `/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr' created directory `/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib' *** Installing runtime libraries install libcrypto.so.1.1 -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/libcrypto.so.1.1 install libssl.so.1.1 -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/libssl.so.1.1 *** Installing development files created directory `/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include' created directory `/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl' install ./include/openssl/aes.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/aes.h install ./include/openssl/asn1.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/asn1.h install ./include/openssl/asn1_mac.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/asn1_mac.h install ./include/openssl/asn1err.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/asn1err.h install ./include/openssl/asn1t.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/asn1t.h install ./include/openssl/async.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/async.h install ./include/openssl/asyncerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/asyncerr.h install 
./include/openssl/bio.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/bio.h install ./include/openssl/bioerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/bioerr.h install ./include/openssl/blowfish.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/blowfish.h install ./include/openssl/bn.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/bn.h install ./include/openssl/bnerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/bnerr.h install ./include/openssl/buffer.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/buffer.h install ./include/openssl/buffererr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/buffererr.h install ./include/openssl/camellia.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/camellia.h install ./include/openssl/cast.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/cast.h install ./include/openssl/cmac.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/cmac.h install ./include/openssl/cms.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/cms.h install ./include/openssl/cmserr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/cmserr.h install ./include/openssl/comp.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/comp.h install ./include/openssl/comperr.h -> 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/comperr.h install ./include/openssl/conf.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/conf.h install ./include/openssl/conf_api.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/conf_api.h install ./include/openssl/conferr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/conferr.h install ./include/openssl/crypto.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/crypto.h install ./include/openssl/cryptoerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/cryptoerr.h install ./include/openssl/ct.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ct.h install ./include/openssl/cterr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/cterr.h install ./include/openssl/des.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/des.h install ./include/openssl/dh.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/dh.h install ./include/openssl/dherr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/dherr.h install ./include/openssl/dsa.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/dsa.h install ./include/openssl/dsaerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/dsaerr.h install ./include/openssl/dtls1.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/dtls1.h 
install ./include/openssl/e_os2.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/e_os2.h install ./include/openssl/ebcdic.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ebcdic.h install ./include/openssl/ec.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ec.h install ./include/openssl/ecdh.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ecdh.h install ./include/openssl/ecdsa.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ecdsa.h install ./include/openssl/ecerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ecerr.h install ./include/openssl/engine.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/engine.h install ./include/openssl/engineerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/engineerr.h install ./include/openssl/err.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/err.h install ./include/openssl/evp.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/evp.h install ./include/openssl/evperr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/evperr.h install ./include/openssl/hmac.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/hmac.h install ./include/openssl/idea.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/idea.h install ./include/openssl/kdf.h -> 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/kdf.h install ./include/openssl/kdferr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/kdferr.h install ./include/openssl/lhash.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/lhash.h install ./include/openssl/md2.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/md2.h install ./include/openssl/md4.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/md4.h install ./include/openssl/md5.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/md5.h install ./include/openssl/mdc2.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/mdc2.h install ./include/openssl/modes.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/modes.h install ./include/openssl/obj_mac.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/obj_mac.h install ./include/openssl/objects.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/objects.h install ./include/openssl/objectserr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/objectserr.h install ./include/openssl/ocsp.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ocsp.h install ./include/openssl/ocsperr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ocsperr.h install ./include/openssl/opensslconf.h -> 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/opensslconf.h install ./include/openssl/opensslv.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/opensslv.h install ./include/openssl/ossl_typ.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ossl_typ.h install ./include/openssl/pem.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pem.h install ./include/openssl/pem2.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pem2.h install ./include/openssl/pemerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pemerr.h install ./include/openssl/pkcs12.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pkcs12.h install ./include/openssl/pkcs12err.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pkcs12err.h install ./include/openssl/pkcs7.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pkcs7.h install ./include/openssl/pkcs7err.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pkcs7err.h install ./include/openssl/rand.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rand.h install ./include/openssl/rand_drbg.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rand_drbg.h install ./include/openssl/randerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/randerr.h install ./include/openssl/rc2.h -> 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rc2.h install ./include/openssl/rc4.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rc4.h install ./include/openssl/rc5.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rc5.h install ./include/openssl/ripemd.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ripemd.h install ./include/openssl/rsa.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rsa.h install ./include/openssl/rsaerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rsaerr.h install ./include/openssl/safestack.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/safestack.h install ./include/openssl/seed.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/seed.h install ./include/openssl/sha.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/sha.h install ./include/openssl/srp.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/srp.h install ./include/openssl/srtp.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/srtp.h install ./include/openssl/ssl.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ssl.h install ./include/openssl/ssl2.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ssl2.h install ./include/openssl/ssl3.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ssl3.h install 
./include/openssl/sslerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/sslerr.h install ./include/openssl/stack.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/stack.h install ./include/openssl/store.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/store.h install ./include/openssl/storeerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/storeerr.h install ./include/openssl/symhacks.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/symhacks.h install ./include/openssl/tls1.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/tls1.h install ./include/openssl/ts.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ts.h install ./include/openssl/tserr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/tserr.h install ./include/openssl/txt_db.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/txt_db.h install ./include/openssl/ui.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ui.h install ./include/openssl/uierr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/uierr.h install ./include/openssl/whrlpool.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/whrlpool.h install ./include/openssl/x509.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/x509.h install ./include/openssl/x509_vfy.h -> 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/x509_vfy.h install ./include/openssl/x509err.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/x509err.h install ./include/openssl/x509v3.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/x509v3.h install ./include/openssl/x509v3err.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/x509v3err.h install ./include/openssl/aes.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/aes.h install ./include/openssl/asn1.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/asn1.h install ./include/openssl/asn1_mac.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/asn1_mac.h install ./include/openssl/asn1err.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/asn1err.h install ./include/openssl/asn1t.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/asn1t.h install ./include/openssl/async.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/async.h install ./include/openssl/asyncerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/asyncerr.h install ./include/openssl/bio.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/bio.h install ./include/openssl/bioerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/bioerr.h install ./include/openssl/blowfish.h -> 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/blowfish.h install ./include/openssl/bn.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/bn.h install ./include/openssl/bnerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/bnerr.h install ./include/openssl/buffer.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/buffer.h install ./include/openssl/buffererr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/buffererr.h install ./include/openssl/camellia.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/camellia.h install ./include/openssl/cast.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/cast.h install ./include/openssl/cmac.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/cmac.h install ./include/openssl/cms.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/cms.h install ./include/openssl/cmserr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/cmserr.h install ./include/openssl/comp.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/comp.h install ./include/openssl/comperr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/comperr.h install ./include/openssl/conf.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/conf.h install ./include/openssl/conf_api.h -> 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/conf_api.h install ./include/openssl/conferr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/conferr.h install ./include/openssl/crypto.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/crypto.h install ./include/openssl/cryptoerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/cryptoerr.h install ./include/openssl/ct.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ct.h install ./include/openssl/cterr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/cterr.h install ./include/openssl/des.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/des.h install ./include/openssl/dh.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/dh.h install ./include/openssl/dherr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/dherr.h install ./include/openssl/dsa.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/dsa.h install ./include/openssl/dsaerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/dsaerr.h install ./include/openssl/dtls1.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/dtls1.h install ./include/openssl/e_os2.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/e_os2.h install ./include/openssl/ebcdic.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ebcdic.h 
install ./include/openssl/ec.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ec.h install ./include/openssl/ecdh.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ecdh.h install ./include/openssl/ecdsa.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ecdsa.h install ./include/openssl/ecerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ecerr.h install ./include/openssl/engine.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/engine.h install ./include/openssl/engineerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/engineerr.h install ./include/openssl/err.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/err.h install ./include/openssl/evp.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/evp.h install ./include/openssl/evperr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/evperr.h install ./include/openssl/hmac.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/hmac.h install ./include/openssl/idea.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/idea.h install ./include/openssl/kdf.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/kdf.h install ./include/openssl/kdferr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/kdferr.h install ./include/openssl/lhash.h -> 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/lhash.h install ./include/openssl/md2.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/md2.h install ./include/openssl/md4.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/md4.h install ./include/openssl/md5.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/md5.h install ./include/openssl/mdc2.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/mdc2.h install ./include/openssl/modes.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/modes.h install ./include/openssl/obj_mac.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/obj_mac.h install ./include/openssl/objects.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/objects.h install ./include/openssl/objectserr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/objectserr.h install ./include/openssl/ocsp.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ocsp.h install ./include/openssl/ocsperr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ocsperr.h install ./include/openssl/opensslconf.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/opensslconf.h install ./include/openssl/opensslv.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/opensslv.h install ./include/openssl/ossl_typ.h -> 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ossl_typ.h install ./include/openssl/pem.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pem.h install ./include/openssl/pem2.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pem2.h install ./include/openssl/pemerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pemerr.h install ./include/openssl/pkcs12.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pkcs12.h install ./include/openssl/pkcs12err.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pkcs12err.h install ./include/openssl/pkcs7.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pkcs7.h install ./include/openssl/pkcs7err.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/pkcs7err.h install ./include/openssl/rand.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rand.h install ./include/openssl/rand_drbg.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rand_drbg.h install ./include/openssl/randerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/randerr.h install ./include/openssl/rc2.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rc2.h install ./include/openssl/rc4.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rc4.h install ./include/openssl/rc5.h -> 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rc5.h install ./include/openssl/ripemd.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ripemd.h install ./include/openssl/rsa.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rsa.h install ./include/openssl/rsaerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/rsaerr.h install ./include/openssl/safestack.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/safestack.h install ./include/openssl/seed.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/seed.h install ./include/openssl/sha.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/sha.h install ./include/openssl/srp.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/srp.h install ./include/openssl/srtp.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/srtp.h install ./include/openssl/ssl.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ssl.h install ./include/openssl/ssl2.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ssl2.h install ./include/openssl/ssl3.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ssl3.h install ./include/openssl/sslerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/sslerr.h install ./include/openssl/stack.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/stack.h install 
./include/openssl/store.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/store.h install ./include/openssl/storeerr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/storeerr.h install ./include/openssl/symhacks.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/symhacks.h install ./include/openssl/tls1.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/tls1.h install ./include/openssl/ts.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ts.h install ./include/openssl/tserr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/tserr.h install ./include/openssl/txt_db.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/txt_db.h install ./include/openssl/ui.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/ui.h install ./include/openssl/uierr.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/uierr.h install ./include/openssl/whrlpool.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/whrlpool.h install ./include/openssl/x509.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/x509.h install ./include/openssl/x509_vfy.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/x509_vfy.h install ./include/openssl/x509err.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/x509err.h install ./include/openssl/x509v3.h -> 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/x509v3.h install ./include/openssl/x509v3err.h -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl/x509v3err.h install libcrypto.a -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/libcrypto.a install libssl.a -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/libssl.a link /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/libcrypto.so -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/libcrypto.so.1.1 link /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/libssl.so -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/libssl.so.1.1 created directory `/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/pkgconfig' install libcrypto.pc -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/pkgconfig/libcrypto.pc install libssl.pc -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/pkgconfig/libssl.pc install openssl.pc -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/pkgconfig/openssl.pc make depend && make _build_engines make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make[5]: Nothing to be done for '_build_engines'. 
make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' created directory `/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/engines-1.1' *** Installing engines install engines/afalg.so -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/engines-1.1/afalg.so install engines/capi.so -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/engines-1.1/capi.so install engines/devcrypto.so -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/engines-1.1/devcrypto.so make depend && make _build_programs make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make[5]: Entering directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' make[5]: Nothing to be done for '_build_programs'. 
make[5]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' created directory `/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/bin' *** Installing runtime programs install apps/openssl -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/bin/openssl install ./tools/c_rehash -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/bin/c_rehash created directory `/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc' created directory `/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl' created directory `/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl/certs' created directory `/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl/private' created directory `/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl/misc' install ./apps/CA.pl -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl/misc/CA.pl install ./apps/tsget.pl -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl/misc/tsget.pl link /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl/misc/tsget -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl/misc/tsget.pl install ./apps/openssl.cnf -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl/openssl.cnf.dist install ./apps/openssl.cnf -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl/openssl.cnf install ./apps/ct_log_list.cnf -> /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl/ct_log_list.cnf.dist install ./apps/ct_log_list.cnf -> 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl/ct_log_list.cnf make[4]: Leaving directory '/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k' touch /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.built rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl/etc/ssl/certs install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl/etc/ssl/private chmod 0700 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl/etc/ssl/private install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl/usr/lib install -m0644 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/libcrypto.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl/usr/lib/ install -m0644 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/libssl.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl/usr/lib/ install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl/usr/lib/engines-1.1 touch /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl.installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp echo '1.1' | cmp -s - /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libopenssl.version || echo '1.1' > /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo/libopenssl.version SHELL= flock 
/home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libopenssl_installed mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl/etc/ssl/certs install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl/etc/ssl/private chmod 0700 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl/etc/ssl/private install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl/usr/lib install -m0644 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/libcrypto.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl/usr/lib/ install -m0644 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/libssl.so.* /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl/usr/lib/ install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl/usr/lib/engines-1.1 find /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" 
PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl/usr/lib/libssl.so.1.1: shared object rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl/usr/lib/libcrypto.so.1.1: shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/packages/mips_24kc/base /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl /home/build/openwrt/bin/packages/mips_24kc/base Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl into /home/build/openwrt/bin/packages/mips_24kc/base/libopenssl1.1_1.1.1k-1_mips_24kc.ipk rm -rf /home/build/openwrt/tmp/stage-openssl mkdir -p /home/build/openwrt/tmp/stage-openssl/host /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages install -d -m0755 /home/build/openwrt/tmp/stage-openssl/usr/include cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/include/openssl /home/build/openwrt/tmp/stage-openssl/usr/include/ install -d -m0755 /home/build/openwrt/tmp/stage-openssl/usr/lib/ cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/lib{crypto,ssl}.{a,so*} /home/build/openwrt/tmp/stage-openssl/usr/lib/ install -d -m0755 /home/build/openwrt/tmp/stage-openssl/usr/lib/pkgconfig cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/pkgconfig/{openssl,libcrypto,libssl}.pc /home/build/openwrt/tmp/stage-openssl/usr/lib/pkgconfig/ [ -n "-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro -Wl,--gc-sections" ] && /home/build/openwrt/staging_dir/host/bin/sed -i -e 
's#-L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/usr/lib -L/home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/lib -znow -zrelro -Wl,--gc-sections##g' /home/build/openwrt/tmp/stage-openssl/usr/lib/pkgconfig/{openssl,libcrypto,libssl}.pc || true find /home/build/openwrt/tmp/stage-openssl -name '*.la' | xargs -r rm -f; if [ -f /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/openssl.list ]; then /home/build/openwrt/scripts/clean-package.sh "/home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/openssl.list" "/home/build/openwrt/staging_dir/target-mips_24kc_musl"; fi if [ -d /home/build/openwrt/tmp/stage-openssl ]; then (cd /home/build/openwrt/tmp/stage-openssl; find ./ > /home/build/openwrt/tmp/stage-openssl.files); SHELL= flock /home/build/openwrt/tmp/.staging-dir.flock -c ' mv /home/build/openwrt/tmp/stage-openssl.files /home/build/openwrt/staging_dir/target-mips_24kc_musl/packages/openssl.list && cp -fpR /home/build/openwrt/tmp/stage-openssl/* /home/build/openwrt/staging_dir/target-mips_24kc_musl/; '; fi rm -rf /home/build/openwrt/tmp/stage-openssl touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/stamp/.openssl_installed mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-conf/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-conf/etc/ssl cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl/openssl.cnf /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-conf/etc/ssl/ find 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-conf -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-conf (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-conf/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; echo "$V_Package_libopenssl_conf_conffiles" > conffiles; ) install -d -m0755 /home/build/openwrt/bin/packages/mips_24kc/base /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-conf /home/build/openwrt/bin/packages/mips_24kc/base Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-conf into /home/build/openwrt/bin/packages/mips_24kc/base/libopenssl-conf_1.1.1k-1_mips_24kc.ipk rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-conf.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-conf mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-conf install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-conf/etc/ssl cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/etc/ssl/openssl.cnf /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-conf/etc/ssl/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-conf.installed mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-afalg/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-afalg/usr/lib/engines-1.1 install -m0755 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/engines-1.1/afalg.so /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-afalg/usr/lib/engines-1.1 find /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-afalg -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-afalg rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-afalg/usr/lib/engines-1.1/afalg.so: shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-afalg/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/packages/mips_24kc/base /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-afalg /home/build/openwrt/bin/packages/mips_24kc/base Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-afalg into /home/build/openwrt/bin/packages/mips_24kc/base/libopenssl-afalg_1.1.1k-1_mips_24kc.ipk rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-afalg.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-afalg mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-afalg install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-afalg/usr/lib/engines-1.1 install -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/engines-1.1/afalg.so /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-afalg/usr/lib/engines-1.1 touch /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-afalg.installed mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-devcrypto/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-devcrypto/usr/lib/engines-1.1 install -m0755 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/engines-1.1/devcrypto.so /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-devcrypto/usr/lib/engines-1.1 find /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-devcrypto -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-devcrypto rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-devcrypto/usr/lib/engines-1.1/devcrypto.so: shared object (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-devcrypto/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/packages/mips_24kc/base /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-devcrypto /home/build/openwrt/bin/packages/mips_24kc/base Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/libopenssl-devcrypto into /home/build/openwrt/bin/packages/mips_24kc/base/libopenssl-devcrypto_1.1.1k-1_mips_24kc.ipk rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-devcrypto.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-devcrypto mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-devcrypto install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-devcrypto/usr/lib/engines-1.1 install -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/lib/engines-1.1/devcrypto.so /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-devcrypto/usr/lib/engines-1.1 touch /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-devcrypto.installed mkdir -p /home/build/openwrt/bin/targets/ath79/generic/packages /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/openssl-util/CONTROL /home/build/openwrt/staging_dir/target-mips_24kc_musl/pkginfo install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/openssl-util/usr/bin install -m0755 
/home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/bin/openssl /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/openssl-util/usr/bin/ find /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/openssl-util -name 'CVS' -o -name '.svn' -o -name '.#*' -o -name '*~'| xargs -r rm -rf export CROSS="mips-openwrt-linux-musl-" NO_RENAME=1 ; NM="mips-openwrt-linux-musl-nm" STRIP="/home/build/openwrt/staging_dir/host/bin/sstrip" STRIP_KMOD="/home/build/openwrt/scripts/strip-kmod.sh" PATCHELF="/home/build/openwrt/staging_dir/host/bin/patchelf" /home/build/openwrt/scripts/rstrip.sh /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/openssl-util rstrip.sh: /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/openssl-util/usr/bin/openssl: executable (cd /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/openssl-util/CONTROL; ( echo "$CONTROL"; printf "Description: "; echo "$DESCRIPTION" | sed -e 's,^[[:space:]]*, ,g'; ) > control; chmod 644 control; ( echo "#!/bin/sh"; echo "[ \"\${IPKG_NO_SCRIPT}\" = \"1\" ] && exit 0"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". \${IPKG_INSTROOT}/lib/functions.sh"; echo "default_postinst \$0 \$@"; ) > postinst; ( echo "#!/bin/sh"; echo "[ -x "\${IPKG_INSTROOT}/lib/functions.sh" ] || exit 0"; echo ". 
\${IPKG_INSTROOT}/lib/functions.sh"; echo "default_prerm \$0 \$@"; ) > prerm; chmod 0755 postinst prerm; ) install -d -m0755 /home/build/openwrt/bin/packages/mips_24kc/base /home/build/openwrt/staging_dir/host/bin/fakeroot -l /home/build/openwrt/staging_dir/host/lib/libfakeroot.so -f /home/build/openwrt/staging_dir/host/bin/faked /home/build/openwrt/scripts/ipkg-build -m "" /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/openssl-util /home/build/openwrt/bin/packages/mips_24kc/base Packaged contents of /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-mips_24kc/openssl-util into /home/build/openwrt/bin/packages/mips_24kc/base/openssl-util_1.1.1k-1_mips_24kc.ipk rm -rf /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/openssl-util.installed /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/openssl-util mkdir -p /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/openssl-util install -d -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/openssl-util/usr/bin install -m0755 /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/ipkg-install/usr/bin/openssl /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/openssl-util/usr/bin/ touch /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/openssl-util.installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-conf/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libopenssl-conf_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-afalg/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libopenssl-afalg_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/libopenssl-devcrypto/. /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.libopenssl-devcrypto_installed mkdir -p /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp SHELL= flock /home/build/openwrt/tmp/.root-copy.flock -c 'cp -fpR /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.pkgdir/openssl-util/. 
/home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/' touch /home/build/openwrt/staging_dir/target-mips_24kc_musl/root-ath79/stamp/.openssl-util_installed touch -r /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.built /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k/.autoremove 2>/dev/null >/dev/null find /home/build/openwrt/build_dir/target-mips_24kc_musl/openssl-1.1.1k -mindepth 1 -maxdepth 1 -not '(' -type f -and -name '.*' -and -size 0 ')' -and -not -name '.pkgdir' | xargs -r rm -rf make[3]: Leaving directory '/home/build/openwrt/feeds/base/package/libs/openssl' time: package/feeds/base/openssl/compile#96.22#18.47#121.36 make[2]: Leaving directory '/home/build/openwrt' make[1]: Leaving directory '/home/build/openwrt' touch /home/build/openwrt/tmp/.ci-sdk-prepared ( echo "SET(CMAKE_SYSTEM_NAME Linux)" ; echo "SET(CMAKE_FIND_ROOT_PATH /home/build/openwrt/staging_dir/target-mips_24kc_musl)" ; echo "SET(OWRT_CROSS /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-)" ; echo 'SET(CMAKE_C_COMPILER ${OWRT_CROSS}gcc)' ; echo 'SET(CMAKE_CXX_COMPILER ${OWRT_CROSS}g++)' ; echo "SET(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)" ; echo "SET(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)" ; echo "SET(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)" ; echo "ADD_DEFINITIONS(-Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float)" ; ) > toolchain.cmake rm -fr ./build 2>/dev/null; mkdir -p ./build && cd ./build && /home/build/openwrt/staging_dir/host/bin/cmake -D CMAKE_BUILD_TYPE=Debug -D CMAKE_TOOLCHAIN_FILE=toolchain.cmake .. ; ret=$? ; if [ $ret != 0 ]; then exit $ret; fi ; make -j1 VERBOSE=1 all ; ret=$? ; if [ $ret != 0 ]; then exit $ret; fi ; cd .. 
-- The C compiler identification is GNU 8.4.0 -- Detecting C compiler ABI info -- Detecting C compiler ABI info - done -- Check for working C compiler: /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc - skipped -- Detecting C compile features -- Detecting C compile features - done -- Configuring done -- Generating done -- Build files have been written to: /builds/openwrt/project/ustream-ssl/build make[1]: Entering directory '/builds/openwrt/project/ustream-ssl/build' /home/build/openwrt/staging_dir/host/bin/cmake -S/builds/openwrt/project/ustream-ssl -B/builds/openwrt/project/ustream-ssl/build --check-build-system CMakeFiles/Makefile.cmake 0 /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_progress_start /builds/openwrt/project/ustream-ssl/build/CMakeFiles /builds/openwrt/project/ustream-ssl/build//CMakeFiles/progress.marks make -f CMakeFiles/Makefile2 all make[2]: Entering directory '/builds/openwrt/project/ustream-ssl/build' make -f CMakeFiles/ustream-ssl.dir/build.make CMakeFiles/ustream-ssl.dir/depend make[3]: Entering directory '/builds/openwrt/project/ustream-ssl/build' cd /builds/openwrt/project/ustream-ssl/build && /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_depends "Unix Makefiles" /builds/openwrt/project/ustream-ssl /builds/openwrt/project/ustream-ssl /builds/openwrt/project/ustream-ssl/build /builds/openwrt/project/ustream-ssl/build /builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-ssl.dir/DependInfo.cmake --color= Dependee "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-ssl.dir/DependInfo.cmake" is newer than depender "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-ssl.dir/depend.internal". Dependee "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/CMakeDirectoryInformation.cmake" is newer than depender "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-ssl.dir/depend.internal". 
Scanning dependencies of target ustream-ssl make[3]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' make -f CMakeFiles/ustream-ssl.dir/build.make CMakeFiles/ustream-ssl.dir/build make[3]: Entering directory '/builds/openwrt/project/ustream-ssl/build' [ 12%] Building C object CMakeFiles/ustream-ssl.dir/ustream-ssl.c.o /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -Dustream_ssl_EXPORTS -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -g -fPIC -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -Wall -Werror --std=gnu99 -g3 -Wextra -Werror=implicit-function-declaration -Wformat -Werror=format-security -Werror=format-nonliteral -Wno-unused-parameter -Wmissing-declarations -o CMakeFiles/ustream-ssl.dir/ustream-ssl.c.o -c /builds/openwrt/project/ustream-ssl/ustream-ssl.c [ 25%] Building C object CMakeFiles/ustream-ssl.dir/ustream-io-openssl.c.o /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -Dustream_ssl_EXPORTS -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -g -fPIC -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -Wall -Werror --std=gnu99 -g3 -Wextra -Werror=implicit-function-declaration -Wformat -Werror=format-security -Werror=format-nonliteral -Wno-unused-parameter -Wmissing-declarations -o CMakeFiles/ustream-ssl.dir/ustream-io-openssl.c.o -c 
/builds/openwrt/project/ustream-ssl/ustream-io-openssl.c [ 37%] Building C object CMakeFiles/ustream-ssl.dir/ustream-openssl.c.o /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -Dustream_ssl_EXPORTS -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -g -fPIC -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -Wall -Werror --std=gnu99 -g3 -Wextra -Werror=implicit-function-declaration -Wformat -Werror=format-security -Werror=format-nonliteral -Wno-unused-parameter -Wmissing-declarations -o CMakeFiles/ustream-ssl.dir/ustream-openssl.c.o -c /builds/openwrt/project/ustream-ssl/ustream-openssl.c [ 50%] Linking C shared library libustream-ssl.so /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_link_script CMakeFiles/ustream-ssl.dir/link.txt --verbose=1 /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -fPIC -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -g -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -shared -Wl,-soname,libustream-ssl.so -o libustream-ssl.so CMakeFiles/ustream-ssl.dir/ustream-ssl.c.o CMakeFiles/ustream-ssl.dir/ustream-io-openssl.c.o CMakeFiles/ustream-ssl.dir/ustream-openssl.c.o -lubox -lcrypto -lssl make[3]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' [ 50%] Built target ustream-ssl make -f CMakeFiles/ustream-example-client.dir/build.make CMakeFiles/ustream-example-client.dir/depend make[3]: Entering directory '/builds/openwrt/project/ustream-ssl/build' cd /builds/openwrt/project/ustream-ssl/build && 
/home/build/openwrt/staging_dir/host/bin/cmake -E cmake_depends "Unix Makefiles" /builds/openwrt/project/ustream-ssl /builds/openwrt/project/ustream-ssl /builds/openwrt/project/ustream-ssl/build /builds/openwrt/project/ustream-ssl/build /builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-client.dir/DependInfo.cmake --color= Dependee "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-client.dir/DependInfo.cmake" is newer than depender "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-client.dir/depend.internal". Dependee "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/CMakeDirectoryInformation.cmake" is newer than depender "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-client.dir/depend.internal". Scanning dependencies of target ustream-example-client make[3]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' make -f CMakeFiles/ustream-example-client.dir/build.make CMakeFiles/ustream-example-client.dir/build make[3]: Entering directory '/builds/openwrt/project/ustream-ssl/build' [ 62%] Building C object CMakeFiles/ustream-example-client.dir/ustream-example-client.c.o /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -g -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -Wall -Werror --std=gnu99 -g3 -Wextra -Werror=implicit-function-declaration -Wformat -Werror=format-security -Werror=format-nonliteral -Wno-unused-parameter -Wmissing-declarations -o CMakeFiles/ustream-example-client.dir/ustream-example-client.c.o -c /builds/openwrt/project/ustream-ssl/ustream-example-client.c [ 
75%] Linking C executable ustream-example-client /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_link_script CMakeFiles/ustream-example-client.dir/link.txt --verbose=1 /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -g -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib CMakeFiles/ustream-example-client.dir/ustream-example-client.c.o -o ustream-example-client -Wl,-rpath,/builds/openwrt/project/ustream-ssl/build libustream-ssl.so -lubox -lcrypto -lssl make[3]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' [ 75%] Built target ustream-example-client make -f CMakeFiles/ustream-example-server.dir/build.make CMakeFiles/ustream-example-server.dir/depend make[3]: Entering directory '/builds/openwrt/project/ustream-ssl/build' cd /builds/openwrt/project/ustream-ssl/build && /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_depends "Unix Makefiles" /builds/openwrt/project/ustream-ssl /builds/openwrt/project/ustream-ssl /builds/openwrt/project/ustream-ssl/build /builds/openwrt/project/ustream-ssl/build /builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-server.dir/DependInfo.cmake --color= Dependee "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-server.dir/DependInfo.cmake" is newer than depender "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-server.dir/depend.internal". Dependee "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/CMakeDirectoryInformation.cmake" is newer than depender "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-server.dir/depend.internal". 
Scanning dependencies of target ustream-example-server make[3]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' make -f CMakeFiles/ustream-example-server.dir/build.make CMakeFiles/ustream-example-server.dir/build make[3]: Entering directory '/builds/openwrt/project/ustream-ssl/build' [ 87%] Building C object CMakeFiles/ustream-example-server.dir/ustream-example-server.c.o /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -g -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -Wall -Werror --std=gnu99 -g3 -Wextra -Werror=implicit-function-declaration -Wformat -Werror=format-security -Werror=format-nonliteral -Wno-unused-parameter -Wmissing-declarations -o CMakeFiles/ustream-example-server.dir/ustream-example-server.c.o -c /builds/openwrt/project/ustream-ssl/ustream-example-server.c [100%] Linking C executable ustream-example-server /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_link_script CMakeFiles/ustream-example-server.dir/link.txt --verbose=1 /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -g -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib CMakeFiles/ustream-example-server.dir/ustream-example-server.c.o -o ustream-example-server -Wl,-rpath,/builds/openwrt/project/ustream-ssl/build libustream-ssl.so -lubox -lcrypto -lssl make[3]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' [100%] Built target ustream-example-server make[2]: Leaving 
directory '/builds/openwrt/project/ustream-ssl/build' /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_progress_start /builds/openwrt/project/ustream-ssl/build/CMakeFiles 0 make[1]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' rm -fr ./build 2>/dev/null; mkdir -p ./build && cd ./build && /home/build/openwrt/staging_dir/host/bin/cmake -D CMAKE_BUILD_TYPE=Release -D CMAKE_TOOLCHAIN_FILE=toolchain.cmake .. ; ret=$? ; if [ $ret != 0 ]; then exit $ret; fi ; make -j1 VERBOSE=1 all ; ret=$? ; if [ $ret != 0 ]; then exit $ret; fi ; cd .. -- The C compiler identification is GNU 8.4.0 -- Detecting C compiler ABI info -- Detecting C compiler ABI info - done -- Check for working C compiler: /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc - skipped -- Detecting C compile features -- Detecting C compile features - done -- Configuring done -- Generating done -- Build files have been written to: /builds/openwrt/project/ustream-ssl/build make[1]: Entering directory '/builds/openwrt/project/ustream-ssl/build' /home/build/openwrt/staging_dir/host/bin/cmake -S/builds/openwrt/project/ustream-ssl -B/builds/openwrt/project/ustream-ssl/build --check-build-system CMakeFiles/Makefile.cmake 0 /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_progress_start /builds/openwrt/project/ustream-ssl/build/CMakeFiles /builds/openwrt/project/ustream-ssl/build//CMakeFiles/progress.marks make -f CMakeFiles/Makefile2 all make[2]: Entering directory '/builds/openwrt/project/ustream-ssl/build' make -f CMakeFiles/ustream-ssl.dir/build.make CMakeFiles/ustream-ssl.dir/depend make[3]: Entering directory '/builds/openwrt/project/ustream-ssl/build' cd /builds/openwrt/project/ustream-ssl/build && /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_depends "Unix Makefiles" /builds/openwrt/project/ustream-ssl /builds/openwrt/project/ustream-ssl /builds/openwrt/project/ustream-ssl/build /builds/openwrt/project/ustream-ssl/build 
/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-ssl.dir/DependInfo.cmake --color= Dependee "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-ssl.dir/DependInfo.cmake" is newer than depender "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-ssl.dir/depend.internal". Dependee "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/CMakeDirectoryInformation.cmake" is newer than depender "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-ssl.dir/depend.internal". Scanning dependencies of target ustream-ssl make[3]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' make -f CMakeFiles/ustream-ssl.dir/build.make CMakeFiles/ustream-ssl.dir/build make[3]: Entering directory '/builds/openwrt/project/ustream-ssl/build' [ 12%] Building C object CMakeFiles/ustream-ssl.dir/ustream-ssl.c.o /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -Dustream_ssl_EXPORTS -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -O3 -DNDEBUG -fPIC -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -Wall -Werror --std=gnu99 -g3 -Wextra -Werror=implicit-function-declaration -Wformat -Werror=format-security -Werror=format-nonliteral -Wno-unused-parameter -Wmissing-declarations -o CMakeFiles/ustream-ssl.dir/ustream-ssl.c.o -c /builds/openwrt/project/ustream-ssl/ustream-ssl.c [ 25%] Building C object CMakeFiles/ustream-ssl.dir/ustream-io-openssl.c.o /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -Dustream_ssl_EXPORTS -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -O3 -DNDEBUG -fPIC -Os -pipe -mno-branch-likely -mips32r2 
-mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -Wall -Werror --std=gnu99 -g3 -Wextra -Werror=implicit-function-declaration -Wformat -Werror=format-security -Werror=format-nonliteral -Wno-unused-parameter -Wmissing-declarations -o CMakeFiles/ustream-ssl.dir/ustream-io-openssl.c.o -c /builds/openwrt/project/ustream-ssl/ustream-io-openssl.c [ 37%] Building C object CMakeFiles/ustream-ssl.dir/ustream-openssl.c.o /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -Dustream_ssl_EXPORTS -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -O3 -DNDEBUG -fPIC -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -Wall -Werror --std=gnu99 -g3 -Wextra -Werror=implicit-function-declaration -Wformat -Werror=format-security -Werror=format-nonliteral -Wno-unused-parameter -Wmissing-declarations -o CMakeFiles/ustream-ssl.dir/ustream-openssl.c.o -c /builds/openwrt/project/ustream-ssl/ustream-openssl.c [ 50%] Linking C shared library libustream-ssl.so /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_link_script CMakeFiles/ustream-ssl.dir/link.txt --verbose=1 /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -fPIC -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -O3 -DNDEBUG -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib -shared 
-Wl,-soname,libustream-ssl.so -o libustream-ssl.so CMakeFiles/ustream-ssl.dir/ustream-ssl.c.o CMakeFiles/ustream-ssl.dir/ustream-io-openssl.c.o CMakeFiles/ustream-ssl.dir/ustream-openssl.c.o -lubox -lcrypto -lssl make[3]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' [ 50%] Built target ustream-ssl make -f CMakeFiles/ustream-example-client.dir/build.make CMakeFiles/ustream-example-client.dir/depend make[3]: Entering directory '/builds/openwrt/project/ustream-ssl/build' cd /builds/openwrt/project/ustream-ssl/build && /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_depends "Unix Makefiles" /builds/openwrt/project/ustream-ssl /builds/openwrt/project/ustream-ssl /builds/openwrt/project/ustream-ssl/build /builds/openwrt/project/ustream-ssl/build /builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-client.dir/DependInfo.cmake --color= Dependee "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-client.dir/DependInfo.cmake" is newer than depender "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-client.dir/depend.internal". Dependee "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/CMakeDirectoryInformation.cmake" is newer than depender "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-client.dir/depend.internal". 
Scanning dependencies of target ustream-example-client make[3]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' make -f CMakeFiles/ustream-example-client.dir/build.make CMakeFiles/ustream-example-client.dir/build make[3]: Entering directory '/builds/openwrt/project/ustream-ssl/build' [ 62%] Building C object CMakeFiles/ustream-example-client.dir/ustream-example-client.c.o /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -O3 -DNDEBUG -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -Wall -Werror --std=gnu99 -g3 -Wextra -Werror=implicit-function-declaration -Wformat -Werror=format-security -Werror=format-nonliteral -Wno-unused-parameter -Wmissing-declarations -o CMakeFiles/ustream-example-client.dir/ustream-example-client.c.o -c /builds/openwrt/project/ustream-ssl/ustream-example-client.c [ 75%] Linking C executable ustream-example-client /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_link_script CMakeFiles/ustream-example-client.dir/link.txt --verbose=1 /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -O3 -DNDEBUG -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib CMakeFiles/ustream-example-client.dir/ustream-example-client.c.o -o ustream-example-client -Wl,-rpath,/builds/openwrt/project/ustream-ssl/build libustream-ssl.so -lubox -lcrypto -lssl make[3]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' [ 75%] Built target ustream-example-client 
make -f CMakeFiles/ustream-example-server.dir/build.make CMakeFiles/ustream-example-server.dir/depend make[3]: Entering directory '/builds/openwrt/project/ustream-ssl/build' cd /builds/openwrt/project/ustream-ssl/build && /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_depends "Unix Makefiles" /builds/openwrt/project/ustream-ssl /builds/openwrt/project/ustream-ssl /builds/openwrt/project/ustream-ssl/build /builds/openwrt/project/ustream-ssl/build /builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-server.dir/DependInfo.cmake --color= Dependee "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-server.dir/DependInfo.cmake" is newer than depender "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-server.dir/depend.internal". Dependee "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/CMakeDirectoryInformation.cmake" is newer than depender "/builds/openwrt/project/ustream-ssl/build/CMakeFiles/ustream-example-server.dir/depend.internal". 
Scanning dependencies of target ustream-example-server make[3]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' make -f CMakeFiles/ustream-example-server.dir/build.make CMakeFiles/ustream-example-server.dir/build make[3]: Entering directory '/builds/openwrt/project/ustream-ssl/build' [ 87%] Building C object CMakeFiles/ustream-example-server.dir/ustream-example-server.c.o /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -O3 -DNDEBUG -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -pipe -mno-branch-likely -mips32r2 -mtune=24kc -fno-caller-saves -fno-plt -fhonour-copts -Wno-error=unused-but-set-variable -Wno-error=unused-result -msoft-float -Os -Wall -Werror --std=gnu99 -g3 -Wextra -Werror=implicit-function-declaration -Wformat -Werror=format-security -Werror=format-nonliteral -Wno-unused-parameter -Wmissing-declarations -o CMakeFiles/ustream-example-server.dir/ustream-example-server.c.o -c /builds/openwrt/project/ustream-ssl/ustream-example-server.c [100%] Linking C executable ustream-example-server /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_link_script CMakeFiles/ustream-example-server.dir/link.txt --verbose=1 /home/build/openwrt/staging_dir/toolchain-mips_24kc_gcc-8.4.0_musl/bin/mips-openwrt-linux-musl-gcc -I/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/include -O3 -DNDEBUG -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/usr/lib -L/home/build/openwrt/staging_dir/target-mips_24kc_musl/lib CMakeFiles/ustream-example-server.dir/ustream-example-server.c.o -o ustream-example-server -Wl,-rpath,/builds/openwrt/project/ustream-ssl/build libustream-ssl.so -lubox -lcrypto -lssl make[3]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' [100%] Built target ustream-example-server 
make[2]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' /home/build/openwrt/staging_dir/host/bin/cmake -E cmake_progress_start /builds/openwrt/project/ustream-ssl/build/CMakeFiles 0 make[1]: Leaving directory '/builds/openwrt/project/ustream-ssl/build' section_end:1624483189:step_script section_start:1624483189:cleanup_file_variables Cleaning up file based variables section_end:1624483189:cleanup_file_variables Job succeeded

================================================
FILE: common/buildlogger/internal/timestamper/timestamper.go
================================================
package timestamper

import (
	"bytes"
	"io"
	"math"
	"time"
)

const (
	StdoutType StreamType = 'O'
	StderrType StreamType = 'E'

	PartialLineType LineType = '+'
	FullLineType    LineType = ' '

	// hextable doubles as a decimal digit table: indexes 0-9 are '0'-'9',
	// used both for the hex-encoded stream number and for the
	// fractional-second digits written by writeHeader.
	hextable = "0123456789abcdef"

	// bufSize is the amount of data this implementation will buffer
	// when no newline character is found. It is _not_ the maximum line length
	// any consumer of the logs will receive.
	bufSize = 8 * 1024

	// fracs is the nanosecond length we append
	fracs = 6

	// format is used only for its length: it is a template of the fixed-width
	// timestamp (including the trailing space) that writeHeader produces.
	format = "YYYY-mm-ddTHH:MM:SS.123456Z "
)

type (
	StreamType byte
	LineType   byte
)

var (
	// now is a variable so tests can substitute a deterministic clock.
	now = func() time.Time { return time.Now().UTC() }

	lineEscape = []byte("\n")
)

// Logger implements the standard io.Write interface and adds lightweight
// metadata in the form of:
//
//	<date> <stream-number><stream-type><line-type><message>
//
// Where:
// - <date> is a RFC3339 Nano formatted date
// - <stream-number> is a 2-digit hex encoded user provided stream identifier
// - <stream-type> is either 'stdout' or 'stderr'
// - <line-type> is either ' ' (no-op) or '+' (append line to last line)
// - <message> is a user provided message.
//
// This format is intended to be well suited to CI/CD logs, where timed output
// can help determine the duration of executed commands.
//
// A new log line is emitted for each new-line character (\n) found within data
// provided to Write().
//
// A new log line is also emitted for the last carriage return (\r) in calls to
// Write() that don't contain a new-line character. Such lines are often used
// to display progress bars, so having them "flushed" to the underlying stream
// can help with live log viewing.
type Logger struct {
	buf bytes.Buffer // pending data of a line for which no newline has been seen yet
	w   io.Writer

	// bufStream is the reusable header buffer:
	// [optional timestamp + space]<stream-number(2 hex)><stream-type><line-type>
	bufStream []byte
	timeLen   int // length of the timestamp portion; 0 when timestamps are disabled
	timestamp bool
}

// New returns a Logger writing to w. streamType and streamNumber identify the
// stream in every emitted header; timestamp controls whether each header is
// prefixed with a fixed-width RFC3339 timestamp.
func New(w io.Writer, streamType StreamType, streamNumber uint8, timestamp bool) *Logger {
	l := &Logger{
		w:         w,
		timestamp: timestamp,
	}

	if timestamp {
		l.timeLen = len(format)
	}

	// Pre-build the constant tail of the header once; writeHeader only
	// rewrites the timestamp portion and the line-type byte.
	l.bufStream = make([]byte, l.timeLen+4)
	if timestamp {
		l.bufStream[l.timeLen-1] = ' '
	}
	l.bufStream[l.timeLen+0] = hextable[streamNumber>>4]
	l.bufStream[l.timeLen+1] = hextable[streamNumber&0x0f]
	l.bufStream[l.timeLen+2] = byte(streamType)
	l.bufStream[l.timeLen+3] = byte(FullLineType)

	return l
}

// Write implements io.Writer. It processes p in three phases: complete lines
// (terminated by \n), then a trailing carriage-return flush, then buffering of
// any remainder. n always accounts only for bytes of p consumed.
func (l *Logger) Write(p []byte) (n int, err error) {
	n, err = l.writeLines(p)
	if err != nil {
		return n, err
	}

	nn, err := l.writeCarriageReturns(p[n:])
	n += nn
	if err != nil {
		return n, err
	}

	nn, err = l.buffer(p[n:])
	n += nn

	return n, err
}

// buffer is used when we have input data that contains no newline character.
//
// l.buf is filled with data until either a newline character appears or
// we exceed bufSize. When we exceed the buffer size, we flush a new line
// and write the buffer to the underlying writer directly. To indicate that
// this has occurred, we then set the append flag for the next line to be
// written.
//
// Because we write the buffer to the underling writer when the bufSize has
// been exceeded, bufSize is not indicative of the maximum line length a
// consumer will receive, it's only used internally so that this implementation
// doesn't need to have an infinite sized buffer.
func (l *Logger) buffer(p []byte) (n int, err error) {
	if len(p) == 0 {
		return 0, nil
	}

	// if we exceed our buffer size, write directly to underlying writer
	// nolint:nestif
	if len(p)+l.buf.Len() > bufSize {
		// an empty buffer has no header yet, so emit one directly
		if l.buf.Len() == 0 {
			if err := l.writeHeader(l.w); err != nil {
				return 0, err
			}
		}

		_, err := l.w.Write(l.buf.Bytes())
		if err != nil {
			return 0, err
		}
		l.buf.Reset()

		// ensure next write is a continuation
		l.bufStream[l.timeLen+3] = byte(PartialLineType)

		nn, err := l.w.Write(p)
		n += nn
		if err != nil {
			return n, err
		}

		_, err = l.w.Write(lineEscape)
		return n, err
	}

	// start new buffer
	if l.buf.Len() == 0 {
		if err := l.writeHeader(&l.buf); err != nil {
			return n, err
		}
	}

	// append to existing buffer
	return l.buf.Write(p)
}

// writeLines emits one headed log line per newline character found in p.
// If data was previously buffered (it already carries a header), the first
// line completes that buffered data without emitting a fresh header.
// Returns the number of bytes of p consumed (up to and including the last \n).
func (l *Logger) writeLines(p []byte) (n int, err error) {
	idx := bytes.IndexByte(p, '\n')
	if idx == -1 {
		return n, err
	}

	// flush the buffered (already headed) partial line, completed by p's
	// first line
	if l.buf.Len() > 0 {
		_, err := l.w.Write(l.buf.Bytes())
		if err != nil {
			return 0, err
		}
		l.buf.Reset()

		nn, err := l.w.Write(p[:idx+1])
		n += nn
		if err != nil {
			return n, err
		}
	}

	// each remaining full line gets its own header
	for {
		idx := bytes.IndexByte(p[n:], '\n')
		if idx == -1 {
			return n, err
		}

		if err := l.writeHeader(l.w); err != nil {
			return n, err
		}

		nn, err := l.w.Write(p[n : n+idx+1])
		n += nn
		if err != nil {
			return n, err
		}
	}
}

// writeCarriageReturns flushes data up to and including the last carriage
// return in p as its own line (appending \n), and marks the next header as a
// continuation. This makes progress-bar style output visible in live logs.
// p is the remainder after writeLines, so it contains no \n.
func (l *Logger) writeCarriageReturns(p []byte) (n int, err error) {
	idx := bytes.LastIndexByte(p, '\r')
	if idx == -1 {
		return n, err
	}

	if l.buf.Len() > 0 {
		// buffered data already has a header
		_, err := l.w.Write(l.buf.Bytes())
		if err != nil {
			return 0, err
		}
		l.buf.Reset()
	} else {
		if err := l.writeHeader(l.w); err != nil {
			return n, err
		}
	}

	// ensure next write is a continuation
	l.bufStream[l.timeLen+3] = byte(PartialLineType)

	nn, err := l.w.Write(p[n : n+idx+1])
	n += nn
	if err != nil {
		return n, err
	}

	_, err = l.w.Write(lineEscape)
	return n, err
}

// writeHeader writes the (optionally timestamped) header to w and resets the
// line-type byte back to FullLineType for the following header.
func (l *Logger) writeHeader(w io.Writer) error {
	if l.timestamp {
		t := now()

		// time.RFC3339 doesn't add nanosecond precision, and time.RFC3339Nano strips
		// trailing zeros. Whilst we could use a custom format, this
		// is slower, as Go has built-in optimizations for RFC3339. So here we use the
		// non-nano version, and then add nanoseconds to a fixed length. Fixed length
		// is important because it makes the logs easier for both a human and machine
		// to read.
		//
		// The returned slice is deliberately discarded: now() yields UTC, so the
		// 20-byte RFC3339 output fits within bufStream's capacity (timeLen+4) and
		// is written in place into its backing array.
		t.AppendFormat(l.bufStream[:0], time.RFC3339)

		// replace 'Z' for '.'
		l.bufStream[l.timeLen-3-fracs] = '.'

		// ensure nanoseconds doesn't exceed our fracs precision
		nanos := t.Nanosecond() / int(math.Pow10(9-fracs))

		// add nanoseconds and append leading zeros
		for i := 0; i < fracs; i++ {
			l.bufStream[l.timeLen-3-i] = hextable[nanos%10]
			nanos /= 10
		}

		// add 'Z' back
		l.bufStream[l.timeLen-2] = 'Z'

		// expand back to full header size
		l.bufStream = l.bufStream[:l.timeLen+4]
	}

	_, err := w.Write(l.bufStream)
	l.bufStream[l.timeLen+3] = byte(FullLineType)

	return err
}

// Close flushes any remaining buffered data as a final newline-terminated
// line. It does not close the underlying writer.
func (l *Logger) Close() error {
	if l.buf.Len() > 0 {
		l.buf.Write(lineEscape)
		_, err := l.w.Write(l.buf.Bytes())
		return err
	}

	return nil
}

================================================
FILE: common/buildlogger/internal/timestamper/timestamper_test.go
================================================
//go:build !integration

package timestamper

import (
	"bytes"
	"io"
	"strings"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

// setupDummyTime replaces the package-level clock with one that advances one
// hour per call; the returned func restores the real clock.
func setupDummyTime() func() {
	oldNow := now

	// error ignored: the literal is a known-valid RFC3339 timestamp
	pretend, _ := time.Parse(time.RFC3339, "2021-01-01T00:00:00.020010Z")
	now = func() time.Time {
		pretend = pretend.Add(time.Hour)
		return pretend.UTC()
	}

	return func() {
		now = oldNow
	}
}

// writeLines drives w with a representative mix of complete lines, split
// lines, carriage-return progress updates and a trailing unterminated line.
// nolint:errcheck
func writeLines(w io.Writer) {
	w.Write([]byte("PREFIX This is the beginning of a new line\n"))
	w.Write([]byte("PREFIX This is a split "))
	w.Write([]byte("up "))
	w.Write([]byte("line\n"))
	w.Write([]byte("PREFIX Progress bar: "))
	for i := 0; i < 10; i++ {
		w.Write([]byte(".\r"))
	}
	w.Write([]byte("Done.\r\n"))
	w.Write([]byte("PREFIX Another windows new-line\r\n"))
	w.Write([]byte("PREFIX multiple\nnew\nlines\nin\none\n"))
	w.Write([]byte("\nstart"))
	w.Write([]byte("\nend\n"))
	w.Write([]byte("PREFIX Eat carriages\r\r\r\r\r\r\r\n"))
	w.Write([]byte("PREFIX This is across\ntwo lines\n"))
	w.Write([]byte("PREFIX The end"))
}

// TestWithTimestamps verifies the exact headed output (timestamp, hex stream
// number ff, stderr type, line type) and that output is timezone-independent,
// since the logger always emits UTC.
func TestWithTimestamps(t *testing.T) {
	// reset local
	local := time.Local
	defer func() {
		time.Local = local
	}()

	for _, tz := range []string{"UTC", "Africa/Cairo", "US/Alaska"} {
		t.Run(tz, func(t *testing.T) {
			// change timezone
			loc, _ := time.LoadLocation(tz)
			time.Local = loc

			buf := new(bytes.Buffer)

			defer setupDummyTime()()
			w := New(buf, StderrType, 255, true)
			writeLines(w)
			w.Close()

			expected := []string{
				"2021-01-01T01:00:00.020010Z ffE PREFIX This is the beginning of a new line\n",
				"2021-01-01T02:00:00.020010Z ffE PREFIX This is a split up line\n",
				"2021-01-01T03:00:00.020010Z ffE PREFIX Progress bar: .\r\n",
				"2021-01-01T04:00:00.020010Z ffE+.\r\n",
				"2021-01-01T05:00:00.020010Z ffE+.\r\n",
				"2021-01-01T06:00:00.020010Z ffE+.\r\n",
				"2021-01-01T07:00:00.020010Z ffE+.\r\n",
				"2021-01-01T08:00:00.020010Z ffE+.\r\n",
				"2021-01-01T09:00:00.020010Z ffE+.\r\n",
				"2021-01-01T10:00:00.020010Z ffE+.\r\n",
				"2021-01-01T11:00:00.020010Z ffE+.\r\n",
				"2021-01-01T12:00:00.020010Z ffE+.\r\n",
				"2021-01-01T13:00:00.020010Z ffE+Done.\r\n",
				"2021-01-01T14:00:00.020010Z ffE PREFIX Another windows new-line\r\n",
				"2021-01-01T15:00:00.020010Z ffE PREFIX multiple\n",
				"2021-01-01T16:00:00.020010Z ffE new\n",
				"2021-01-01T17:00:00.020010Z ffE lines\n",
				"2021-01-01T18:00:00.020010Z ffE in\n",
				"2021-01-01T19:00:00.020010Z ffE one\n",
				"2021-01-01T20:00:00.020010Z ffE \n",
				"2021-01-01T21:00:00.020010Z ffE start\n",
				"2021-01-01T22:00:00.020010Z ffE end\n",
				"2021-01-01T23:00:00.020010Z ffE PREFIX Eat carriages\r\r\r\r\r\r\r\n",
				"2021-01-02T00:00:00.020010Z ffE PREFIX This is across\n",
				"2021-01-02T01:00:00.020010Z ffE two lines\n",
				"2021-01-02T02:00:00.020010Z ffE PREFIX The end\n",
			}

			assert.Equal(t, strings.Join(expected, ""), buf.String())
		})
	}
}

// TestWithoutTimestamp verifies that with timestamps disabled each line still
// carries the stream-number/stream-type/line-type header.
func TestWithoutTimestamp(t *testing.T) {
	buf := new(bytes.Buffer)

	defer setupDummyTime()()
	w := New(buf, StderrType, 255, false)
	writeLines(w)
	w.Close()

	expected := []string{
		"ffE PREFIX This is the beginning of a new line\n",
		"ffE PREFIX This is a split up line\n",
		"ffE PREFIX Progress bar: .\r\n",
		"ffE+.\r\n",
		"ffE+.\r\n",
		"ffE+.\r\n",
		"ffE+.\r\n",
		"ffE+.\r\n",
		"ffE+.\r\n",
		"ffE+.\r\n",
		"ffE+.\r\n",
		"ffE+.\r\n",
		"ffE+Done.\r\n",
		"ffE PREFIX Another windows new-line\r\n",
		"ffE PREFIX multiple\n",
		"ffE new\n",
		"ffE lines\n",
		"ffE in\n",
		"ffE one\n",
		"ffE \n",
		"ffE start\n",
		"ffE end\n",
		"ffE PREFIX Eat carriages\r\r\r\r\r\r\r\n",
		"ffE PREFIX This is across\n",
		"ffE two lines\n",
		"ffE PREFIX The end\n",
	}

	assert.Equal(t, strings.Join(expected, ""), buf.String())
}

// TestForcedFlush verifies that writes exceeding bufSize are flushed directly
// to the underlying writer and that subsequent output is marked as a
// continuation ('+') of the forced-flushed line.
// nolint:errcheck
func TestForcedFlush(t *testing.T) {
	buf := new(bytes.Buffer)

	defer setupDummyTime()()
	w := New(buf, StderrType, 255, true)
	w.Write([]byte("PREFIX This is the beginning of a new line\n"))
	w.Write([]byte("We have no new line character in this write"))
	w.Write([]byte("... The line is now flushed.\n"))
	w.Write([]byte("large continuous write incoming"))
	w.Write(bytes.Repeat([]byte{'.'}, bufSize))
	w.Write(bytes.Repeat([]byte{'.'}, bufSize+1))
	w.Write([]byte("ended\n"))
	w.Close()

	expected := []string{
		"2021-01-01T01:00:00.020010Z ffE PREFIX This is the beginning of a new line\n",
		"2021-01-01T02:00:00.020010Z ffE We have no new line character in this write... The line is now flushed.\n",
		"2021-01-01T03:00:00.020010Z ffE large continuous write incoming" + strings.Repeat(".", bufSize) + "\n",
		"2021-01-01T04:00:00.020010Z ffE+" + strings.Repeat(".", bufSize+1) + "\n",
		"2021-01-01T05:00:00.020010Z ffE+ended\n",
	}

	assert.Equal(t, strings.Join(expected, ""), buf.String())
}

// BenchmarkWithTimestamps measures throughput of timestamped line writes;
// SetBytes accounts for the emitted header plus line per Write.
func BenchmarkWithTimestamps(b *testing.B) {
	defer setupDummyTime()()
	w := New(io.Discard, StderrType, 255, true)

	headerSize := len(format) + 4
	line := []byte("This is the beginning of a new line\n")
	b.SetBytes(int64((headerSize + len(line)) * 200))
	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		for j := 0; j < 200; j++ {
			_, _ = w.Write(line)
		}
	}
}

================================================
FILE: common/buildlogger/internal/tokensanitizer/token_masker.go
================================================
// Package tokensanitizer implements a masking Writer, where specified prefixes are
// used to replace the alphabet of any word matching the pattern {prefix}{alphabet}
// with the word "[MASKED]".
//
// The allowed characters in the alphabet part of the token are:
// * Alphanumeric characters: 0-9, a-z, A-Z
// * Special characters: -, ., _, =
//
// To achieve masking over Write() boundaries, each prefix has its own writer.
// These writers are stacked, with each one calling the next, in length order,
// starting with the longest. This allows each writer to scan for their prefix
// in-turn, filtering data down to the next writer as required.
//
// Each tokensanitizer writer tracks when its prefix is being found, and scans until
// an unauthorized character is found. It then replaces the matching characters.
// If a full match isn't found, sends the matched bytes to the next writer unmodified.
// // The masking write for the `glpat-` prefix is created by default package tokensanitizer import ( "bytes" "io" ) var allTokenPrefixes = []string{ "gloas-", "gldt-", "glrt-", "glcbt-", "glrtr-", "glptt-", "glft-", "glimt-", "glagent-", "glsoat-", "glffct-", "_gitlab_session=", "gltok-", } // https://docs.gitlab.com/security/token_overview/#token-prefixes func DefaultTokenPrefixes(maskAllDefaultTokens bool) []string { tokenPrefixes := []string{"glpat-"} if maskAllDefaultTokens { tokenPrefixes = append(tokenPrefixes, allTokenPrefixes...) } return tokenPrefixes } var ( // alphabet is the character set we expect a token to comform to, not all // tokens will necessarily support all characters here, but the alphabet // should support all tokens. alphabet = [256]bool{ '-': true, '.': true, '0': true, '1': true, '2': true, '3': true, '4': true, '5': true, '6': true, '7': true, '8': true, '9': true, 'A': true, 'B': true, 'C': true, 'D': true, 'E': true, 'F': true, 'G': true, 'H': true, 'I': true, 'J': true, 'K': true, 'L': true, 'M': true, 'N': true, 'O': true, 'P': true, 'Q': true, 'R': true, 'S': true, 'T': true, 'U': true, 'V': true, 'W': true, 'X': true, 'Y': true, 'Z': true, '_': true, 'a': true, 'b': true, 'c': true, 'd': true, 'e': true, 'f': true, 'g': true, 'h': true, 'i': true, 'j': true, 'k': true, 'l': true, 'm': true, 'n': true, 'o': true, 'p': true, 'q': true, 'r': true, 's': true, 't': true, 'u': true, 'v': true, 'w': true, 'x': true, 'y': true, 'z': true, '=': true, } mask = []byte("[MASKED]") ) type TokenSanitizer struct { next io.WriteCloser } // New returns a new TokenSanitizer. // We only allow 10 token prefixes at the moment. 
Everything else is being silently ignored func New(w io.WriteCloser, prefixes [][]byte) *TokenSanitizer { m := &TokenSanitizer{} m.next = w max := len(prefixes) if max > 15 { max = 15 } for i := 0; i < max; i++ { m.next = &tokenSanitizer{next: m.next, prefix: prefixes[i]} } return m } func (m *TokenSanitizer) Write(p []byte) (n int, err error) { return m.next.Write(p) } // Close flushes any remaining data and closes the underlying writer. func (m *TokenSanitizer) Close() error { return m.next.Close() } type tokenSanitizer struct { prefix []byte matching int masked bool next io.WriteCloser } //nolint:gocognit func (m *tokenSanitizer) Write(p []byte) (n int, err error) { if len(p) == 0 { return 0, nil } // fast path: if the write is "[MASKED]" from an upper-level, don't bother // processing it, send it to the next writer. if bytes.Equal(p, mask) { return m.next.Write(p) } var last int for n < len(p) { if m.matching == len(m.prefix) { if alphabet[p[n]] { m.masked = true n++ last = n continue } if m.masked { m.masked = false _, err := m.next.Write(mask) if err != nil { return n, err } } m.matching = 0 } // optimization: use the faster IndexByte to jump to the start of a // potential prefix and if not found, advance the whole buffer. if m.matching == 0 { off := bytes.IndexByte(p[n:], m.prefix[0]) if off < 0 { n += len(p[n:]) break } if off > -1 { n += off } } // find out how much data we can match: the minimum of len(p) and the // remainder of the prefix. min := len(m.prefix[m.matching:]) if len(p[n:]) < min { min = len(p[n:]) } // try to match the next part of the prefix if bytes.HasPrefix(p[n:], m.prefix[m.matching:m.matching+min]) { // send any data that we've not sent prior to our match to the // next writer. 
_, err = m.next.Write(p[last:n]) if err != nil { return n, err } m.matching += min n += min last = n if m.matching == len(m.prefix) { _, err := m.next.Write(m.prefix[:m.matching]) if err != nil { return n, err } } continue } // if we didn't complete a prefix match, send the tracked bytes of // the prefix to the next writer unmodified. if m.matching > 0 { _, err = m.next.Write(m.prefix[:m.matching]) if err != nil { return n, err } // if the end of this prefix matches the start of it, try again if m.prefix[0] == p[n] { m.matching = 1 last++ n++ continue } } m.matching = 0 n++ } // any unmatched data is sent to the next writer _, err = m.next.Write(p[last:n]) return n, err } // Close flushes any remaining data and closes the underlying writer. func (m *tokenSanitizer) Close() error { var werr error if m.masked { // When a valid is located at the end of the whole packet, // we leave the Write function without actually writing the mask byte // not revealing any part of the token but not accurately masking it either. 
// This condition places in the Close function allows us to catch this scenario _, werr = m.next.Write(mask) } else { _, werr = m.next.Write(m.prefix[:m.matching]) } err := m.next.Close() if err == nil { return werr } return err } ================================================ FILE: common/buildlogger/internal/tokensanitizer/token_masker_test.go ================================================ //go:build !integration package tokensanitizer import ( "bytes" "fmt" "io" "math/rand" "strings" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common/buildlogger/internal" ) var words = []string{"Lorem", "ipsum", "odor", "amet", "consectetuer", "adipiscing", "elit", "Ad", "sagittis", "volutpat", "aptent", "augue", "dis", "dui", "primis", "laoreet", "taciti", "fusce", "sapien", "ullamcorper", "ex", "venenatis"} func TestTokenMasking(t *testing.T) { tests := map[string]struct { prefixes []string input string expected string }{ "simple prefix masking": { input: "Lorem ipsum dolor sit amet, ex ea commodo glpat-imperdiet in voluptate velit esse", expected: "Lorem ipsum dolor sit amet, ex ea commodo glpat-[MASKED] in voluptate velit esse", }, "prefix at the end of the line": { input: "Lorem ipsum dolor sit amet, ex ea commodo in voluptate velit esseglpat-imperdiet", expected: "Lorem ipsum dolor sit amet, ex ea commodo in voluptate velit esseglpat-[MASKED]", }, "prefix at the beginning of the line": { input: "glpat-imperdiet Lorem ipsum dolor sit amet, ex ea commodo in voluptate velit esse", expected: "glpat-[MASKED] Lorem ipsum dolor sit amet, ex ea commodo in voluptate velit esse", }, "prefix inside of the line": { input: "esseglpat-imperdiet=_-. 
end Lorem ipsum dolor sit amet, ex ea commodo in voluptate velit", expected: "esseglpat-[MASKED] end Lorem ipsum dolor sit amet, ex ea commodo in voluptate velit", }, "two prefix concatenate": { input: "glpat-impglpat-erdiet Lorem ipsum dolor sit amet, ex ea commodo in voluptate velit esse", expected: "glpat-[MASKED] Lorem ipsum dolor sit amet, ex ea commodo in voluptate velit esse", }, "multiple packets pat masking": { input: "glpat|-imperdiet Lorem ipsum dolor sit amet, ex ea commodo gl|pat-imperdiet in voluptate velit esse", expected: "glpat-[MASKED] Lorem ipsum dolor sit amet, ex ea commodo glpat-[MASKED] in voluptate velit esse", }, "second multiple packets pat masking": { input: "glpat| -imperdiet Lorem ipsum dolor sit amet", expected: "glpat -imperdiet Lorem ipsum dolor sit amet", }, "long input": { input: "Lorglpat-ipsu dolor sit amglpat-t, consglpat-ctglpat-tur adipiscing glpat-lit, sglpat-d do glpat-iusmod tglpat-mpor incididunt ut laborglpat-=_ glpat-t dolorglpat-=_ magna aliqua.", expected: "Lorglpat-[MASKED] dolor sit amglpat-[MASKED], consglpat-[MASKED] adipiscing glpat-[MASKED], sglpat-[MASKED] do glpat-[MASKED] tglpat-[MASKED] incididunt ut laborglpat-[MASKED] glpat-[MASKED] dolorglpat-[MASKED] magna aliqua.", }, "multiple packets long input": { input: "Lorglpat-ipsu dolor sit amglp|at-t, consglpat-ctg|lpat-tur adipiscing glpat-lit, sglpat-|d do glpat-iusmod t|glpat-mpor incididunt ut |laborglpat-=_ glpat-t dolorglpat-=_ magna aliqua.", expected: "Lorglpat-[MASKED] dolor sit amglpat-[MASKED], consglpat-[MASKED] adipiscing glpat-[MASKED], sglpat-[MASKED] do glpat-[MASKED] tglpat-[MASKED] incididunt ut laborglpat-[MASKED] glpat-[MASKED] dolorglpat-[MASKED] magna aliqua.", }, "second long input": { input: "Lorglpat- ipsu dolor sit amglpat-t, consglpat-ctglpat-tur adipiscing glpat-lit, sglpat-d do glpat-iusmod tglpat-mpor incididunt ut laborglpat-=_ glpat-t dolorglpat-=_ magna aliqua.", expected: "Lorglpat- ipsu dolor sit amglpat-[MASKED], 
consglpat-[MASKED] adipiscing glpat-[MASKED], sglpat-[MASKED] do glpat-[MASKED] tglpat-[MASKED] incididunt ut laborglpat-[MASKED] glpat-[MASKED] dolorglpat-[MASKED] magna aliqua.", }, "custom prefix with default one at the beginning of the line": { prefixes: []string{"token-"}, input: "token-imperdiet Lorem ipsum dolor sit amet, ex ea commodo in voluptate velit esse", expected: "token-[MASKED] Lorem ipsum dolor sit amet, ex ea commodo in voluptate velit esse", }, "custom prefix with default one multiple packets long input": { prefixes: []string{"tok-"}, input: "Lortok-ipsu dolor sit amt|ok-t, cons-ctg|lpat-tur adipiscing tok-lit, stok-|d gltok-test do tok-iusmod t|tok-mpor incididunt ut |labortok-=_ tok-t dolortok-=_ magna aliqua. Tglpat-llus orci ac auctor auguglpat-eee mauris auguglpat-wEr_ lorem", expected: "Lortok-[MASKED] dolor sit amtok-[MASKED], cons-ctglpat-[MASKED] adipiscing tok-[MASKED], stok-[MASKED] gltok-[MASKED] do tok-[MASKED] ttok-[MASKED] incididunt ut labortok-[MASKED] tok-[MASKED] dolortok-[MASKED] magna aliqua. 
Tglpat-[MASKED] orci ac auctor auguglpat-[MASKED] mauris auguglpat-[MASKED] lorem", }, "ignored sixteenth prefix and more": { prefixes: []string{"mask1-", "mask2-", "mask3-", "mask4-", "mask5-", "mask6-", "mask7-", "mask8-", "mask9-", "mask10-", "mask11-"}, input: "Lormask1-ipsu dolor sit amm|ask2-t, cons-ctg|lpat-tur adipiscing mask5-lit, smask11-|d do mask7-iusmod t|glpat-mpor incididunt ut |labormask10-=_ mask9-t", expected: "Lormask1-[MASKED] dolor sit ammask2-[MASKED], cons-ctglpat-[MASKED] adipiscing mask5-[MASKED], smask11-d do mask7-iusmod tglpat-[MASKED] incididunt ut labormask10-=_ mask9-t", }, } for tn, tc := range tests { t.Run(tn, func(t *testing.T) { buf := new(bytes.Buffer) m := New(internal.NewNopCloser(buf), internal.Unique(append(tc.prefixes, DefaultTokenPrefixes(true)...))) parts := bytes.Split([]byte(tc.input), []byte{'|'}) for _, part := range parts { n, err := m.Write(part) require.NoError(t, err) assert.Equal(t, len(part), n) } require.NoError(t, m.Close()) assert.Equal(t, tc.expected, buf.String()) }) } } func BenchmarkTokenMaskingPerformance(b *testing.B) { prefixes := DefaultTokenPrefixes(true) paragraphs := map[string]struct { input string }{ "100K words": { input: generateParagraph(100000, prefixes, words), }, "300K words": { input: generateParagraph(300000, prefixes, words), }, "800K words": { input: generateParagraph(800000, prefixes, words), }, "1.5M words": { input: generateParagraph(1500000, prefixes, words), }, "5M words": { input: generateParagraph(5000000, prefixes, words), }, } tests := map[string]struct { defaultToken []string // expected string }{ "one default token": { defaultToken: prefixes[:1], }, "two default tokens": { defaultToken: prefixes[:2], }, "four default tokens": { defaultToken: prefixes[:4], }, "all but one default tokens": { defaultToken: prefixes[:len(prefixes)-1], }, "all default tokens": { defaultToken: prefixes, }, } for pn, pc := range paragraphs { for tn, tc := range tests { b.Run(fmt.Sprintf("%s_%s", pn, 
tn), func(b *testing.B) { b.ResetTimer() b.ReportAllocs() for n := 0; n < b.N; n++ { m := New(internal.NewNopCloser(io.Discard), internal.Unique(tc.defaultToken)) n, err := m.Write([]byte(pc.input)) b.SetBytes(int64(n)) require.NoError(b, err) require.NoError(b, m.Close()) assert.Equal(b, len([]byte(pc.input)), n) } }) } } } func BenchmarkTokenMaskingDuration(b *testing.B) { prefixes := DefaultTokenPrefixes(true) input := generateParagraph(5000000, prefixes, words) b.ResetTimer() b.ReportAllocs() for n := 0; n < b.N; n++ { m := New(internal.NewNopCloser(io.Discard), internal.Unique(prefixes)) n, err := m.Write([]byte(input)) b.SetBytes(int64(n)) require.NoError(b, err) require.NoError(b, m.Close()) assert.Equal(b, len([]byte(input)), n) } } func generateParagraph(numberOfWords int, token, wordPool []string) string { words := append([]string{}, wordPool...) sb := strings.Builder{} for _, tok := range token { words = append(words, fmt.Sprintf("%slorem", tok)) } for i := 0; i < numberOfWords; i++ { if i > 0 { sb.WriteString(" ") } sb.WriteString(words[rand.Intn(len(words))]) } return sb.String() } ================================================ FILE: common/buildlogger/internal/unique.go ================================================ package internal import ( "cmp" "slices" "strings" ) func Unique(tokens []string) [][]byte { for idx, token := range tokens { tokens[idx] = strings.TrimSpace(token) } slices.SortFunc(tokens, func(a, b string) int { switch { case len(a) < len(b): return -1 case len(a) > len(b): return 1 } return cmp.Compare(a, b) }) compact := slices.Compact(tokens) unique := make([][]byte, 0, len(compact)) for _, token := range compact { if token == "" { continue } unique = append(unique, []byte(token)) } return unique } ================================================ FILE: common/buildlogger/internal/urlsanitizer/urlsanitizer.go ================================================ // Package urlsanitizer replaces sensitive parameter values with [MASKED]. 
// // This is achieved by extracting keys in the format of ?key= or &key= and if // the key is deemed sensitive, consumes the value that follows it. package urlsanitizer import ( "bytes" "io" "strings" "unicode" ) // tokenParamKeys are the param keys for sensitive tokens we sanitize (replace // with [MASKED]). var tokenParamKeys = map[string]struct{}{ // 20 characters, used for authenticating to GitLab "private_token": {}, // ~88 characters, a base64 encoded string of random 64 bytes "authenticity_token": {}, // 20 characters. RSS feed token. Unlikely to appear in a build log, but here for backwards compatibility. "rss_token": {}, // 64 characters, Amazon presigned signature hex encoded sha256 hmac "x-amz-signature": {}, // Amazon presigned URL credential is always in the format of // ////aws4_request. "x-amz-credential": {}, // Amazon temporary security token from STS. "x-amz-security-token": {}, } var mask = []byte("[MASKED]") type URLSanitizer struct { w io.WriteCloser match []byte masking bool } // New returns a new URL Sanitizer. func New(w io.WriteCloser) *URLSanitizer { var max int for token := range tokenParamKeys { if len(token) > max { max = len(token) + 1 } } return &URLSanitizer{w: w, match: make([]byte, 0, max)} } //nolint:gocognit func (s *URLSanitizer) Write(p []byte) (n int, err error) { var last int for n < len(p) { // if we're in masking mode, we throw away all bytes until we find // the end of the parameter we're masking. if s.masking { off := bytes.IndexFunc(p[n:], isParamEnd) if off == -1 { // no end found, so skip these bytes n += len(p[n:]) last = n break } else { // end found, so skip the bytes up until the match and write // [MASKED] in their place. n += off last += off s.masking = false _, err = s.w.Write(mask) if err != nil { return n, err } } } // if our match is at capacity (maximum token size), reset it and // continue looking for the next token. 
if len(s.match) == cap(s.match) { s.match = s.match[:0] } // fast path: if we're not matching any parameters, skip towards ? or & // if none found, we can bail early if len(s.match) == 0 { off := bytes.IndexAny(p[n:], "?&") if off == -1 { n += len(p[n:]) break } else { s.match = append(s.match, p[n+off]) n += off + 1 } } // all of p consumed, so break if n >= len(p) { break } // find any of key name off := bytes.IndexAny(p[n:], "=?&") // if not found, continue adding to key match if off == -1 { s.match = append(s.match, p[n]) n++ continue } // bail early if the key contains another param separator if p[n+off] == '?' || p[n+off] == '&' { s.match = s.match[:0] n += off continue } // bail early if key would exceed our known key sizes if off+len(s.match) > cap(s.match) { s.match = s.match[:0] n++ continue } key := append(s.match, p[n:n+off]...) //nolint:gocritic n += off + 1 // check if the key is one supported, and if so, write data until this // point and move to masking mode if _, ok := tokenParamKeys[strings.ToLower(string(key[1:]))]; ok { _, err = s.w.Write(p[last:n]) if err != nil { return n, err } last = n s.masking = true } // reset match s.match = s.match[:0] } if len(p[last:n]) > 0 { _, err = s.w.Write(p[last:n]) } return n, err } // Close flushes any remaining data and closes the underlying writer. func (s *URLSanitizer) Close() error { var werr error if s.masking { _, werr = s.w.Write(mask) } err := s.w.Close() if err == nil { return werr } return err } func isParamEnd(r rune) bool { // URL parameters cannot include certain characters without percent encoding them // but it's pointless following the actual spec, because nobody else does. // // Using the most common reserved and special characters we know wouldn't // be present in a URL param value is good enough: return r == '?' 
|| r == '&' || unicode.IsSpace(r) || unicode.IsControl(r) } ================================================ FILE: common/buildlogger/internal/urlsanitizer/urlsanitizer_test.go ================================================ //go:build !integration package urlsanitizer import ( "bytes" "strings" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common/buildlogger/internal" ) func TestMasking(t *testing.T) { tests := []struct { input string values []string expected string }{ { input: "no escaping at all http://example.org/?test=foobar", expected: "no escaping at all http://example.org/?test=foobar", }, { input: "multiple: &private_token=hello &?x-amz-security-token=hello &?x-amz-security-token=hello ?x-amz-security?x-amz-security-token=hello", expected: "multiple: &private_token=[MASKED] &?x-amz-security-token=[MASKED] &?x-amz-security-token=[MASKED] ?x-amz-security?x-amz-security-token=[MASKED]", }, { input: "above known key size: http://example.org/?this-is-a-really-really-long-key-name=foobar", expected: "above known key size: http://example.org/?this-is-a-really-really-long-key-name=foobar", }, { input: "http://example.com/?private_token=deadbeef sensitive URL at the start", expected: "http://example.com/?private_token=[MASKED] sensitive URL at the start", }, { input: "a sensitive URL at the end http://example.com/?authenticity_token=deadbeef", expected: "a sensitive URL at the end http://example.com/?authenticity_token=[MASKED]", }, { input: "a sensitive URL http://example.com/?rss_token=deadbeef in the middle", expected: "a sensitive URL http://example.com/?rss_token=[MASKED] in the middle", }, { input: "a sensitive URL http://example.com/?X-AMZ-sigNATure=deadbeef with mixed case", expected: "a sensitive URL http://example.com/?X-AMZ-sigNATure=[MASKED] with mixed case", }, { input: "a sensitive URL http://example.com/?param=second&x-amz-credential=deadbeef second param", expected: "a 
sensitive URL http://example.com/?param=second&x-amz-credential=[MASKED] second param", }, { input: "a sensitive URL http://example.com/?rss_token=hide&x-amz-credential=deadbeef both params", expected: "a sensitive URL http://example.com/?rss_token=[MASKED]&x-amz-credential=[MASKED] both params", }, { input: "a long sensitive URL http://example.com/?x-amz-credential=abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789abcdefghijklmnopqrstuvwxyz0123456789", expected: "a long sensitive URL http://example.com/?x-amz-credential=[MASKED]", }, { input: "a really long sensitive URL http://example.com/?x-amz-credential=" + strings.Repeat("0", 8*1024) + " that is still scrubbed", expected: "a really long sensitive URL http://example.com/?x-amz-credential=[MASKED] that is still scrubbed", }, { input: "spl|it sensit|ive UR|L http://example.com/?x-amz-cred|ential=abcdefghij|klmnopqrstuvwxyz01234567", expected: "split sensitive URL http://example.com/?x-amz-credential=[MASKED]", }, { input: "newline: http://example.com/?x-amz-credential=abc\nhttp://example.com/?x-amz-credential=abc", expected: "newline: http://example.com/?x-amz-credential=[MASKED]\nhttp://example.com/?x-amz-credential=[MASKED]", }, { input: "control character: http://example.com/?x-amz-credential=abc\bhttp://example.com/?x-amz-credential=abc", expected: "control character: http://example.com/?x-amz-credential=[MASKED]\bhttp://example.com/?x-amz-credential=[MASKED]", }, { input: "rss_token=notmasked http://example.com/?rss_token=!@#$A&x-amz-credential=abc&test=test", expected: "rss_token=notmasked http://example.com/?rss_token=[MASKED]&x-amz-credential=[MASKED]&test=test", }, { input: "query string with no value: http://example.com/?x-amz-credential=&private_token=gitlab", expected: "query string with no value: http://example.com/?x-amz-credential=[MASKED]&private_token=[MASKED]", }, { input: "invalid URL with double &: http://example.com/?x-amz-credential=abc&&private_token=gitlab", 
expected: "invalid URL with double &: http://example.com/?x-amz-credential=[MASKED]&&private_token=[MASKED]", }, { input: "invalid URL with double ?: http://example.com/?x-amz-credential=abc??private_token=gitlab", expected: "invalid URL with double ?: http://example.com/?x-amz-credential=[MASKED]??private_token=[MASKED]", }, { input: "split on &: http://example.com/|&|x-amz-cre|dential=abc", expected: "split on &: http://example.com/&x-amz-credential=[MASKED]", }, { input: "split on ?: http://example.com/|?|x-amz-cre|dential=abc", expected: "split on ?: http://example.com/?x-amz-credential=[MASKED]", }, { input: "split after ?: http://example.com/|?||x-amz-cre|dential=abc", expected: "split after ?: http://example.com/?x-amz-credential=[MASKED]", }, { input: "interweaved tokens: ?|one ?x-amz-credential=abc two=three ?|one=two &token &x-amz-credential=abc =token ?=", expected: "interweaved tokens: ?one ?x-amz-credential=[MASKED] two=three ?one=two &token &x-amz-credential=[MASKED] =token ?=", }, { input: "terminated before mask: ?x", expected: "terminated before mask: ?x", }, { input: "terminated before mask: ?x|-", expected: "terminated before mask: ?x-", }, { input: "terminated before mask: ?x-|", expected: "terminated before mask: ?x-", }, { input: "terminated before mask: ?x-amz-credential=", expected: "terminated before mask: ?x-amz-credential=[MASKED]", }, { input: "terminated before mask: ?x-amz-credential=|", expected: "terminated before mask: ?x-amz-credential=[MASKED]", }, } for _, tc := range tests { t.Run(tc.input, func(t *testing.T) { buf := new(bytes.Buffer) m := New(internal.NewNopCloser(buf)) parts := bytes.Split([]byte(tc.input), []byte{'|'}) for _, part := range parts { n, err := m.Write(part) require.NoError(t, err) assert.Equal(t, len(part), n) } require.NoError(t, m.Close()) assert.Equal(t, tc.expected, buf.String()) }) } } ================================================ FILE: common/buildlogger/mocks.go 
================================================ // Code generated by mockery; DO NOT EDIT. // github.com/vektra/mockery // template: testify package buildlogger import ( mock "github.com/stretchr/testify/mock" ) // NewMockTrace creates a new instance of MockTrace. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockTrace(t interface { mock.TestingT Cleanup(func()) }) *MockTrace { mock := &MockTrace{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockTrace is an autogenerated mock type for the Trace type type MockTrace struct { mock.Mock } type MockTrace_Expecter struct { mock *mock.Mock } func (_m *MockTrace) EXPECT() *MockTrace_Expecter { return &MockTrace_Expecter{mock: &_m.Mock} } // IsStdout provides a mock function for the type MockTrace func (_mock *MockTrace) IsStdout() bool { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for IsStdout") } var r0 bool if returnFunc, ok := ret.Get(0).(func() bool); ok { r0 = returnFunc() } else { r0 = ret.Get(0).(bool) } return r0 } // MockTrace_IsStdout_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'IsStdout' type MockTrace_IsStdout_Call struct { *mock.Call } // IsStdout is a helper method to define mock.On call func (_e *MockTrace_Expecter) IsStdout() *MockTrace_IsStdout_Call { return &MockTrace_IsStdout_Call{Call: _e.mock.On("IsStdout")} } func (_c *MockTrace_IsStdout_Call) Run(run func()) *MockTrace_IsStdout_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockTrace_IsStdout_Call) Return(b bool) *MockTrace_IsStdout_Call { _c.Call.Return(b) return _c } func (_c *MockTrace_IsStdout_Call) RunAndReturn(run func() bool) *MockTrace_IsStdout_Call { _c.Call.Return(run) return _c } // Write provides a mock function for the type MockTrace func (_mock *MockTrace) Write(bytes 
[]byte) (int, error) { ret := _mock.Called(bytes) if len(ret) == 0 { panic("no return value specified for Write") } var r0 int var r1 error if returnFunc, ok := ret.Get(0).(func([]byte) (int, error)); ok { return returnFunc(bytes) } if returnFunc, ok := ret.Get(0).(func([]byte) int); ok { r0 = returnFunc(bytes) } else { r0 = ret.Get(0).(int) } if returnFunc, ok := ret.Get(1).(func([]byte) error); ok { r1 = returnFunc(bytes) } else { r1 = ret.Error(1) } return r0, r1 } // MockTrace_Write_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Write' type MockTrace_Write_Call struct { *mock.Call } // Write is a helper method to define mock.On call // - bytes []byte func (_e *MockTrace_Expecter) Write(bytes interface{}) *MockTrace_Write_Call { return &MockTrace_Write_Call{Call: _e.mock.On("Write", bytes)} } func (_c *MockTrace_Write_Call) Run(run func(bytes []byte)) *MockTrace_Write_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 []byte if args[0] != nil { arg0 = args[0].([]byte) } run( arg0, ) }) return _c } func (_c *MockTrace_Write_Call) Return(n int, err error) *MockTrace_Write_Call { _c.Call.Return(n, err) return _c } func (_c *MockTrace_Write_Call) RunAndReturn(run func(bytes []byte) (int, error)) *MockTrace_Write_Call { _c.Call.Return(run) return _c } ================================================ FILE: common/buildtest/abort.go ================================================ package buildtest import ( "bytes" "context" "errors" "io" "os" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" ) type withContext struct { } func (c *withContext) WithContext(ctx context.Context) (context.Context, context.CancelFunc) { ctx, cancel := context.WithCancelCause(ctx) cancel(assert.AnError) return context.WithCancel(ctx) } //nolint:gocognit func RunBuildWithCancel(t *testing.T, config 
*common.RunnerConfig, setup BuildSetupFn) { abortIncludeStages := []common.BuildStage{ common.BuildStagePrepare, common.BuildStageGetSources, } abortExcludeStages := []common.BuildStage{ common.BuildStageRestoreCache, common.BuildStageDownloadArtifacts, common.BuildStageAfterScript, common.BuildStageArchiveOnSuccessCache, common.BuildStageArchiveOnFailureCache, common.BuildStageUploadOnFailureArtifacts, common.BuildStageUploadOnSuccessArtifacts, } cancelIncludeStages := []common.BuildStage{ common.BuildStagePrepare, common.BuildStageGetSources, common.BuildStageAfterScript, } cancelExcludeStages := []common.BuildStage{ common.BuildStageArchiveOnSuccessCache, common.BuildStageUploadOnSuccessArtifacts, common.BuildStageRestoreCache, common.BuildStageDownloadArtifacts, common.BuildStageArchiveOnFailureCache, common.BuildStageUploadOnFailureArtifacts, } tests := map[string]struct { setupFn func(*common.Build) onUserStep func(*common.Build, common.JobTrace) includesStage []common.BuildStage excludesStage []common.BuildStage includesContent []string expectedErr error }{ "job script timeout": { setupFn: func(build *common.Build) { build.Variables = append(build.Variables, spec.Variable{ Key: "RUNNER_SCRIPT_TIMEOUT", Value: "5s", }) }, includesStage: []common.BuildStage{ common.BuildStagePrepare, common.BuildStageGetSources, common.BuildStageAfterScript, }, excludesStage: []common.BuildStage{ common.BuildStageRestoreCache, common.BuildStageDownloadArtifacts, common.BuildStageArchiveOnSuccessCache, common.BuildStageArchiveOnFailureCache, common.BuildStageUploadOnFailureArtifacts, common.BuildStageUploadOnSuccessArtifacts, }, includesContent: []string{"job status timedout"}, expectedErr: &common.BuildError{FailureReason: common.JobExecutionTimeout}, }, "system interrupt": { onUserStep: func(build *common.Build, _ common.JobTrace) { build.SystemInterrupt <- os.Interrupt }, includesStage: abortIncludeStages, excludesStage: abortExcludeStages, expectedErr: 
&common.BuildError{FailureReason: common.RunnerSystemFailure}, }, "job is aborted": { onUserStep: func(_ *common.Build, trace common.JobTrace) { trace.Abort() }, includesStage: abortIncludeStages, excludesStage: abortExcludeStages, expectedErr: &common.BuildError{FailureReason: common.JobCanceled}, }, "job is canceling": { onUserStep: func(_ *common.Build, trace common.JobTrace) { trace.Cancel() }, includesStage: cancelIncludeStages, excludesStage: cancelExcludeStages, includesContent: []string{"job status canceled"}, expectedErr: &common.BuildError{FailureReason: common.JobCanceled}, }, } resp, err := common.GetRemoteLongRunningBuildWithAfterScript(config.Shell) require.NoError(t, err) for tn, tc := range tests { t.Run(tn, func(t *testing.T) { build := &common.Build{ Job: resp, Runner: config, SystemInterrupt: make(chan os.Signal, 1), } buf := new(bytes.Buffer) trace := &common.Trace{Writer: io.MultiWriter(buf, os.Stdout)} if tc.onUserStep != nil { done := OnUserStage(build, func() { tc.onUserStep(build, trace) }) defer done() } if setup != nil { setup(t, build) } if tc.setupFn != nil { tc.setupFn(build) } err := RunBuildWithTrace(t, build, trace) t.Log(buf.String()) assert.True(t, errors.Is(err, tc.expectedErr), "expected: %[1]T (%[1]v), got: %[2]T (%[2]v)", tc.expectedErr, err) for _, stage := range tc.includesStage { assert.Contains(t, buf.String(), common.GetStageDescription(stage)) } for _, stage := range tc.excludesStage { assert.NotContains(t, buf.String(), common.GetStageDescription(stage)) } for _, content := range tc.includesContent { assert.Contains(t, buf.String(), content) } }) } } func RunBuildWithExecutorCancel(t *testing.T, config *common.RunnerConfig, setup BuildSetupFn) { resp, err := common.GetRemoteLongRunningBuildWithAfterScript(config.Shell) require.NoError(t, err) build := &common.Build{ Job: resp, Runner: config, SystemInterrupt: make(chan os.Signal, 1), } build.ExecutorData = &withContext{} if setup != nil { setup(t, build) } buf := 
new(bytes.Buffer) trace := &common.Trace{Writer: io.MultiWriter(buf, os.Stdout)} err = RunBuildWithTrace(t, build, trace) t.Log(buf.String()) assert.ErrorIs(t, err, assert.AnError) } ================================================ FILE: common/buildtest/binary.go ================================================ package buildtest import ( "fmt" "os" "os/exec" "runtime" ) func MustBuildBinary(entrypoint string, binaryName string) string { if runtime.GOOS == "windows" { binaryName += ".exe" } cmd := exec.Command("go", "build", "-o", binaryName, entrypoint) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr fmt.Printf("Executing: %v\n", cmd) err := cmd.Run() if err != nil { panic("Error on executing go build for binary: " + entrypoint) } return binaryName } ================================================ FILE: common/buildtest/cleanup.go ================================================ package buildtest import ( "fmt" "testing" "github.com/stretchr/testify/assert" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" ) func RunBuildWithCleanupGitClone(t *testing.T, build *common.Build) { build.Variables = append( build.Variables, spec.Variable{Key: "GIT_STRATEGY", Value: "clone"}, spec.Variable{Key: "FF_ENABLE_JOB_CLEANUP", Value: "true"}, ) out, err := RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, "Cleaning up project directory and file based variables") } func RunBuildWithCleanupGitFetch(t *testing.T, build *common.Build, untrackedFilename string) { build.Variables = append( build.Variables, spec.Variable{Key: "GIT_STRATEGY", Value: "fetch"}, spec.Variable{Key: "FF_ENABLE_JOB_CLEANUP", Value: "true"}, ) out, err := RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, "Cleaning up project directory and file based variables") assert.Contains(t, out, fmt.Sprintf("Removing %s", untrackedFilename)) } func RunBuildWithCleanupNormalSubmoduleStrategy( t *testing.T, build 
*common.Build, untrackedFileName, untrackedFileInSubmodule string,
) {
	// Use a fetch checkout with the "normal" submodule strategy so that
	// cleanup has to remove untracked files from both the main project and
	// its first-level submodule.
	build.Variables = append(
		build.Variables,
		spec.Variable{Key: "GIT_STRATEGY", Value: "fetch"},
		spec.Variable{Key: "GIT_SUBMODULE_STRATEGY", Value: "normal"},
		spec.Variable{Key: "FF_ENABLE_JOB_CLEANUP", Value: "true"},
	)

	out, err := RunBuildReturningOutput(t, build)
	assert.NoError(t, err)
	// The cleanup stage must report the removal of each untracked file.
	assert.Contains(t, out, "Cleaning up project directory and file based variables")
	assert.Contains(t, out, fmt.Sprintf("Removing %s", untrackedFileName))
	assert.Contains(t, out, fmt.Sprintf("Removing %s", untrackedFileInSubmodule))
}

// RunBuildWithCleanupRecursiveSubmoduleStrategy runs a build with the
// "recursive" submodule strategy and FF_ENABLE_JOB_CLEANUP enabled, and
// asserts that untracked files at every submodule nesting level are removed.
func RunBuildWithCleanupRecursiveSubmoduleStrategy(
	t *testing.T,
	build *common.Build,
	untrackedFileName, untrackedFileInSubmodule, untrackedFileInSubSubmodule string,
) {
	build.Variables = append(
		build.Variables,
		spec.Variable{Key: "GIT_STRATEGY", Value: "fetch"},
		spec.Variable{Key: "GIT_SUBMODULE_STRATEGY", Value: "recursive"},
		spec.Variable{Key: "FF_ENABLE_JOB_CLEANUP", Value: "true"},
	)

	out, err := RunBuildReturningOutput(t, build)
	assert.NoError(t, err)
	assert.Contains(t, out, "Cleaning up project directory and file based variables")
	assert.Contains(t, out, fmt.Sprintf("Removing %s", untrackedFileName))
	assert.Contains(t, out, fmt.Sprintf("Removing %s", untrackedFileInSubmodule))
	assert.Contains(t, out, fmt.Sprintf("Removing %s", untrackedFileInSubSubmodule))
}

// GetNewUntrackedFileIntoSubmodulesCommands returns shell commands that create
// untracked files in the project root, in the gitlab-grack submodule, and in
// its nested sub-submodule. An empty argument skips the corresponding file,
// so callers can request any subset of the three.
func GetNewUntrackedFileIntoSubmodulesCommands(
	untrackedFile, untrackedFileInSubmodule, untrackedFileInSubSubmodule string,
) []string {
	var untrackedFilesResult []string

	if untrackedFile != "" {
		untrackedFilesResult = append(
			untrackedFilesResult,
			fmt.Sprintf("echo 'this is an untracked file' >> %s", untrackedFile),
		)
	}

	if untrackedFileInSubmodule != "" {
		untrackedFilesResult = append(
			untrackedFilesResult,
			fmt.Sprintf(
				"echo 'this is an untracked file in the submodule' >> gitlab-grack/%s",
				untrackedFileInSubmodule,
			))
	}

	if untrackedFileInSubSubmodule != "" {
		untrackedFilesResult = append(
			untrackedFilesResult,
			fmt.Sprintf(
				"echo 'this is an untracked file in the sub-submodule' >> gitlab-grack/tests/example/%s",
				untrackedFileInSubSubmodule,
			))
	}

	return untrackedFilesResult
}

================================================
FILE: common/buildtest/job_output_limit.go
================================================
package buildtest

import (
	"os"
	"strings"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/trace"
)

// RunRemoteBuildWithJobOutputLimitExceeded runs the output-limit scenarios
// against a remote successful build response.
func RunRemoteBuildWithJobOutputLimitExceeded(t *testing.T, config *common.RunnerConfig, setup BuildSetupFn) {
	runBuildWithJobOutputLimitExceeded(t, config, setup, common.GetRemoteSuccessfulBuild)
}

// RunBuildWithJobOutputLimitExceeded runs the output-limit scenarios against
// a local successful build response.
func RunBuildWithJobOutputLimitExceeded(t *testing.T, config *common.RunnerConfig, setup BuildSetupFn) {
	runBuildWithJobOutputLimitExceeded(t, config, setup, common.GetSuccessfulBuild)
}

// jobOutputLimitExceededTestCase describes one scenario: how the job response
// is built, how the live trace is interacted with while the job runs, and how
// the resulting error is asserted.
type jobOutputLimitExceededTestCase struct {
	jobResponse func(t *testing.T, g baseJobGetter) spec.Job
	handleTrace func(t *testing.T, done chan struct{}, traceBuffer *trace.Buffer, trace common.JobTrace)
	assertError func(t *testing.T, err error)
}

var jobOutputLimitExceededTestCases = map[string]jobOutputLimitExceededTestCase{
	"successful job": {
		jobResponse: func(t *testing.T, baseJobGetter baseJobGetter) spec.Job {
			return getJobResponseWithCommands(t, baseJobGetter, "echo Hello World", "exit 0")
		},
		handleTrace: func(t *testing.T, done chan struct{}, traceBuffer *trace.Buffer, trace common.JobTrace) {},
		assertError: func(t *testing.T, err error) {
			assert.NoError(t, err)
		},
	},
	"failed job": {
		jobResponse: func(t *testing.T, baseJobGetter baseJobGetter) spec.Job {
			return getJobResponseWithCommands(t, baseJobGetter, "echo Hello World", "exit 1")
		},
		handleTrace: func(t *testing.T, done chan struct{}, traceBuffer *trace.Buffer, trace common.JobTrace) {},
		assertError: func(t *testing.T, err error) {
			var
expectedErr *common.BuildError if assert.ErrorAs(t, err, &expectedErr) { assert.Equal(t, 1, expectedErr.ExitCode) assert.Empty(t, expectedErr.FailureReason) } }, }, "canceled job": { jobResponse: func(t *testing.T, baseJobGetter baseJobGetter) spec.Job { return getJobResponseWithCommands(t, baseJobGetter, "echo Hello World", "sleep 10", "exit 0") }, handleTrace: func(t *testing.T, done chan struct{}, traceBuffer *trace.Buffer, trace common.JobTrace) { for { b, berr := traceBuffer.Bytes(0, 1024*1024) require.NoError(t, berr) if strings.Contains(string(b), "Job's log exceeded limit of") { trace.Cancel() } select { case <-time.After(50 * time.Millisecond): case <-done: return } } }, assertError: func(t *testing.T, err error) { var expectedErr *common.BuildError if assert.ErrorAs(t, err, &expectedErr) { assert.Equal(t, 0, expectedErr.ExitCode) assert.Equal(t, common.JobCanceled, expectedErr.FailureReason) } }, }, } func runBuildWithJobOutputLimitExceeded( t *testing.T, config *common.RunnerConfig, setup BuildSetupFn, baseJob func() (spec.Job, error), ) { for tn, tt := range jobOutputLimitExceededTestCases { t.Run(tn, func(t *testing.T) { build := &common.Build{ Job: tt.jobResponse(t, baseJob), Runner: config, SystemInterrupt: make(chan os.Signal, 1), } if setup != nil { setup(t, build) } runBuildWithJobOutputLimitExceededCase(t, tt, build) }) } } func runBuildWithJobOutputLimitExceededCase(t *testing.T, tt jobOutputLimitExceededTestCase, build *common.Build) { traceBuffer, err := trace.New() require.NoError(t, err) traceBuffer.SetLimit(12) jobTrace := &common.Trace{Writer: traceBuffer} done := make(chan struct{}) defer close(done) go tt.handleTrace(t, done, traceBuffer, jobTrace) err = RunBuildWithTrace(t, build, jobTrace) b, berr := traceBuffer.Bytes(0, 1024*1024) require.NoError(t, berr) log := string(b) assert.NotContains(t, log, "with gitlab-runner") assert.Contains(t, log, "Job's log exceeded limit of 12 bytes.") assert.Contains(t, log, "Job execution will 
continue but no more output will be collected.") tt.assertError(t, err) } ================================================ FILE: common/buildtest/masking.go ================================================ package buildtest import ( "fmt" "math" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers/trace" ) func RunBuildWithMasking(t *testing.T, config *common.RunnerConfig, setup BuildSetupFn) { t.Run("success job", func(t *testing.T) { testBuildWithMasking(t, config, setup, false) }) t.Run("failed job (can mask error message)", func(t *testing.T) { resp, err := common.GetRemoteFailedBuild() require.NoError(t, err) // different platforms/executors report the error differently masks := []string{ "Job failed: exit code 1", "Job failed: exit status 1", "Job failed: run exit (exit code: 1)", "Job failed: command terminated with exit code 1", "Job failed: step \"user_script\": exec: executing script: exit status 1", } build := &common.Build{ Job: resp, Runner: config, } for idx, mask := range masks { build.Variables = append(build.Variables, spec.Variable{Key: fmt.Sprintf("MASK_ERROR_MSG_%d", idx), Value: mask, Masked: true}) } if setup != nil { setup(t, build) } buf, err := trace.New() require.NoError(t, err) defer buf.Close() err = build.Run(&common.Config{}, &common.Trace{Writer: buf}) assert.Error(t, err) buf.Finish() contents, err := buf.Bytes(0, math.MaxInt64) assert.NoError(t, err) for _, mask := range masks { assert.NotContains(t, string(contents), mask) } assert.Contains(t, string(contents), "ERROR: [MASKED]") }) } func RunBuildWithMaskingProxyExec(t *testing.T, config *common.RunnerConfig, setup BuildSetupFn) { testBuildWithMasking(t, config, setup, true) } func testBuildWithMasking(t *testing.T, config *common.RunnerConfig, setup BuildSetupFn, proxy bool) { config.ProxyExec = &proxy resp, 
err := common.GetRemoteSuccessfulBuildPrintVars( config.Shell, "MASKED_KEY", "CLEARTEXT_KEY", "MASKED_KEY_OTHER", "URL_MASKED_PARAM", "TOKEN_REVEALS", "ADD_MASK_SECRET", ) require.NoError(t, err) resp.Features.TokenMaskPrefixes = []string{"glpat-", "mytoken:", "foobar-"} if proxy { resp.Steps = append([]spec.Step{ { Name: "before_script", Script: []string{`echo "::add-mask::ADD_MASK_SECRET_VALUE"`}, When: spec.StepWhenAlways, }, }, resp.Steps...) } build := &common.Build{ Job: resp, Runner: config, } build.Variables = append( build.Variables, spec.Variable{Key: "MASKED_KEY", Value: "MASKED_VALUE", Masked: true}, spec.Variable{Key: "CLEARTEXT_KEY", Value: "CLEARTEXT_VALUE", Masked: false}, spec.Variable{Key: "MASKED_KEY_OTHER", Value: "MASKED_VALUE_OTHER", Masked: true}, spec.Variable{Key: "URL_MASKED_PARAM", Value: "https://example.com/?x-amz-credential=foobar"}, spec.Variable{Key: "TOKEN_REVEALS", Value: "glpat-abcdef mytoken:ghijklmno foobar-pqrstuvwxyz"}, // proxy exec masking spec.Variable{Key: "ADD_MASK_SECRET", Value: "ADD_MASK_SECRET_VALUE"}, ) if setup != nil { setup(t, build) } buf, err := trace.New() require.NoError(t, err) defer buf.Close() err = build.Run(&common.Config{}, &common.Trace{Writer: buf}) assert.NoError(t, err) buf.Finish() contents, err := buf.Bytes(0, math.MaxInt64) assert.NoError(t, err) assert.NotContains(t, string(contents), "MASKED_KEY=MASKED_VALUE") assert.Contains(t, string(contents), "MASKED_KEY=[MASKED]") assert.NotContains(t, string(contents), "MASKED_KEY_OTHER=MASKED_VALUE_OTHER") assert.NotContains(t, string(contents), "MASKED_KEY_OTHER=[MASKED]_OTHER") assert.Contains(t, string(contents), "MASKED_KEY_OTHER=[MASKED]") assert.NotContains(t, string(contents), "CLEARTEXT_KEY=[MASKED]") assert.Contains(t, string(contents), "CLEARTEXT_KEY=CLEARTEXT_VALUE") assert.NotContains(t, string(contents), "x-amz-credential=foobar") assert.Contains(t, string(contents), "x-amz-credential=[MASKED]") assert.NotContains(t, string(contents), 
"glpat-abcdef") assert.NotContains(t, string(contents), "mytoken:ghijklmno") assert.NotContains(t, string(contents), "foobar-pqrstuvwxyz") assert.Contains(t, string(contents), "glpat-[MASKED]") assert.Contains(t, string(contents), "mytoken:[MASKED]") assert.Contains(t, string(contents), "foobar-[MASKED]") if proxy { assert.Contains(t, string(contents), "ADD_MASK_SECRET=[MASKED]") } else { assert.Contains(t, string(contents), "ADD_MASK_SECRET=ADD_MASK_SECRET_VALUE") } } ================================================ FILE: common/buildtest/sections.go ================================================ package buildtest import ( "bytes" "regexp" "testing" "github.com/stretchr/testify/assert" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" ) func RunBuildWithSections(t *testing.T, build *common.Build) { build.Features.TraceSections = true build.Variables = append(build.Variables, spec.Variable{ Key: featureflags.ScriptSections, Value: "true", }) buf := new(bytes.Buffer) trace := &common.Trace{Writer: buf} assert.NoError(t, RunBuildWithTrace(t, build, trace)) // section_start:1627911560:section_27e4a11ba6450738[hide_duration=true,collapsed=true]\r\x1b[0K\x1b[32;1m$ echo Hello\n\t\t\t\t\tWorld\x1b[0;m\nHello World\n\x1b[0Ksection_end:1627911560:section_27e4a11ba6450738 assert.Regexp(t, regexp.MustCompile(`(?s)section_start:[0-9]+:section_script_step_[0-9]\[hide_duration=true,collapsed=true\]+.*Hello[\s\S]*?World.*section_end:[0-9]+:section_script_step_[0-9]`), buf.String()) } ================================================ FILE: common/buildtest/test.go ================================================ package buildtest import ( "bytes" "fmt" "net/url" "os" "strings" "testing" "time" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" ) const testTimeout = 30 * time.Minute type 
BuildSetupFn func(t *testing.T, build *common.Build)

// RunBuildReturningOutput runs the build, logs the captured trace, and
// returns it along with the build error.
func RunBuildReturningOutput(t *testing.T, build *common.Build) (string, error) {
	buf := new(bytes.Buffer)
	err := RunBuildWithTrace(t, build, &common.Trace{Writer: buf})
	output := buf.String()
	t.Log(output)

	return output, err
}

// RunBuildWithTrace runs the build with a default (empty) runner config,
// writing output to the given trace.
func RunBuildWithTrace(t *testing.T, build *common.Build, trace *common.Trace) error {
	return RunBuildWithOptions(t, build, trace, &common.Config{})
}

// RunBuildWithOptions runs the build with the given trace and config,
// failing the test if it does not finish within testTimeout.
func RunBuildWithOptions(t *testing.T, build *common.Build, trace *common.Trace, config *common.Config) error {
	timeoutTimer := time.AfterFunc(testTimeout, func() {
		// NOTE(review): FailNow is documented to only work from the goroutine
		// running the test function; calling it from this timer goroutine is
		// undefined per the testing package docs — confirm/replace if flaky.
		t.Log("Timed out")
		t.FailNow()
	})
	defer timeoutTimer.Stop()

	return build.Run(config, trace)
}

// RunBuild runs the build, streaming the trace to stdout.
func RunBuild(t *testing.T, build *common.Build) error {
	err := RunBuildWithTrace(t, build, &common.Trace{Writer: os.Stdout})
	return err
}

// OnStage executes the provided function when the provided stage is entered.
// The build's current stage is polled every 200ms; the returned function
// stops the polling goroutine and must be called by the caller (e.g. deferred).
func OnStage(build *common.Build, stage string, fn func()) func() {
	exit := make(chan struct{})

	// inStage reports whether the build has entered the requested stage,
	// invoking fn exactly once when it has.
	inStage := func() bool {
		currentStage := string(build.CurrentStage())
		if strings.HasPrefix(currentStage, stage) {
			fn()
			return true
		}
		return false
	}

	ticker := time.NewTicker(time.Millisecond * 200)
	go func() {
		defer ticker.Stop()
		for {
			if inStage() {
				return
			}
			select {
			case <-exit:
				return
			case <-ticker.C:
			}
		}
	}()

	return func() { close(exit) }
}

// OnUserStage executes the provided function when the CurrentStage() enters
// a non-predefined stage.
func OnUserStage(build *common.Build, fn func()) func() {
	return OnStage(build, "step_", fn)
}

// SetBuildFeatureFlag sets the given feature flag variable on the build,
// updating an already-present variable in place instead of appending a
// duplicate.
func SetBuildFeatureFlag(build *common.Build, flag string, value bool) {
	for i, v := range build.Variables {
		if v.Key == flag {
			// Assign through the slice index: `v` is a copy produced by the
			// range clause, so writing v.Value would silently drop the update.
			build.Variables[i].Value = fmt.Sprint(value)
			return
		}
	}

	build.Variables = append(build.Variables, spec.Variable{
		Key:   flag,
		Value: fmt.Sprint(value),
	})
}

type baseJobGetter func() (spec.Job, error)

// getJobResponseWithCommands is a wrapper that will decorate a JobResponse getter
// like common.GetRemoteSuccessfulBuild with a custom commands list
func getJobResponseWithCommands(t *testing.T, baseJobGetter baseJobGetter, commands ...string) spec.Job {
	jobResponse, err := baseJobGetter()
	require.NoError(t, err)

	jobResponse.Steps[0].Script = commands

	return jobResponse
}

// WithEachFeatureFlag runs a subtest for the on/off value for each flag
// provided, and allows a build object as part of the test to be decorated
// with the feature flag variable. With no flags, the inner test runs once
// with a no-op setup.
func WithEachFeatureFlag(t *testing.T, f func(t *testing.T, setup BuildSetupFn), flags ...string) {
	if len(flags) == 0 {
		t.Log("WithEachFeatureFlag: no feature flags provided. Running inner test with no feature flags.")
		f(t, func(t *testing.T, build *common.Build) {})
		return
	}

	for _, flag := range flags {
		for _, value := range []bool{false, true} {
			t.Run(fmt.Sprintf("%v=%v", flag, value), func(t *testing.T) {
				f(t, func(t *testing.T, build *common.Build) {
					SetBuildFeatureFlag(build, flag, value)
				})
			})
		}
	}
}

// injectJobToken injects a job token into an existing jobResponse by
// - setting the jobResponse's token
// - updating the jobResponse's gitInfo with an URL with the token
// - injecting a CI_JOB_TOKEN jobVariable
// It returns the new repo URL with the injected token.
func injectJobToken(t *testing.T, jobResponse *spec.Job, token string) *url.URL { repoURLWithToken := func(orgRepoURL, token string) *url.URL { u, err := url.Parse(orgRepoURL) require.NoError(t, err, "parsing original repo URL") u.User = url.UserPassword("gitlab-ci-token", token) return u }(jobResponse.GitInfo.RepoURL, token) jobResponse.Variables.Set(spec.Variable{Key: "CI_JOB_TOKEN", Value: token, Masked: true}) jobResponse.Token = token jobResponse.GitInfo.RepoURL = repoURLWithToken.String() return repoURLWithToken } // InjectJobTokenFromEnv injects a job token from the environment into an existing jobResponse. // It returns the token value and the new repo URL with the injected token. func InjectJobTokenFromEnv(t *testing.T, jobResponse *spec.Job, envVars ...string) (string, *url.URL) { if len(envVars) == 0 { envVars = []string{"GITLAB_TEST_TOKEN", "CI_JOB_TOKEN", "OUTER_CI_JOB_TOKEN"} } var token string for _, envVar := range envVars { if tok, ok := os.LookupEnv(envVar); ok { t.Log("using token from env var", envVar) token = tok break } } if token == "" { t.Fatalf("no token available, considered env vars: %q", envVars) } u := injectJobToken(t, jobResponse, token) return token, u } ================================================ FILE: common/buildtest/variables.go ================================================ package buildtest import ( "bytes" "path/filepath" "regexp" "runtime" "strings" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/shells" ) func RunBuildWithExpandedFileVariable(t *testing.T, config *common.RunnerConfig, setup BuildSetupFn) { resp, err := common.GetRemoteSuccessfulBuildPrintVars( config.Shell, "MY_FILE_VARIABLE", "MY_EXPANDED_FILE_VARIABLE", "RUNNER_TEMP_PROJECT_DIR", ) require.NoError(t, err) build := &common.Build{ Job: resp, Runner: config, } build.Variables = append( 
build.Variables, spec.Variable{Key: "MY_FILE_VARIABLE", Value: "FILE_CONTENTS", File: true}, spec.Variable{Key: "MY_EXPANDED_FILE_VARIABLE", Value: "${MY_FILE_VARIABLE}_FOOBAR"}, ) if setup != nil { setup(t, build) } out, err := RunBuildReturningOutput(t, build) require.NoError(t, err) matches := regexp.MustCompile(`RUNNER_TEMP_PROJECT_DIR=([^\$%].*)`).FindStringSubmatch(out) require.Equal(t, 2, len(matches)) assert.NotRegexp(t, "MY_EXPANDED_FILE_VARIABLE=.*FILE_CONTENTS_FOOBAR", out) if runtime.GOOS == "windows" { tmpPath := strings.TrimRight(matches[1], "\r") assert.Contains(t, out, "MY_EXPANDED_FILE_VARIABLE="+tmpPath+"\\MY_FILE_VARIABLE_FOOBAR") } else { assert.Contains(t, out, "MY_EXPANDED_FILE_VARIABLE="+matches[1]+"/MY_FILE_VARIABLE_FOOBAR") } } func RunBuildWithPassingEnvsMultistep(t *testing.T, config *common.RunnerConfig, setup BuildSetupFn) { formatter := shellFormatter(config.Shell) steps := []string{formatter.PipeVar("hello=world") + formatter.EnvName("GITLAB_ENV")} if config.Shell == "bash" { steps = append(steps, `echo 'executed=$(echo "yes")' >> $GITLAB_ENV`) } resp, err := common.GetRemoteBuildResponse(steps...) 
require.NoError(t, err) build := &common.Build{ Job: resp, Runner: config, } if runtime.GOOS == "linux" && config.Shell == shells.SNPwsh { build.Image.Name = common.TestPwshImage } dir := t.TempDir() build.Runner.RunnerSettings.BuildsDir = filepath.Join(dir, "build") build.Runner.RunnerSettings.CacheDir = filepath.Join(dir, "cache") build.Variables = append(build.Variables, spec.Variable{ Key: "existing", Value: "existingvalue", }) build.Steps = append( build.Steps, spec.Step{ Name: "custom-step", Script: []string{ `echo ` + formatter.EnvName("GITLAB_ENV"), `echo hellovalue=` + formatter.EnvName("hello"), `echo executed=` + formatter.EnvName("executed"), formatter.PipeVar("foo=bar") + formatter.EnvName("GITLAB_ENV"), }, When: spec.StepWhenOnSuccess, }, spec.Step{ Name: spec.StepNameAfterScript, Script: []string{ `echo foovalue=` + formatter.EnvName("foo"), `echo existing=` + formatter.EnvName("existing"), }, When: spec.StepWhenAlways, }, ) build.Cache = append(build.Cache, spec.Cache{ Key: "cache", Paths: spec.ArtifactPaths{"unknown/path/${foo}"}, Policy: spec.CachePolicyPullPush, }) if setup != nil { setup(t, build) } buf := new(bytes.Buffer) trace := &common.Trace{Writer: buf} assert.NoError(t, RunBuildWithTrace(t, build, trace)) contents := buf.String() assert.Contains(t, contents, "existing=existingvalue") assert.Contains(t, contents, "hellovalue=world") assert.Contains(t, contents, "foovalue=bar") assert.Contains(t, contents, "unknown/path/bar: no matching files") assert.NotContains(t, contents, "executed=yes") } func RunBuildWithPassingEnvsJobIsolation(t *testing.T, config *common.RunnerConfig, setup BuildSetupFn) { dir := t.TempDir() run := func(response spec.Job) string { build := &common.Build{ Job: response, Runner: config, } if runtime.GOOS == "linux" && config.Shell == shells.SNPwsh { build.Image.Name = common.TestPwshImage } dir := dir build.Runner.RunnerSettings.BuildsDir = filepath.Join(dir, "build") build.Runner.RunnerSettings.CacheDir = 
filepath.Join(dir, "cache")
		if setup != nil {
			setup(t, build)
		}

		buf := new(bytes.Buffer)
		trace := &common.Trace{Writer: buf}
		assert.NoError(t, RunBuildWithTrace(t, build, trace))

		return buf.String()
	}

	formatter := shellFormatter(config.Shell)

	// job1 exports a variable through the GITLAB_ENV dotenv mechanism; job2
	// then tries to read it back. Isolation holds when job2 cannot see it.
	job1, err := common.GetRemoteBuildResponse(formatter.PipeVar("job_isolation_test=not_isolated") + formatter.EnvName("GITLAB_ENV"))
	require.NoError(t, err)

	job2, err := common.GetRemoteBuildResponse(`echo job1_isolation=` + formatter.EnvName("job_isolation_test"))
	require.NoError(t, err)

	job1Output := run(job1)
	job2Output := run(job2)

	assert.Contains(t, job1Output, formatter.PipeVar("job_isolation_test=not_isolated")+formatter.EnvName("GITLAB_ENV"))
	assert.Contains(t, job2Output, "job1_isolation")
	// The value exported by job1 must not leak into job2's environment.
	assert.NotContains(t, job2Output, "job1_isolation=not_isolated")
}

// shellFormatter renders small shell snippets in the dialect of the
// configured shell (POSIX-like vs PowerShell).
type shellFormatter string

// EnvName returns the shell-specific syntax for referencing the environment
// variable `name`.
func (s shellFormatter) EnvName(name string) string {
	switch s {
	case shells.SNPwsh, shells.SNPowershell:
		return "$env:" + name
	default:
		return "$" + name
	}
}

// PipeVar returns an echo-and-append command prefix for `variable`; the
// caller concatenates the target file (typically the GITLAB_ENV reference).
func (s shellFormatter) PipeVar(variable string) string {
	return `echo '` + variable + `' >> `
}

================================================
FILE: common/command.go
================================================
package common

import (
	"github.com/urfave/cli"

	clihelpers "gitlab.com/gitlab-org/golang-cli-helpers"
)

// Commander executes the command with the cli.Context.
type Commander interface {
	Execute(c *cli.Context)
}

// CommanderFunc allows the registration of commands without having to explicitly implement
// the Commander interface for simple functions.
type CommanderFunc func(*cli.Context)

// Execute provides default implementation for Commander interface.
func (cf CommanderFunc) Execute(c *cli.Context) {
	cf(c)
}

// NewCommand constructs a command with the given name, usage, and flags.
func NewCommand(name, usage string, data Commander, flags ...cli.Flag) cli.Command {
	return cli.Command{
		Name:   name,
		Usage:  usage,
		Action: data.Execute,
		// Flags declared on the Commander struct via tags are appended to
		// any explicitly supplied flags.
		Flags: append(flags, clihelpers.GetFlagsFromStruct(data)...),
	}
}

// NewCommandWithSubcommands returns a command with the given name, usage, data, subcommands, and flags.
func NewCommandWithSubcommands(name, usage string, data Commander, hidden bool, subcommands []cli.Command, flags ...cli.Flag) cli.Command {
	return cli.Command{
		Name:        name,
		Usage:       usage,
		Action:      data.Execute,
		Flags:       append(flags, clihelpers.GetFlagsFromStruct(data)...),
		Hidden:      hidden,
		Subcommands: subcommands,
	}
}

================================================
FILE: common/config/runner/monitoring/job_queuing_durations.go
================================================
package monitoring

import (
	"fmt"
	"regexp"
	"time"

	"gitlab.com/gitlab-org/gitlab-runner/helpers/timeperiod"
)

// JobQueuingDurations is an ordered list of queuing-duration monitoring
// configurations.
type JobQueuingDurations []*JobQueuingDuration

// Compile compiles every entry, returning the first error annotated with the
// entry's index.
func (d JobQueuingDurations) Compile() error {
	var err error
	for id, q := range d {
		err = q.Compile()
		if err != nil {
			return fmt.Errorf("entry %d: %w", id, err)
		}
	}
	return nil
}

// GetActiveConfiguration returns the last entry whose time period matches
// now (later entries take precedence), or nil if none matches.
func (d JobQueuingDurations) GetActiveConfiguration() *JobQueuingDuration {
	for i := len(d) - 1; i >= 0; i-- {
		if d[i].InPeriod() {
			return d[i]
		}
	}
	return nil
}

// JobQueuingDuration configures a queuing-duration threshold that applies
// during the given time periods, optionally scoped to projects matching a
// regular expression.
type JobQueuingDuration struct {
	Periods   []string      `toml:"periods" long:"periods"`
	Timezone  string        `toml:"timezone" long:"timezone" json:",omitempty"`
	Threshold time.Duration `toml:"threshold" long:"threshold"`

	JobsRunningForProject string `toml:"jobs_running_for_project,omitempty" long:"jobs-running-for-project" json:",omitempty"`

	// compiled at Compile() time
	jobsRunningForProjectRx *regexp.Regexp
	timePeriod              *timeperiod.TimePeriod

	// timer is injectable for tests; defaults to time.Now.
	timer func() time.Time
}

// Compile parses the configured periods and project regexp, preparing the
// entry for InPeriod/JobsRunningForProjectMatched checks.
func (d *JobQueuingDuration) Compile() error {
	var err error

	if d.timer == nil {
		d.timer = time.Now
	}

	d.timePeriod, err = timeperiod.TimePeriodsWithTimer(d.Periods, d.Timezone, d.timer)
	if err != nil {
		return fmt.Errorf("periods: %w", err)
	}

	d.jobsRunningForProjectRx, err
= regexp.Compile(d.JobsRunningForProject) if err != nil { return fmt.Errorf("jobs_running_for_project: %w", err) } return nil } func (d *JobQueuingDuration) InPeriod() bool { return d.timePeriod.InPeriod() } func (d *JobQueuingDuration) JobsRunningForProjectMatched(s string) bool { // If regexp was invalid or not configured at all, we ignore this part of the check if d.jobsRunningForProjectRx == nil { return true } return d.jobsRunningForProjectRx.MatchString(s) } ================================================ FILE: common/config/runner/monitoring/job_queuing_durations_test.go ================================================ //go:build !integration package monitoring import ( "testing" "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestJobQueuingDuration_GetActiveConfiguration(t *testing.T) { newTimer := func(now time.Time) func() time.Time { return func() time.Time { return now } } noMatchingDefinition := func(t *testing.T, configuration *JobQueuingDuration) { assert.Nil(t, configuration) } tests := map[string]struct { periods [][]string timezone string assertConfiguration func(t *testing.T, configuration *JobQueuingDuration) }{ "no definitions": { timezone: "UTC", assertConfiguration: noMatchingDefinition, }, "no matching definitions": { periods: [][]string{ {"* * 10 * * * *"}, {"* * 08 * * * *"}, }, timezone: "UTC", assertConfiguration: noMatchingDefinition, }, "one matching definition": { periods: [][]string{ {"* * 10 * * * *"}, {"* * 15 * * * *"}, {"* * 08 * * * *"}, }, timezone: "UTC", assertConfiguration: func(t *testing.T, configuration *JobQueuingDuration) { assert.NotNil(t, configuration) assert.Len(t, configuration.Periods, 1) }, }, "two matching definitions": { periods: [][]string{ {"* * 10 * * * *"}, {"* * 15 * * * *", "1 2 * * * * *"}, {"* * 08 * * * *"}, {"* * 15 * * * *", "3 4 * * * * *"}, }, timezone: "UTC", assertConfiguration: func(t *testing.T, configuration *JobQueuingDuration) { assert.NotNil(t, 
configuration) assert.Len(t, configuration.Periods, 2) assert.Contains(t, configuration.Periods, "3 4 * * * * *") }, }, "definition matching in different time zone": { periods: [][]string{ {"* * 15 * * * *"}, }, timezone: "Europe/Warsaw", assertConfiguration: noMatchingDefinition, }, "empty periods field": { periods: [][]string{ {}, }, timezone: "UTC", assertConfiguration: noMatchingDefinition, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { var durations JobQueuingDurations parsedTime, err := time.Parse(time.RFC3339, "2006-01-02T15:04:05Z") require.NoError(t, err) for _, periods := range tt.periods { durations = append(durations, &JobQueuingDuration{ Periods: periods, Timezone: tt.timezone, timer: newTimer(parsedTime), }) } err = durations.Compile() assert.NoError(t, err) require.NotNil(t, tt.assertConfiguration, "missing assertion function") tt.assertConfiguration(t, durations.GetActiveConfiguration()) }) } } ================================================ FILE: common/config/runner/monitoring.go ================================================ package runner import ( "fmt" "gitlab.com/gitlab-org/gitlab-runner/common/config/runner/monitoring" ) type Monitoring struct { JobQueuingDurations monitoring.JobQueuingDurations `toml:"job_queuing_durations,omitempty" long:"job-queuing-duration" json:",omitempty"` } func (m *Monitoring) Compile() error { var err error if m.JobQueuingDurations != nil { err = m.JobQueuingDurations.Compile() if err != nil { return fmt.Errorf("compiling job_queuing_durations: %w", err) } } return nil } ================================================ FILE: common/config.go ================================================ package common import ( "bufio" "bytes" "encoding/json" "errors" "fmt" "math/big" "os" "path/filepath" "reflect" "slices" "strconv" "strings" "time" "golang.org/x/text/cases" "golang.org/x/text/language" "sigs.k8s.io/yaml" "github.com/BurntSushi/toml" "github.com/docker/docker/api/types/container" 
"github.com/docker/go-units" "github.com/sirupsen/logrus" api "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/util/intstr" "gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig" "gitlab.com/gitlab-org/gitlab-runner/common/config/runner" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers" "gitlab.com/gitlab-org/gitlab-runner/helpers/docker" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" "gitlab.com/gitlab-org/gitlab-runner/helpers/process" "gitlab.com/gitlab-org/gitlab-runner/helpers/timeperiod" "gitlab.com/gitlab-org/gitlab-runner/referees" ) type ( DockerPullPolicy = spec.PullPolicy DockerSysCtls map[string]string ) type KubernetesHookHandlerType string const ( PullPolicyAlways = "always" PullPolicyNever = "never" PullPolicyIfNotPresent = "if-not-present" DNSPolicyNone KubernetesDNSPolicy = "none" DNSPolicyDefault KubernetesDNSPolicy = "default" DNSPolicyClusterFirst KubernetesDNSPolicy = "cluster-first" DNSPolicyClusterFirstWithHostNet KubernetesDNSPolicy = "cluster-first-with-host-net" GenerateArtifactsMetadataVariable = "RUNNER_GENERATE_ARTIFACTS_METADATA" UnknownSystemID = "unknown" DefaultConnectionMaxAge = 15 * time.Minute ) const mask = "[MASKED]" var ( errPatchConversion = errors.New("converting patch to json") errPatchAmbiguous = errors.New("ambiguous patch: both patch path and patch provided") errPatchFileFail = errors.New("loading patch file") ) // InvalidTimePeriodsError represents that the time period specified is not valid. 
type InvalidTimePeriodsError struct { periods []string cause error } func NewInvalidTimePeriodsError(periods []string, cause error) *InvalidTimePeriodsError { return &InvalidTimePeriodsError{periods: periods, cause: cause} } func (e *InvalidTimePeriodsError) Error() string { return fmt.Sprintf("invalid time periods %v, caused by: %v", e.periods, e.cause) } func (e *InvalidTimePeriodsError) Is(err error) bool { _, ok := err.(*InvalidTimePeriodsError) return ok } func (e *InvalidTimePeriodsError) Unwrap() error { return e.cause } // GetPullPolicies returns a validated list of pull policies, falling back to a predefined value if empty, // or returns an error if the list is not valid func (c DockerConfig) GetPullPolicies() ([]DockerPullPolicy, error) { // Default policy is always if len(c.PullPolicy) == 0 { return []DockerPullPolicy{PullPolicyAlways}, nil } // Verify pull policies policies := make([]DockerPullPolicy, len(c.PullPolicy)) for idx, p := range c.PullPolicy { switch p { case PullPolicyAlways, PullPolicyIfNotPresent, PullPolicyNever: policies[idx] = DockerPullPolicy(p) default: return []DockerPullPolicy{}, fmt.Errorf("unsupported pull_policy config: %q", p) } } return policies, nil } // GetAllowedPullPolicies returns a validated list of allowed pull policies, // falling back to a predefined value if empty, or returns an error if the list is not valid func (c DockerConfig) GetAllowedPullPolicies() ([]DockerPullPolicy, error) { if len(c.AllowedPullPolicies) == 0 { return c.GetPullPolicies() } // Verify allowed pull policies policies := make([]DockerPullPolicy, len(c.AllowedPullPolicies)) for idx, p := range c.AllowedPullPolicies { switch p { case PullPolicyAlways, PullPolicyIfNotPresent, PullPolicyNever: policies[idx] = p default: return []DockerPullPolicy{}, fmt.Errorf("unsupported allowed_pull_policies config: %q", p) } } return policies, nil } func (c DockerConfig) IsUserAllowed(user string) bool { // default image user is allowed. 
if user == "" { return true } // if neither a user nor allowed-users have been specified in the runner config, any user is allowed. if len(c.AllowedUsers) == 0 && c.User == "" { return true } // if allowed-users was not configured, it defaults to the single user configured in the runner. allowedUsers := c.AllowedUsers if len(allowedUsers) == 0 { allowedUsers = []string{c.User} } return slices.Contains(allowedUsers, user) } func (c KubernetesConfig) GetAllowedPullPolicies() ([]api.PullPolicy, error) { if len(c.AllowedPullPolicies) == 0 { return c.GetPullPolicies() } // Verify allowed pull policies pullPolicies, err := c.ConvertFromDockerPullPolicy(c.AllowedPullPolicies) if err != nil { return nil, fmt.Errorf("allowed_pull_policies config: %w", err) } return pullPolicies, nil } type allowListKind string const ( allowListKindUser allowListKind = "user" allowListKindGroup allowListKind = "group" ) // parseID parses a numeric UID/GID string into an int64. func parseID(s string) (int64, error) { return strconv.ParseInt(s, 10, 64) } // allowListContainsID reports whether any entry in allowedList parses to the given id. // Non-numeric entries are logged as a warning and skipped. func allowListContainsID(id int64, kind allowListKind, allowedList []string) bool { for _, entry := range allowedList { entryID, err := parseID(entry) if err != nil { logrus.Warningf("ignoring non-numeric %s allowlist entry %q", kind, entry) continue } if entryID == id { return true } } return false } func (c KubernetesConfig) isUserOrGroupAllowed(idStr string, kind allowListKind, allowedList []string) error { // default image user is allowed. if idStr == "" { return nil } id, err := parseID(idStr) if err != nil { return fmt.Errorf("%s %q is invalid: %w", kind, idStr, err) } // Root requires explicit permission in allowlist, even if allowlist is empty. // Compare numerically so that "00", "000", etc. are all treated as UID/GID 0. 
if id == 0 { if allowListContainsID(0, kind, allowedList) { return nil } return fmt.Errorf("%s %q is not in the allowed list: %v", kind, idStr, allowedList) } // if no allowed-users/groups have been specified in the runner config, any non-root user is allowed. if len(allowedList) == 0 { return nil } if allowListContainsID(id, kind, allowedList) { return nil } return fmt.Errorf("%s %q is not in the allowed list: %v", kind, idStr, allowedList) } func (c KubernetesConfig) IsUserAllowed(user string) error { return c.isUserOrGroupAllowed(user, allowListKindUser, c.AllowedUsers) } func (c KubernetesConfig) IsGroupAllowed(group string) error { return c.isUserOrGroupAllowed(group, allowListKindGroup, c.AllowedGroups) } // StringOrArray implements UnmarshalTOML to unmarshal either a string or array of strings. type StringOrArray []string func (p *StringOrArray) UnmarshalTOML(data interface{}) error { switch v := data.(type) { case string: *p = StringOrArray{v} case []interface{}: for _, vv := range v { switch item := vv.(type) { case string: *p = append(*p, item) default: return fmt.Errorf( "cannot load value of type %s into a StringOrArray", reflect.TypeOf(item).String(), ) } } default: return fmt.Errorf("cannot load value of type %s into a StringOrArray", reflect.TypeOf(v).String()) } return nil } type DockerConfig struct { docker.Credentials Hostname string `toml:"hostname,omitempty" json:"hostname" long:"hostname" env:"DOCKER_HOSTNAME" description:"Custom container hostname"` Image string `toml:"image" json:"image" long:"image" env:"DOCKER_IMAGE" description:"Docker image to be used"` Runtime string `toml:"runtime,omitempty" json:"runtime" long:"runtime" env:"DOCKER_RUNTIME" description:"Docker runtime to be used"` Memory string `toml:"memory,omitempty" json:"memory" long:"memory" env:"DOCKER_MEMORY" description:"Memory limit (format: []). Unit can be one of b, k, m, or g. 
Minimum is 4M."` MemorySwap string `toml:"memory_swap,omitempty" json:"memory_swap" long:"memory-swap" env:"DOCKER_MEMORY_SWAP" description:"Total memory limit (memory + swap, format: []). Unit can be one of b, k, m, or g."` MemoryReservation string `toml:"memory_reservation,omitempty" json:"memory_reservation" long:"memory-reservation" env:"DOCKER_MEMORY_RESERVATION" description:"Memory soft limit (format: []). Unit can be one of b, k, m, or g."` CgroupParent string `toml:"cgroup_parent,omitempty" json:"cgroup_parent" long:"cgroup-parent" env:"DOCKER_CGROUP_PARENT" description:"String value containing the cgroup parent to use"` CPUSetCPUs string `toml:"cpuset_cpus,omitempty" json:"cpuset_cpus" long:"cpuset-cpus" env:"DOCKER_CPUSET_CPUS" description:"String value containing the cgroups CpusetCpus to use"` CPUSetMems string `toml:"cpuset_mems,omitempty" json:"cpuset_mems" long:"cpuset-mems" env:"DOCKER_CPUSET_MEMS" description:"String value containing the cgroups CpusetMems to use"` CPUS string `toml:"cpus,omitempty" json:"cpus" long:"cpus" env:"DOCKER_CPUS" description:"Number of CPUs"` CPUShares int64 `toml:"cpu_shares,omitzero" json:"cpu_shares" long:"cpu-shares" env:"DOCKER_CPU_SHARES" description:"Number of CPU shares"` DNS []string `toml:"dns,omitempty" json:"dns,omitempty" long:"dns" env:"DOCKER_DNS" description:"A list of DNS servers for the container to use"` DNSSearch []string `toml:"dns_search,omitempty" json:"dns_search,omitempty" long:"dns-search" env:"DOCKER_DNS_SEARCH" description:"A list of DNS search domains"` Privileged bool `toml:"privileged,omitzero" json:"privileged" long:"privileged" env:"DOCKER_PRIVILEGED" description:"Give extended privileges to container"` ServicesPrivileged *bool `toml:"services_privileged,omitempty" json:"services_privileged,omitempty" long:"services_privileged" env:"DOCKER_SERVICES_PRIVILEGED" description:"When set this will give or remove extended privileges to container services"` DisableEntrypointOverwrite bool 
`toml:"disable_entrypoint_overwrite,omitzero" json:"disable_entrypoint_overwrite" long:"disable-entrypoint-overwrite" env:"DOCKER_DISABLE_ENTRYPOINT_OVERWRITE" description:"Disable the possibility for a container to overwrite the default image entrypoint"` User string `toml:"user,omitempty" json:"user" long:"user" env:"DOCKER_USER" description:"Run all commands in the container as the specified user."` AllowedUsers []string `toml:"allowed_users,omitempty" json:"allowed_users,omitempty" long:"allowed_users" env:"DOCKER_ALLOWED_USERS" description:"List of allowed users under which to run commands in the build container."` GroupAdd []string `toml:"group_add" json:"group_add,omitempty" long:"group-add" env:"DOCKER_GROUP_ADD" description:"Add additional groups to join"` UsernsMode string `toml:"userns_mode,omitempty" json:"userns_mode" long:"userns" env:"DOCKER_USERNS_MODE" description:"User namespace to use"` CapAdd []string `toml:"cap_add" json:"cap_add,omitempty" long:"cap-add" env:"DOCKER_CAP_ADD" description:"Add Linux capabilities"` CapDrop []string `toml:"cap_drop" json:"cap_drop,omitempty" long:"cap-drop" env:"DOCKER_CAP_DROP" description:"Drop Linux capabilities"` OomKillDisable bool `toml:"oom_kill_disable,omitzero" json:"oom_kill_disable" long:"oom-kill-disable" env:"DOCKER_OOM_KILL_DISABLE" description:"Do not kill processes in a container if an out-of-memory (OOM) error occurs"` OomScoreAdjust int `toml:"oom_score_adjust,omitzero" json:"oom_score_adjust" long:"oom-score-adjust" env:"DOCKER_OOM_SCORE_ADJUST" description:"Adjust OOM score"` SecurityOpt []string `toml:"security_opt" json:"security_opt,omitempty" long:"security-opt" env:"DOCKER_SECURITY_OPT" description:"Security Options"` ServicesSecurityOpt []string `toml:"services_security_opt" json:"services_security_opt,omitempty" long:"services-security-opt" env:"DOCKER_SERVICES_SECURITY_OPT" description:"Security Options for container services"` Devices []string `toml:"devices" json:"devices,omitempty" 
long:"devices" env:"DOCKER_DEVICES" description:"Add a host device to the container"` DeviceCgroupRules []string `toml:"device_cgroup_rules,omitempty" json:"device_cgroup_rules,omitempty" long:"device-cgroup-rules" env:"DOCKER_DEVICE_CGROUP_RULES" description:"Add a device cgroup rule to the container"` Gpus string `toml:"gpus,omitempty" json:"gpus" long:"gpus" env:"DOCKER_GPUS" description:"Request GPUs to be used by Docker"` ServicesDevices map[string][]string `toml:"services_devices,omitempty" json:"services_devices,omitempty" long:"services_devices" env:"DOCKER_SERVICES_DEVICES" description:"A toml table/json object with the format key=values. Expose host devices to services based on image name."` DisableCache bool `toml:"disable_cache,omitzero" json:"disable_cache" long:"disable-cache" env:"DOCKER_DISABLE_CACHE" description:"Disable all container caching"` Volumes []string `toml:"volumes,omitempty" json:"volumes,omitempty" long:"volumes" env:"DOCKER_VOLUMES" description:"Bind-mount a volume and create it if it doesn't exist prior to mounting. Can be specified multiple times once per mountpoint, e.g. --docker-volumes 'test0:/test0' --docker-volumes 'test1:/test1'"` VolumeKeep bool `toml:"volume_keep,omitzero" json:"volume_keep" long:"volume-keep" env:"DOCKER_VOLUME_KEEP" description:"Do not delete volumes on container removal. Enabling can lead to increase in storage"` VolumeDriver string `toml:"volume_driver,omitempty" json:"volume_driver" long:"volume-driver" env:"DOCKER_VOLUME_DRIVER" description:"Volume driver to be used"` VolumeDriverOps map[string]string `toml:"volume_driver_ops,omitempty" json:"volume_driver_ops,omitempty" long:"volume-driver-ops" env:"DOCKER_VOLUME_DRIVER_OPS" description:"A toml table/json object with the format key=values. 
Volume driver ops to be specified"` CacheDir string `toml:"cache_dir,omitempty" json:"cache_dir" long:"cache-dir" env:"DOCKER_CACHE_DIR" description:"Directory where to store caches"` ExtraHosts []string `toml:"extra_hosts,omitempty" json:"extra_hosts,omitempty" long:"extra-hosts" env:"DOCKER_EXTRA_HOSTS" description:"Add a custom host-to-IP mapping"` VolumesFrom []string `toml:"volumes_from,omitempty" json:"volumes_from,omitempty" long:"volumes-from" env:"DOCKER_VOLUMES_FROM" description:"A list of volumes to inherit from another container"` NetworkMode string `toml:"network_mode,omitempty" json:"network_mode" long:"network-mode" env:"DOCKER_NETWORK_MODE" description:"Add container to a custom network"` IpcMode string `toml:"ipcmode,omitempty" json:"ipcmode" long:"ipcmode" env:"DOCKER_IPC_MODE" description:"Select IPC mode for container"` MacAddress string `toml:"mac_address,omitempty" json:"mac_address" long:"mac-address" env:"DOCKER_MAC_ADDRESS" description:"Container MAC address (e.g., 92:d0:c6:0a:29:33)"` Links []string `toml:"links,omitempty" json:"links,omitempty" long:"links" env:"DOCKER_LINKS" description:"Add link to another container"` Services []Service `toml:"services,omitempty" json:"services,omitempty" description:"Add service that is started with container"` ServicesLimit *int `toml:"services_limit,omitempty" json:"services_limit,omitempty" long:"services-limit" env:"DOCKER_SERVICES_LIMIT" description:"The maximum amount of services allowed"` ServiceMemory string `toml:"service_memory,omitempty" json:"service_memory" long:"service-memory" env:"DOCKER_SERVICE_MEMORY" description:"Service memory limit (format: []). Unit can be one of b (if omitted), k, m, or g. Minimum is 4M."` ServiceMemorySwap string `toml:"service_memory_swap,omitempty" json:"service_memory_swap" long:"service-memory-swap" env:"DOCKER_SERVICE_MEMORY_SWAP" description:"Service total memory limit (memory + swap, format: []). 
Unit can be one of b (if omitted), k, m, or g."` ServiceMemoryReservation string `toml:"service_memory_reservation,omitempty" json:"service_memory_reservation" long:"service-memory-reservation" env:"DOCKER_SERVICE_MEMORY_RESERVATION" description:"Service memory soft limit (format: []). Unit can be one of b (if omitted), k, m, or g."` ServiceCgroupParent string `toml:"service_cgroup_parent,omitempty" json:"service_cgroup_parent" long:"service-cgroup-parent" env:"DOCKER_SERVICE_CGROUP_PARENT" description:"String value containing the cgroup parent to use for service"` ServiceSlotCgroupTemplate string `toml:"service_slot_cgroup_template,omitempty" json:"service_slot_cgroup_template" long:"service-slot-cgroup-template" env:"DOCKER_SERVICE_SLOT_CGROUP_TEMPLATE" description:"Template for service slot-derived cgroup names (use ${slot} placeholder)"` ServiceCPUSetCPUs string `toml:"service_cpuset_cpus,omitempty" json:"service_cpuset_cpus" long:"service-cpuset-cpus" env:"DOCKER_SERVICE_CPUSET_CPUS" description:"String value containing the cgroups CpusetCpus to use for service"` ServiceCPUS string `toml:"service_cpus,omitempty" json:"service_cpus" long:"service-cpus" env:"DOCKER_SERVICE_CPUS" description:"Number of CPUs for service"` ServiceCPUShares int64 `toml:"service_cpu_shares,omitzero" json:"service_cpu_shares" long:"service-cpu-shares" env:"DOCKER_SERVICE_CPU_SHARES" description:"Number of CPU shares for service"` ServiceGpus string `toml:"service_gpus,omitempty" json:"service_gpus" long:"service_gpus" env:"DOCKER_SERVICE_GPUS" description:"Request GPUs to be used by Docker for services"` WaitForServicesTimeout int `toml:"wait_for_services_timeout,omitzero" json:"wait_for_services_timeout" long:"wait-for-services-timeout" env:"DOCKER_WAIT_FOR_SERVICES_TIMEOUT" description:"How long to wait for service startup"` AllowedImages []string `toml:"allowed_images,omitempty" json:"allowed_images,omitempty" long:"allowed-images" env:"DOCKER_ALLOWED_IMAGES" description:"Image 
allowlist"` AllowedPrivilegedImages []string `toml:"allowed_privileged_images,omitempty" json:"allowed_privileged_images,omitempty" long:"allowed-privileged-images" env:"DOCKER_ALLOWED_PRIVILEGED_IMAGES" description:"Privileged image allowlist"` AllowedPrivilegedServices []string `toml:"allowed_privileged_services,omitempty" json:"allowed_privileged_services,omitempty" long:"allowed-privileged-services" env:"DOCKER_ALLOWED_PRIVILEGED_SERVICES" description:"Privileged Service allowlist"` AllowedPullPolicies []DockerPullPolicy `toml:"allowed_pull_policies,omitempty" json:"allowed_pull_policies,omitempty" long:"allowed-pull-policies" env:"DOCKER_ALLOWED_PULL_POLICIES" description:"Pull policy allowlist"` AllowedServices []string `toml:"allowed_services,omitempty" json:"allowed_services,omitempty" long:"allowed-services" env:"DOCKER_ALLOWED_SERVICES" description:"Service allowlist"` PullPolicy StringOrArray `toml:"pull_policy,omitempty" json:"pull_policy,omitempty" long:"pull-policy" env:"DOCKER_PULL_POLICY" description:"Image pull policy: never, if-not-present, always"` Isolation string `toml:"isolation,omitempty" json:"isolation" long:"isolation" env:"DOCKER_ISOLATION" description:"Container isolation technology. Windows only"` ShmSize int64 `toml:"shm_size,omitempty" json:"shm_size" long:"shm-size" env:"DOCKER_SHM_SIZE" description:"Shared memory size for docker images (in bytes)"` Tmpfs map[string]string `toml:"tmpfs,omitempty" json:"tmpfs,omitempty" long:"tmpfs" env:"DOCKER_TMPFS" description:"A toml table/json object with the format key=values. When set this will mount the specified path in the key as a tmpfs volume in the main container, using the options specified as key. For the supported options, see the documentation for the unix 'mount' command"` ServicesTmpfs map[string]string `toml:"services_tmpfs,omitempty" json:"services_tmpfs,omitempty" long:"services-tmpfs" env:"DOCKER_SERVICES_TMPFS" description:"A toml table/json object with the format key=values. 
When set this will mount the specified path in the key as a tmpfs volume in all the service containers, using the options specified as key. For the supported options, see the documentation for the unix 'mount' command"` SysCtls DockerSysCtls `toml:"sysctls,omitempty" json:"sysctls,omitempty" long:"sysctls" env:"DOCKER_SYSCTLS" description:"Sysctl options, a toml table/json object of key=value. Value is expected to be a string."` HelperImage string `toml:"helper_image,omitempty" json:"helper_image" long:"helper-image" env:"DOCKER_HELPER_IMAGE" description:"[ADVANCED] Override the default helper image used to clone repos and upload artifacts"` HelperImageFlavor string `toml:"helper_image_flavor,omitempty" json:"helper_image_flavor" long:"helper-image-flavor" env:"DOCKER_HELPER_IMAGE_FLAVOR" description:"Set helper image flavor (alpine, ubuntu), defaults to alpine"` ContainerLabels map[string]string `toml:"container_labels,omitempty" json:"container_labels,omitempty" long:"container-labels" description:"A toml table/json object of key-value. Value is expected to be a string. When set, this will create containers with the given container labels. Environment variables will be substituted for values here."` EnableIPv6 bool `toml:"enable_ipv6,omitempty" json:"enable_ipv6" long:"enable-ipv6" description:"Enable IPv6 for automatically created networks. 
This is only takes affect when the feature flag FF_NETWORK_PER_BUILD is enabled."` Ulimit map[string]string `toml:"ulimit,omitempty" json:"ulimit,omitempty" long:"ulimit" env:"DOCKER_ULIMIT" description:"Ulimit options for container"` NetworkMTU int `toml:"network_mtu,omitempty" json:"network_mtu" long:"network-mtu" description:"MTU of the Docker network created for the job IFF the FF_NETWORK_PER_BUILD feature-flag was specified."` LogOptions map[string]string `toml:"log_options,omitempty" json:"log_options,omitempty" long:"log-options" env:"DOCKER_LOG_OPTIONS" description:"Log driver options for json-file logging"` } type InstanceConfig struct { AllowedImages []string `toml:"allowed_images,omitempty" json:",omitempty" description:"When VM Isolation is enabled, allowed images controls which images a job is allowed to specify"` UseCommonBuildDir bool `toml:"use_common_build_dir,omitempty" json:"use_common_build_dir,omitempty" description:"When use common build dir is enabled, all jobs will use the same build directory. 
This can only be enabled when VM isolation is enabled or a max use count is 1."` } type AutoscalerConfig struct { CapacityPerInstance int `toml:"capacity_per_instance,omitempty"` MaxUseCount int `toml:"max_use_count,omitempty"` MaxInstances int `toml:"max_instances,omitempty"` Plugin string `toml:"plugin,omitempty"` PluginConfig AutoscalerSettingsMap `toml:"plugin_config,omitempty"` ConnectorConfig ConnectorConfig `toml:"connector_config,omitempty"` Policy []AutoscalerPolicyConfig `toml:"policy,omitempty" json:",omitempty"` InstanceReadyCommand string `toml:"instance_ready_command,omitempty" json:",omitempty"` InstanceAcquireTimeout time.Duration `toml:"instance_acquire_timeout,omitempty" json:",omitempty"` UpdateInterval time.Duration `toml:"update_interval,omitempty" json:",omitempty"` UpdateIntervalWhenExpecting time.Duration `toml:"update_interval_when_expecting,omitempty" json:",omitempty"` DeletionRetryInterval time.Duration `toml:"deletion_retry_interval,omitempty" json:",omitempty"` ShutdownDeletionInterval time.Duration `toml:"shutdown_deletion_interval,omitempty" json:",omitempty"` ShutdownDeletionRetries int `toml:"shutdown_deletion_retries,omitempty" json:",omitempty"` FailureThreshold int `toml:"failure_threshold,omitempty" json:",omitempty"` ScaleThrottle AutoscalerScaleThrottle `toml:"scale_throttle,omitempty" json:",omitempty"` ReservationThrottling *bool `toml:"reservation_throttling,omitempty" json:",omitempty"` LogInternalIP bool `toml:"log_internal_ip,omitempty" json:",omitempty"` LogExternalIP bool `toml:"log_external_ip,omitempty" json:",omitempty"` DeleteInstancesOnShutdown bool `toml:"delete_instances_on_shutdown,omitempty" json:",omitempty"` VMIsolation VMIsolation `toml:"vm_isolation,omitempty"` StateStorage AutoscalerStateStorage `toml:"state_storage,omitempty" json:",omitempty"` // instance_operation_time_buckets was introduced some time ago, so we can't just delete it. // Someone can already depend on that setting. 
// Instead, it's now used as a way to define "default" buckets for the different operation // types, and more specific settings can be used to adjust what's needed to be adjusted. InstanceOperationTimeBuckets []float64 `toml:"instance_operation_time_buckets,omitempty" json:",omitempty"` InstanceCreationTimeBuckets []float64 `toml:"instance_creation_time_buckets,omitempty" json:",omitempty"` InstanceIsRunningTimeBuckets []float64 `toml:"instance_is_running_time_buckets,omitempty" json:",omitempty"` InstanceDeletionTimeBuckets []float64 `toml:"instance_deletion_time_buckets,omitempty" json:",omitempty"` InstanceReadinessTimeBuckets []float64 `toml:"instance_readiness_time_buckets,omitempty" json:",omitempty"` InstanceLifeDurationBuckets []float64 `toml:"instance_life_duration_buckets,omitempty" json:",omitempty"` } type AutoscalerStateStorage struct { Enabled bool `toml:"enabled,omitempty" json:",omitempty"` Dir string `toml:"dir,omitempty" json:",omitempty"` KeepInstanceWithAcquisitions bool `toml:"keep_instance_with_acquisitions,omitempty" json:",omitempty"` } type AutoscalerScaleThrottle struct { Limit int `toml:"limit,omitempty" json:",omitempty"` Burst int `toml:"burst,omitempty" json:",omitempty"` } func (c AutoscalerConfig) GetInstanceCreationTimeBuckets() []float64 { if len(c.InstanceCreationTimeBuckets) > 0 { return c.InstanceCreationTimeBuckets } return c.InstanceOperationTimeBuckets } func (c AutoscalerConfig) GetInstanceIsRunningTimeBuckets() []float64 { if len(c.InstanceIsRunningTimeBuckets) > 0 { return c.InstanceIsRunningTimeBuckets } return c.InstanceOperationTimeBuckets } func (c AutoscalerConfig) GetInstanceDeletionTimeBuckets() []float64 { if len(c.InstanceDeletionTimeBuckets) > 0 { return c.InstanceDeletionTimeBuckets } return c.InstanceOperationTimeBuckets } func (c AutoscalerConfig) GetInstanceReadinessTimeBuckets() []float64 { if len(c.InstanceReadinessTimeBuckets) > 0 { return c.InstanceReadinessTimeBuckets } return 
c.InstanceOperationTimeBuckets } type VMIsolation struct { Enabled bool `toml:"enabled,omitempty"` NestingHost string `toml:"nesting_host,omitempty"` NestingConfig AutoscalerSettingsMap `toml:"nesting_config,omitempty" json:",omitempty"` Image string `toml:"image,omitempty"` ConnectorConfig ConnectorConfig `toml:"connector_config,omitempty"` } type ConnectorConfig struct { OS string `toml:"os,omitempty"` Arch string `toml:"arch,omitempty"` Protocol string `toml:"protocol,omitempty"` ProtocolPort int `toml:"protocol_port,omitempty"` Username string `toml:"username,omitempty"` Password string `toml:"password,omitempty"` KeyPathname string `toml:"key_path,omitempty"` UseStaticCredentials bool `toml:"use_static_credentials,omitempty"` Keepalive time.Duration `toml:"keepalive,omitempty"` Timeout time.Duration `toml:"timeout,omitempty"` UseExternalAddr bool `toml:"use_external_addr,omitempty"` } type AutoscalerSettingsMap map[string]interface{} func (settings AutoscalerSettingsMap) JSON() ([]byte, error) { return json.Marshal(settings) } type AutoscalerPolicyConfig struct { Periods []string `toml:"periods,omitempty" json:",omitempty"` Timezone string `toml:"timezone,omitempty"` IdleCount int `toml:"idle_count,omitempty"` IdleTime time.Duration `toml:"idle_time,omitempty" json:",omitempty" jsonschema:"minimum=1000000000"` ScaleFactor float64 `toml:"scale_factor,omitempty"` ScaleFactorLimit int `toml:"scale_factor_limit,omitempty"` PreemptiveMode *bool `toml:"preemptive_mode,omitempty"` } func (policy *AutoscalerPolicyConfig) PreemptiveModeEnabled() bool { if policy.PreemptiveMode == nil { return policy.IdleCount > 0 } return *policy.PreemptiveMode } type DockerMachine struct { MaxGrowthRate int `toml:"MaxGrowthRate,omitzero" long:"max-growth-rate" env:"MACHINE_MAX_GROWTH_RATE" description:"Maximum machines being provisioned concurrently, set to 0 for unlimited"` IdleCount int `long:"idle-nodes" env:"MACHINE_IDLE_COUNT" description:"Maximum idle machines"` IdleScaleFactor 
float64 `long:"idle-scale-factor" env:"MACHINE_IDLE_SCALE_FACTOR" description:"(Experimental) Defines what factor of in-use machines should be used as current idle value, but never more then defined IdleCount. 0.0 means use IdleCount as a static number (defaults to 0.0). Must be defined as float number."` IdleCountMin int `long:"idle-count-min" env:"MACHINE_IDLE_COUNT_MIN" description:"Minimal number of idle machines when IdleScaleFactor is in use. Defaults to 1."` IdleTime int `toml:"IdleTime,omitzero" long:"idle-time" env:"MACHINE_IDLE_TIME" description:"Minimum time after node can be destroyed"` MaxBuilds int `toml:"MaxBuilds,omitzero" long:"max-builds" env:"MACHINE_MAX_BUILDS" description:"Maximum number of builds processed by machine"` MachineDriver string `long:"machine-driver" env:"MACHINE_DRIVER" description:"The driver to use when creating machine"` MachineName string `long:"machine-name" env:"MACHINE_NAME" description:"The template for machine name (needs to include %s)"` MachineOptions []string `long:"machine-options" json:",omitempty" env:"MACHINE_OPTIONS" description:"Additional machine creation options"` MachineOptionsWithName []string `long:"machine-options-with-name" json:",omitempty" env:"MACHINE_OPTIONS_WITH_NAME" description:"Template for additional options that may reference the machine name (need to include %s)"` OffPeakPeriods []string `toml:"OffPeakPeriods,omitempty" json:",omitempty" description:"Time periods when the scheduler is in the OffPeak mode. DEPRECATED"` // DEPRECATED OffPeakTimezone string `toml:"OffPeakTimezone,omitempty" description:"Timezone for the OffPeak periods (defaults to Local). DEPRECATED"` // DEPRECATED OffPeakIdleCount int `toml:"OffPeakIdleCount,omitzero" description:"Maximum idle machines when the scheduler is in the OffPeak mode. DEPRECATED"` // DEPRECATED OffPeakIdleTime int `toml:"OffPeakIdleTime,omitzero" description:"Minimum time after machine can be destroyed when the scheduler is in the OffPeak mode. 
DEPRECATED"` // DEPRECATED AutoscalingConfigs []*DockerMachineAutoscaling `toml:"autoscaling" json:",omitempty" description:"Ordered list of configurations for autoscaling periods (last match wins)"` } type DockerMachineShutdownDrain struct { Enabled bool `toml:"enabled,omitempty" json:"enabled,omitempty" description:"Enable draining idle machines on shutdown (default: false)"` Concurrency int `toml:"concurrency,omitempty" json:"concurrency,omitempty" description:"Number of concurrent machines to remove during shutdown drain (default: 3)"` MaxRetries int `toml:"max_retries,omitempty" json:"max_retries,omitempty" description:"Maximum number of retries for removing a machine during drain (default: 3)"` RetryBackoff time.Duration `toml:"retry_backoff,omitempty" json:"retry_backoff,omitempty" description:"Base backoff duration between retries during drain (default: 5s)"` } type DockerMachineAutoscaling struct { Periods []string `long:"periods" json:",omitempty" description:"List of crontab expressions for this autoscaling configuration"` Timezone string `long:"timezone" description:"Timezone for the periods (defaults to Local)"` IdleCount int `long:"idle-count" description:"Maximum idle machines when this configuration is active"` IdleScaleFactor float64 `long:"idle-scale-factor" description:"(Experimental) Defines what factor of in-use machines should be used as current idle value, but never more then defined IdleCount. 0.0 means use IdleCount as a static number (defaults to 0.0). Must be defined as float number."` IdleCountMin int `long:"idle-count-min" description:"Minimal number of idle machines when IdleScaleFactor is in use. 
Defaults to 1."`
	// NOTE(review): "and idle machine" in the description below likely means "an idle machine" — tag text left untouched.
	IdleTime        int `long:"idle-time" description:"Minimum time after which and idle machine can be destroyed when this configuration is active"`
	compiledPeriods *timeperiod.TimePeriod
}

// ParallelsConfig holds the Parallels executor settings: the source VM
// (base_name), template creation, snapshot behavior, guest time sync and
// the image allowlist.
type ParallelsConfig struct {
	BaseName         string   `toml:"base_name" json:"base_name" long:"base-name" env:"PARALLELS_BASE_NAME" description:"VM name to be used"`
	TemplateName     string   `toml:"template_name,omitempty" json:"template_name" long:"template-name" env:"PARALLELS_TEMPLATE_NAME" description:"VM template to be created"`
	DisableSnapshots bool     `toml:"disable_snapshots,omitzero" json:"disable_snapshots" long:"disable-snapshots" env:"PARALLELS_DISABLE_SNAPSHOTS" description:"Disable snapshoting to speedup VM creation"`
	TimeServer       string   `toml:"time_server,omitempty" json:"time_server" long:"time-server" env:"PARALLELS_TIME_SERVER" description:"Timeserver to sync the guests time from. Defaults to time.apple.com"`
	AllowedImages    []string `toml:"allowed_images,omitempty" json:"allowed_images,omitempty" long:"allowed-images" env:"PARALLELS_ALLOWED_IMAGES" description:"Image (base_name) allowlist"`
}

// VirtualBoxConfig holds the VirtualBox executor settings: source VM and
// snapshot, destination folder, snapshot behavior, image allowlist and the
// VM start type.
type VirtualBoxConfig struct {
	BaseName         string   `toml:"base_name" json:"base_name" long:"base-name" env:"VIRTUALBOX_BASE_NAME" description:"VM name to be used"`
	BaseSnapshot     string   `toml:"base_snapshot,omitempty" json:"base_snapshot" long:"base-snapshot" env:"VIRTUALBOX_BASE_SNAPSHOT" description:"Name or UUID of a specific VM snapshot to clone"`
	BaseFolder       string   `toml:"base_folder" json:"base_folder" long:"base-folder" env:"VIRTUALBOX_BASE_FOLDER" description:"Folder in which to save the new VM. If empty, uses VirtualBox default"`
	DisableSnapshots bool     `toml:"disable_snapshots,omitzero" json:"disable_snapshots" long:"disable-snapshots" env:"VIRTUALBOX_DISABLE_SNAPSHOTS" description:"Disable snapshoting to speedup VM creation"`
	AllowedImages    []string `toml:"allowed_images,omitempty" json:"allowed_images,omitempty" long:"allowed-images" env:"VIRTUALBOX_ALLOWED_IMAGES" description:"Image allowlist"`
	StartType        string   `toml:"start_type" json:"start_type" long:"start-type" env:"VIRTUALBOX_START_TYPE" description:"Graphical front-end type"`
}

// CustomConfig holds the custom executor settings: the config/prepare/run/
// cleanup executables with their arguments and per-stage timeouts (in
// seconds), plus the kill timeouts applied when stopping scripts.
type CustomConfig struct {
	// Stage executables and their arguments. Timeout fields are pointers so
	// "unset" can be distinguished from an explicit zero.
	ConfigExec         string   `toml:"config_exec,omitempty" json:"config_exec" long:"config-exec" env:"CUSTOM_CONFIG_EXEC" description:"Executable that allows to inject configuration values to the executor"`
	ConfigArgs         []string `toml:"config_args,omitempty" json:"config_args,omitempty" long:"config-args" description:"Arguments for the config executable"`
	ConfigExecTimeout  *int     `toml:"config_exec_timeout,omitempty" json:"config_exec_timeout,omitempty" long:"config-exec-timeout" env:"CUSTOM_CONFIG_EXEC_TIMEOUT" description:"Timeout for the config executable (in seconds)"`
	PrepareExec        string   `toml:"prepare_exec,omitempty" json:"prepare_exec" long:"prepare-exec" env:"CUSTOM_PREPARE_EXEC" description:"Executable that prepares executor"`
	PrepareArgs        []string `toml:"prepare_args,omitempty" json:"prepare_args,omitempty" long:"prepare-args" description:"Arguments for the prepare executable"`
	PrepareExecTimeout *int     `toml:"prepare_exec_timeout,omitempty" json:"prepare_exec_timeout,omitempty" long:"prepare-exec-timeout" env:"CUSTOM_PREPARE_EXEC_TIMEOUT" description:"Timeout for the prepare executable (in seconds)"`
	RunExec            string   `toml:"run_exec" json:"run_exec" long:"run-exec" env:"CUSTOM_RUN_EXEC" description:"Executable that runs the job script in executor"`
	RunArgs            []string `toml:"run_args,omitempty" json:"run_args,omitempty" long:"run-args" description:"Arguments for the run executable"`
	CleanupExec        string `toml:"cleanup_exec,omitempty" json:"cleanup_exec" long:"cleanup-exec" env:"CUSTOM_CLEANUP_EXEC" description:"Executable that cleanups after executor run"`
	CleanupArgs        []string `toml:"cleanup_args,omitempty" json:"cleanup_args,omitempty" long:"cleanup-args" description:"Arguments for the cleanup executable"`
	CleanupExecTimeout *int     `toml:"cleanup_exec_timeout,omitempty" json:"cleanup_exec_timeout,omitempty" long:"cleanup-exec-timeout" env:"CUSTOM_CLEANUP_EXEC_TIMEOUT" description:"Timeout for the cleanup executable (in seconds)"`
	// Kill timeouts: grace period after SIGTERM, then the hard force-kill window.
	GracefulKillTimeout *int `toml:"graceful_kill_timeout,omitempty" json:"graceful_kill_timeout,omitempty" long:"graceful-kill-timeout" env:"CUSTOM_GRACEFUL_KILL_TIMEOUT" description:"Graceful timeout for scripts execution after SIGTERM is sent to the process (in seconds). This limits the time given for scripts to perform the cleanup before exiting"`
	ForceKillTimeout    *int `toml:"force_kill_timeout,omitempty" json:"force_kill_timeout,omitempty" long:"force-kill-timeout" env:"CUSTOM_FORCE_KILL_TIMEOUT" description:"Force timeout for scripts execution (in seconds).
Counted from the force kill call; if process will be not terminated, Runner will abandon process termination and log an error"`
}

// GetPullPolicies returns a validated list of pull policies, falling back to a predefined value if empty,
// or returns an error if the list is not valid
func (c KubernetesConfig) GetPullPolicies() ([]api.PullPolicy, error) {
	// Default to cluster pull policy
	if len(c.PullPolicy) == 0 {
		return []api.PullPolicy{""}, nil
	}

	// Verify pull policies
	policies := make([]DockerPullPolicy, len(c.PullPolicy))
	for idx, policy := range c.PullPolicy {
		policies[idx] = DockerPullPolicy(policy)
	}

	pullPolicies, err := c.ConvertFromDockerPullPolicy(policies)
	if err != nil {
		return nil, fmt.Errorf("pull_policy config: %w", err)
	}

	return pullPolicies, nil
}

// ConvertFromDockerPullPolicy converts an array of DockerPullPolicy to an api.PullPolicy array
// or returns an error if the list contains invalid pull policies.
func (c KubernetesConfig) ConvertFromDockerPullPolicy(dockerPullPolicies []DockerPullPolicy) ([]api.PullPolicy, error) {
	policies := make([]api.PullPolicy, len(dockerPullPolicies))

	for idx, policy := range dockerPullPolicies {
		switch policy {
		case "":
			// Blank means "use the cluster default" and is passed through.
			policies[idx] = ""
		case PullPolicyAlways:
			policies[idx] = api.PullAlways
		case PullPolicyNever:
			policies[idx] = api.PullNever
		case PullPolicyIfNotPresent:
			policies[idx] = api.PullIfNotPresent
		default:
			return []api.PullPolicy{""}, fmt.Errorf("unsupported pull policy: %q", policy)
		}
	}

	return policies, nil
}

// GetUlimits converts the executor's ulimit map ("name" -> "soft[:hard]") into
// Docker API ulimit entries. When no hard limit is given, the soft limit is
// used for both values. An error is returned for non-integer limit values.
func (c *DockerConfig) GetUlimits() ([]*units.Ulimit, error) {
	ulimits := make([]*units.Ulimit, 0, len(c.Ulimit))
	for tp, limits := range c.Ulimit {
		ulimit := units.Ulimit{
			Name: tp,
		}

		before, after, ok := strings.Cut(limits, ":")

		var err error
		ulimit.Soft, err = strconv.ParseInt(before, 10, 64)
		if err != nil {
			return nil, fmt.Errorf("invalid soft limit value: %w", err)
		}

		ulimit.Hard = ulimit.Soft
		if ok {
			ulimit.Hard, err = strconv.ParseInt(after, 10, 64)
			if err != nil {
				// Fix: this branch parses the hard limit, but previously
				// reported "invalid soft limit value", making the error
				// misleading for "1024:abc"-style input.
				return nil, fmt.Errorf("invalid hard limit value: %w", err)
			}
		}

		ulimits = append(ulimits, &ulimit)
	}

	return ulimits, nil
}

// KubernetesDNSPolicy is the runner-config spelling of a Kubernetes pod DNS
// policy (none, default, cluster-first, cluster-first-with-host-net).
type KubernetesDNSPolicy string

// Get returns one of the predefined values in kubernetes notation or an error if the value is not matched.
// If the DNSPolicy is a blank string, returns the k8s default ("ClusterFirst")
func (p KubernetesDNSPolicy) Get() (api.DNSPolicy, error) {
	const defaultPolicy = api.DNSClusterFirst

	switch p {
	case "":
		logrus.Debugf("DNSPolicy string is blank, using %q as default", defaultPolicy)
		return defaultPolicy, nil
	case DNSPolicyNone:
		return api.DNSNone, nil
	case DNSPolicyDefault:
		return api.DNSDefault, nil
	case DNSPolicyClusterFirst:
		return api.DNSClusterFirst, nil
	case DNSPolicyClusterFirstWithHostNet:
		return api.DNSClusterFirstWithHostNet, nil
	}

	return "", fmt.Errorf("unsupported kubernetes-dns-policy: %q", p)
}

// KubernetesHostAliasesFlag accepts pod host aliases provided as a
// JSON-encoded array on the command line or in the environment.
type KubernetesHostAliasesFlag []KubernetesHostAliases

// UnmarshalFlag decodes the JSON-encoded flag value into the host aliases slice.
func (h *KubernetesHostAliasesFlag) UnmarshalFlag(value string) error {
	return json.Unmarshal([]byte(value), h)
}

// KubernetesConfig holds the configuration of the Kubernetes executor.
type KubernetesConfig struct {
	// Cluster connection settings; when Host is empty, auto-discovery is attempted.
	Host                        string `toml:"host" json:"host" long:"host" env:"KUBERNETES_HOST" description:"Optional Kubernetes master host URL (auto-discovery attempted if not specified)"`
	Context                     string `toml:"context,omitempty" json:"context" long:"context" env:"KUBECTL_CONTEXT" description:"Optional Kubernetes context name to use if host is not specified (kubectl config get-contexts)."`
	CertFile                    string `toml:"cert_file,omitempty" json:"cert_file" long:"cert-file" env:"KUBERNETES_CERT_FILE" description:"Optional Kubernetes master auth certificate"`
	KeyFile                     string `toml:"key_file,omitempty" json:"key_file" long:"key-file" env:"KUBERNETES_KEY_FILE" description:"Optional Kubernetes master auth private key"`
	CAFile                      string `toml:"ca_file,omitempty" json:"ca_file" long:"ca-file" env:"KUBERNETES_CA_FILE" description:"Optional Kubernetes master auth ca certificate"`
	BearerTokenOverwriteAllowed bool `toml:"bearer_token_overwrite_allowed"
json:"bearer_token_overwrite_allowed" long:"bearer_token_overwrite_allowed" env:"KUBERNETES_BEARER_TOKEN_OVERWRITE_ALLOWED" description:"Bool to authorize builds to specify their own bearer token for creation."`
	BearerToken               string `toml:"bearer_token,omitempty" json:"bearer_token" long:"bearer_token" env:"KUBERNETES_BEARER_TOKEN" description:"Optional Kubernetes service account token used to start build pods."`
	Image                     string `toml:"image" json:"image" long:"image" env:"KUBERNETES_IMAGE" description:"Default docker image to use for builds when none is specified"`
	Namespace                 string `toml:"namespace" json:"namespace" long:"namespace" env:"KUBERNETES_NAMESPACE" description:"Namespace to run Kubernetes jobs in"`
	NamespaceOverwriteAllowed string `toml:"namespace_overwrite_allowed" json:"namespace_overwrite_allowed" long:"namespace_overwrite_allowed" env:"KUBERNETES_NAMESPACE_OVERWRITE_ALLOWED" description:"Regex to validate 'KUBERNETES_NAMESPACE_OVERWRITE' value"`
	NamespacePerJob           bool   `toml:"namespace_per_job" json:"namespace_per_job" long:"namespace_per_job" env:"KUBERNETES_NAMESPACE_PER_JOB" description:"Use separate namespace for each job. If set, 'KUBERNETES_NAMESPACE' and 'KUBERNETES_NAMESPACE_OVERWRITE_ALLOWED' are ignored."`
	// Security toggles; pointers distinguish "unset" from explicit false.
	Privileged               *bool   `toml:"privileged,omitzero" json:"privileged,omitempty" long:"privileged" env:"KUBERNETES_PRIVILEGED" description:"Run all containers with the privileged flag enabled"`
	RuntimeClassName         *string `toml:"runtime_class_name,omitempty" json:"runtime_class_name,omitempty" long:"runtime-class-name" env:"KUBERNETES_RUNTIME_CLASS_NAME" description:"A Runtime Class to use for all created pods, errors if the feature is unsupported by the cluster"`
	AllowPrivilegeEscalation *bool   `toml:"allow_privilege_escalation,omitzero" json:"allow_privilege_escalation,omitempty" long:"allow-privilege-escalation" env:"KUBERNETES_ALLOW_PRIVILEGE_ESCALATION" description:"Run all containers with the security context allowPrivilegeEscalation flag enabled. When empty, it does not define the allowPrivilegeEscalation flag in the container SecurityContext and allows Kubernetes to use the default privilege escalation behavior."`
	// Build container resource limits/requests and their user-overwrite caps.
	CPULimit                                   string `toml:"cpu_limit,omitempty" json:"cpu_limit" long:"cpu-limit" env:"KUBERNETES_CPU_LIMIT" description:"The CPU allocation given to build containers"`
	CPULimitOverwriteMaxAllowed                string `toml:"cpu_limit_overwrite_max_allowed,omitempty" json:"cpu_limit_overwrite_max_allowed" long:"cpu-limit-overwrite-max-allowed" env:"KUBERNETES_CPU_LIMIT_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the cpu limit can be set to. Used with the KUBERNETES_CPU_LIMIT variable in the build."`
	CPURequest                                 string `toml:"cpu_request,omitempty" json:"cpu_request" long:"cpu-request" env:"KUBERNETES_CPU_REQUEST" description:"The CPU allocation requested for build containers"`
	CPURequestOverwriteMaxAllowed              string `toml:"cpu_request_overwrite_max_allowed,omitempty" json:"cpu_request_overwrite_max_allowed" long:"cpu-request-overwrite-max-allowed" env:"KUBERNETES_CPU_REQUEST_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the cpu request can be set to. Used with the KUBERNETES_CPU_REQUEST variable in the build."`
	MemoryLimit                                string `toml:"memory_limit,omitempty" json:"memory_limit" long:"memory-limit" env:"KUBERNETES_MEMORY_LIMIT" description:"The amount of memory allocated to build containers"`
	MemoryLimitOverwriteMaxAllowed             string `toml:"memory_limit_overwrite_max_allowed,omitempty" json:"memory_limit_overwrite_max_allowed" long:"memory-limit-overwrite-max-allowed" env:"KUBERNETES_MEMORY_LIMIT_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the memory limit can be set to. Used with the KUBERNETES_MEMORY_LIMIT variable in the build."`
	MemoryRequest                              string `toml:"memory_request,omitempty" json:"memory_request" long:"memory-request" env:"KUBERNETES_MEMORY_REQUEST" description:"The amount of memory requested from build containers"`
	MemoryRequestOverwriteMaxAllowed           string `toml:"memory_request_overwrite_max_allowed,omitempty" json:"memory_request_overwrite_max_allowed" long:"memory-request-overwrite-max-allowed" env:"KUBERNETES_MEMORY_REQUEST_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the memory request can be set to. Used with the KUBERNETES_MEMORY_REQUEST variable in the build."`
	EphemeralStorageLimit                      string `toml:"ephemeral_storage_limit,omitempty" json:"ephemeral_storage_limit" long:"ephemeral-storage-limit" env:"KUBERNETES_EPHEMERAL_STORAGE_LIMIT" description:"The amount of ephemeral storage allocated to build containers"`
	EphemeralStorageLimitOverwriteMaxAllowed   string `toml:"ephemeral_storage_limit_overwrite_max_allowed,omitempty" json:"ephemeral_storage_limit_overwrite_max_allowed" long:"ephemeral-storage-limit-overwrite-max-allowed" env:"KUBERNETES_EPHEMERAL_STORAGE_LIMIT_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the ephemeral limit can be set to. Used with the KUBERNETES_EPHEMERAL_STORAGE_LIMIT variable in the build."`
	EphemeralStorageRequest                    string `toml:"ephemeral_storage_request,omitempty" json:"ephemeral_storage_request" long:"ephemeral-storage-request" env:"KUBERNETES_EPHEMERAL_STORAGE_REQUEST" description:"The amount of ephemeral storage requested from build containers"`
	EphemeralStorageRequestOverwriteMaxAllowed string `toml:"ephemeral_storage_request_overwrite_max_allowed,omitempty" json:"ephemeral_storage_request_overwrite_max_allowed" long:"ephemeral-storage-request-overwrite-max-allowed" env:"KUBERNETES_EPHEMERAL_STORAGE_REQUEST_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the ephemeral storage request can be set to. Used with the KUBERNETES_EPHEMERAL_STORAGE_REQUEST variable in the build."`
	// Service container resource settings, mirroring the build container set.
	// NOTE(review): the "long" tags below mix dashes and underscores
	// ("service-ephemeral_storage-limit") — left as-is since changing them
	// would rename the CLI flags.
	ServiceCPULimit                                   string `toml:"service_cpu_limit,omitempty" json:"service_cpu_limit" long:"service-cpu-limit" env:"KUBERNETES_SERVICE_CPU_LIMIT" description:"The CPU allocation given to build service containers"`
	ServiceCPULimitOverwriteMaxAllowed                string `toml:"service_cpu_limit_overwrite_max_allowed,omitempty" json:"service_cpu_limit_overwrite_max_allowed" long:"service-cpu-limit-overwrite-max-allowed" env:"KUBERNETES_SERVICE_CPU_LIMIT_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the service cpu limit can be set to. Used with the KUBERNETES_SERVICE_CPU_LIMIT variable in the build."`
	ServiceCPURequest                                 string `toml:"service_cpu_request,omitempty" json:"service_cpu_request" long:"service-cpu-request" env:"KUBERNETES_SERVICE_CPU_REQUEST" description:"The CPU allocation requested for build service containers"`
	ServiceCPURequestOverwriteMaxAllowed              string `toml:"service_cpu_request_overwrite_max_allowed,omitempty" json:"service_cpu_request_overwrite_max_allowed" long:"service-cpu-request-overwrite-max-allowed" env:"KUBERNETES_SERVICE_CPU_REQUEST_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the service cpu request can be set to. Used with the KUBERNETES_SERVICE_CPU_REQUEST variable in the build."`
	ServiceMemoryLimit                                string `toml:"service_memory_limit,omitempty" json:"service_memory_limit" long:"service-memory-limit" env:"KUBERNETES_SERVICE_MEMORY_LIMIT" description:"The amount of memory allocated to build service containers"`
	ServiceMemoryLimitOverwriteMaxAllowed             string `toml:"service_memory_limit_overwrite_max_allowed,omitempty" json:"service_memory_limit_overwrite_max_allowed" long:"service-memory-limit-overwrite-max-allowed" env:"KUBERNETES_SERVICE_MEMORY_LIMIT_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the service memory limit can be set to. Used with the KUBERNETES_SERVICE_MEMORY_LIMIT variable in the build."`
	ServiceMemoryRequest                              string `toml:"service_memory_request,omitempty" json:"service_memory_request" long:"service-memory-request" env:"KUBERNETES_SERVICE_MEMORY_REQUEST" description:"The amount of memory requested for build service containers"`
	ServiceMemoryRequestOverwriteMaxAllowed           string `toml:"service_memory_request_overwrite_max_allowed,omitempty" json:"service_memory_request_overwrite_max_allowed" long:"service-memory-request-overwrite-max-allowed" env:"KUBERNETES_SERVICE_MEMORY_REQUEST_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the service memory request can be set to. Used with the KUBERNETES_SERVICE_MEMORY_REQUEST variable in the build."`
	ServiceEphemeralStorageLimit                      string `toml:"service_ephemeral_storage_limit,omitempty" json:"service_ephemeral_storage_limit" long:"service-ephemeral_storage-limit" env:"KUBERNETES_SERVICE_EPHEMERAL_STORAGE_LIMIT" description:"The amount of ephemeral storage allocated to build service containers"`
	ServiceEphemeralStorageLimitOverwriteMaxAllowed   string `toml:"service_ephemeral_storage_limit_overwrite_max_allowed,omitempty" json:"service_ephemeral_storage_limit_overwrite_max_allowed" long:"service-ephemeral_storage-limit-overwrite-max-allowed" env:"KUBERNETES_SERVICE_EPHEMERAL_STORAGE_LIMIT_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the service ephemeral storage limit can be set to. Used with the KUBERNETES_SERVICE_EPHEMERAL_STORAGE_LIMIT variable in the build."`
	ServiceEphemeralStorageRequest                    string `toml:"service_ephemeral_storage_request,omitempty" json:"service_ephemeral_storage_request" long:"service-ephemeral_storage-request" env:"KUBERNETES_SERVICE_EPHEMERAL_STORAGE_REQUEST" description:"The amount of ephemeral storage requested for build service containers"`
	ServiceEphemeralStorageRequestOverwriteMaxAllowed string `toml:"service_ephemeral_storage_request_overwrite_max_allowed,omitempty" json:"service_ephemeral_storage_request_overwrite_max_allowed" long:"service-ephemeral_storage-request-overwrite-max-allowed" env:"KUBERNETES_SERVICE_EPHEMERAL_STORAGE_REQUEST_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the service ephemeral storage request can be set to. Used with the KUBERNETES_SERVICE_EPHEMERAL_STORAGE_REQUEST variable in the build."`
	// Helper container resource settings, mirroring the build container set.
	HelperCPULimit                                   string `toml:"helper_cpu_limit,omitempty" json:"helper_cpu_limit" long:"helper-cpu-limit" env:"KUBERNETES_HELPER_CPU_LIMIT" description:"The CPU allocation given to build helper containers"`
	HelperCPULimitOverwriteMaxAllowed                string `toml:"helper_cpu_limit_overwrite_max_allowed,omitempty" json:"helper_cpu_limit_overwrite_max_allowed" long:"helper-cpu-limit-overwrite-max-allowed" env:"KUBERNETES_HELPER_CPU_LIMIT_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the helper cpu limit can be set to. Used with the KUBERNETES_HELPER_CPU_LIMIT variable in the build."`
	HelperCPURequest                                 string `toml:"helper_cpu_request,omitempty" json:"helper_cpu_request" long:"helper-cpu-request" env:"KUBERNETES_HELPER_CPU_REQUEST" description:"The CPU allocation requested for build helper containers"`
	HelperCPURequestOverwriteMaxAllowed              string `toml:"helper_cpu_request_overwrite_max_allowed,omitempty" json:"helper_cpu_request_overwrite_max_allowed" long:"helper-cpu-request-overwrite-max-allowed" env:"KUBERNETES_HELPER_CPU_REQUEST_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the helper cpu request can be set to. Used with the KUBERNETES_HELPER_CPU_REQUEST variable in the build."`
	HelperMemoryLimit                                string `toml:"helper_memory_limit,omitempty" json:"helper_memory_limit" long:"helper-memory-limit" env:"KUBERNETES_HELPER_MEMORY_LIMIT" description:"The amount of memory allocated to build helper containers"`
	HelperMemoryLimitOverwriteMaxAllowed             string `toml:"helper_memory_limit_overwrite_max_allowed,omitempty" json:"helper_memory_limit_overwrite_max_allowed" long:"helper-memory-limit-overwrite-max-allowed" env:"KUBERNETES_HELPER_MEMORY_LIMIT_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the helper memory limit can be set to. Used with the KUBERNETES_HELPER_MEMORY_LIMIT variable in the build."`
	HelperMemoryRequest                              string `toml:"helper_memory_request,omitempty" json:"helper_memory_request" long:"helper-memory-request" env:"KUBERNETES_HELPER_MEMORY_REQUEST" description:"The amount of memory requested for build helper containers"`
	HelperMemoryRequestOverwriteMaxAllowed           string `toml:"helper_memory_request_overwrite_max_allowed,omitempty" json:"helper_memory_request_overwrite_max_allowed" long:"helper-memory-request-overwrite-max-allowed" env:"KUBERNETES_HELPER_MEMORY_REQUEST_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the helper memory request can be set to. Used with the KUBERNETES_HELPER_MEMORY_REQUEST variable in the build."`
	HelperEphemeralStorageLimit                      string `toml:"helper_ephemeral_storage_limit,omitempty" json:"helper_ephemeral_storage_limit" long:"helper-ephemeral_storage-limit" env:"KUBERNETES_HELPER_EPHEMERAL_STORAGE_LIMIT" description:"The amount of ephemeral storage allocated to build helper containers"`
	HelperEphemeralStorageLimitOverwriteMaxAllowed   string `toml:"helper_ephemeral_storage_limit_overwrite_max_allowed,omitempty" json:"helper_ephemeral_storage_limit_overwrite_max_allowed" long:"helper-ephemeral_storage-limit-overwrite-max-allowed" env:"KUBERNETES_HELPER_EPHEMERAL_STORAGE_LIMIT_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the helper ephemeral storage limit can be set to. Used with the KUBERNETES_HELPER_EPHEMERAL_STORAGE_LIMIT variable in the build."`
	HelperEphemeralStorageRequest                    string `toml:"helper_ephemeral_storage_request,omitempty" json:"helper_ephemeral_storage_request" long:"helper-ephemeral_storage-request" env:"KUBERNETES_HELPER_EPHEMERAL_STORAGE_REQUEST" description:"The amount of ephemeral storage requested for build helper containers"`
	HelperEphemeralStorageRequestOverwriteMaxAllowed string `toml:"helper_ephemeral_storage_request_overwrite_max_allowed,omitempty" json:"helper_ephemeral_storage_request_overwrite_max_allowed" long:"helper-ephemeral_storage-request-overwrite-max-allowed" env:"KUBERNETES_HELPER_EPHEMERAL_STORAGE_REQUEST_OVERWRITE_MAX_ALLOWED" description:"If set, the max amount the helper ephemeral storage request can be set to. Used with the KUBERNETES_HELPER_EPHEMERAL_STORAGE_REQUEST variable in the build."`
	// Pod-level (whole pod) resource settings.
	PodCPULimit                         string `toml:"pod_cpu_limit,omitempty" json:"pod_cpu_limit" long:"pod-cpu-limit" env:"KUBERNETES_POD_CPU_LIMIT" description:"The CPU allocation given to the build pod"`
	PodCPULimitOverwriteMaxAllowed      string `toml:"pod_cpu_limit_overwrite_max_allowed,omitempty" json:"pod_cpu_limit_overwrite_max_allowed" long:"pod-cpu-limit-overwrite-max-allowed" env:"KUBERNETES_POD_CPU_LIMIT_OVERWRITE_MAX_ALLOWED" description:"If set, the maximum amount the pod CPU limit can be set to. Used with the KUBERNETES_POD_CPU_LIMIT variable in the build."`
	PodCPURequest                       string `toml:"pod_cpu_request,omitempty" json:"pod_cpu_request" long:"pod-cpu-request" env:"KUBERNETES_POD_CPU_REQUEST" description:"The CPU allocation requested for the build pod"`
	PodCPURequestOverwriteMaxAllowed    string `toml:"pod_cpu_request_overwrite_max_allowed,omitempty" json:"pod_cpu_request_overwrite_max_allowed" long:"pod-cpu-request-overwrite-max-allowed" env:"KUBERNETES_POD_CPU_REQUEST_OVERWRITE_MAX_ALLOWED" description:"If set, the maximum amount the pod CPU request can be set to. Used with the KUBERNETES_POD_CPU_REQUEST variable in the build."`
	PodMemoryLimit                      string `toml:"pod_memory_limit,omitempty" json:"pod_memory_limit" long:"pod-memory-limit" env:"KUBERNETES_POD_MEMORY_LIMIT" description:"The amount of memory allocated to the build pod"`
	PodMemoryLimitOverwriteMaxAllowed   string `toml:"pod_memory_limit_overwrite_max_allowed,omitempty" json:"pod_memory_limit_overwrite_max_allowed" long:"pod-memory-limit-overwrite-max-allowed" env:"KUBERNETES_POD_MEMORY_LIMIT_OVERWRITE_MAX_ALLOWED" description:"If set, the maximum amount the pod memory limit can be set to. Used with the KUBERNETES_POD_MEMORY_LIMIT variable in the build."`
	PodMemoryRequest                    string `toml:"pod_memory_request,omitempty" json:"pod_memory_request" long:"pod-memory-request" env:"KUBERNETES_POD_MEMORY_REQUEST" description:"The amount of memory requested from the build pod"`
	PodMemoryRequestOverwriteMaxAllowed string `toml:"pod_memory_request_overwrite_max_allowed,omitempty" json:"pod_memory_request_overwrite_max_allowed" long:"pod-memory-request-overwrite-max-allowed" env:"KUBERNETES_POD_MEMORY_REQUEST_OVERWRITE_MAX_ALLOWED" description:"If set, the maximum amount the pod memory request can be set to. Used with the KUBERNETES_POD_MEMORY_REQUEST variable in the build."`
	// Allowlists restricting what jobs may request.
	AllowedImages       []string           `toml:"allowed_images,omitempty" json:"allowed_images,omitempty" long:"allowed-images" env:"KUBERNETES_ALLOWED_IMAGES" description:"Image allowlist"`
	AllowedPullPolicies []DockerPullPolicy `toml:"allowed_pull_policies,omitempty" json:"allowed_pull_policies,omitempty" long:"allowed-pull-policies" env:"KUBERNETES_ALLOWED_PULL_POLICIES" description:"Pull policy allowlist"`
	AllowedServices     []string           `toml:"allowed_services,omitempty" json:"allowed_services,omitempty" long:"allowed-services" env:"KUBERNETES_ALLOWED_SERVICES" description:"Service allowlist"`
	AllowedUsers        []string           `toml:"allowed_users,omitempty" json:"allowed_users,omitempty" long:"allowed-users" env:"KUBERNETES_ALLOWED_USERS" description:"User allowlist"`
	AllowedGroups       []string           `toml:"allowed_groups,omitempty" json:"allowed_groups,omitempty" long:"allowed-groups" env:"KUBERNETES_ALLOWED_GROUPS" description:"Group allowlist"`
	PullPolicy          StringOrArray      `toml:"pull_policy,omitempty" json:"pull_policy,omitempty" long:"pull-policy" env:"KUBERNETES_PULL_POLICY" description:"Policy for if/when to pull a container image (never, if-not-present, always). The cluster default will be used if not set"`
	// Scheduling: node selection, tolerations and affinity.
	NodeSelector                    map[string]string  `toml:"node_selector,omitempty" json:"node_selector,omitempty" long:"node-selector" env:"KUBERNETES_NODE_SELECTOR" description:"A toml table/json object of key:value. Value is expected to be a string. When set this will create pods on k8s nodes that match all the key:value pairs. Only one selector is supported through environment variable configuration."`
	NodeSelectorOverwriteAllowed    string             `toml:"node_selector_overwrite_allowed" json:"node_selector_overwrite_allowed" long:"node_selector_overwrite_allowed" env:"KUBERNETES_NODE_SELECTOR_OVERWRITE_ALLOWED" description:"Regex to validate 'KUBERNETES_NODE_SELECTOR_*' values"`
	NodeTolerations                 map[string]string  `toml:"node_tolerations,omitempty" json:"node_tolerations,omitempty" long:"node-tolerations" env:"KUBERNETES_NODE_TOLERATIONS" description:"A toml table/json object of key=value:effect. Value and effect are expected to be strings. When set, pods will tolerate the given taints. Only one toleration is supported through environment variable configuration."`
	NodeTolerationsOverwriteAllowed string             `toml:"node_tolerations_overwrite_allowed" json:"node_tolerations_overwrite_allowed" long:"node_tolerations_overwrite_allowed" env:"KUBERNETES_NODE_TOLERATIONS_OVERWRITE_ALLOWED" description:"Regex to validate 'KUBERNETES_NODE_TOLERATIONS_*' values"`
	Affinity                        KubernetesAffinity `toml:"affinity,omitempty" json:"affinity" long:"affinity" description:"Kubernetes Affinity setting that is used to select the node that spawns a pod"`
	// Image pull secrets and helper image selection.
	ImagePullSecrets                  []string `toml:"image_pull_secrets,omitempty" json:"image_pull_secrets,omitempty" long:"image-pull-secrets" env:"KUBERNETES_IMAGE_PULL_SECRETS" description:"A list of image pull secrets that are used for pulling docker image"`
	UseServiceAccountImagePullSecrets bool     `toml:"use_service_account_image_pull_secrets,omitempty" json:"use_service_account_image_pull_secrets" long:"use-service-account-image-pull-secrets" env:"KUBERNETES_USE_SERVICE_ACCOUNT_IMAGE_PULL_SECRETS" description:"Do not provide any image pull secrets to the Pod created, so the secrets from the ServiceAccount can be used"`
	HelperImage                       string   `toml:"helper_image,omitempty" json:"helper_image" long:"helper-image" env:"KUBERNETES_HELPER_IMAGE" description:"[ADVANCED] Override the default helper image used to clone repos and upload artifacts"`
	HelperImageFlavor                 string   `toml:"helper_image_flavor,omitempty" json:"helper_image_flavor" long:"helper-image-flavor" env:"KUBERNETES_HELPER_IMAGE_FLAVOR" description:"Set helper image flavor (alpine, ubuntu), defaults to alpine"`
	HelperImageAutosetArchAndOS       bool     `toml:"helper_image_autoset_arch_and_os,omitempty" json:"helper_image_autoset_arch_and_os" long:"helper-image-autoset-arch-and-os" env:"KUBERNETES_HELPER_IMAGE_AUTOSET_ARCH_AND_OS" description:"When set, it uses the underlying OS to set the Helper Image ARCH and OS"`
	// Termination, cleanup and API polling/retry behavior.
	PodTerminationGracePeriodSeconds     *int64                 `toml:"pod_termination_grace_period_seconds,omitzero" json:"pod_termination_grace_period_seconds,omitempty" long:"pod_termination_grace_period_seconds" env:"KUBERNETES_POD_TERMINATION_GRACE_PERIOD_SECONDS" description:"Pod-level setting which determines the duration in seconds which the pod has to terminate gracefully. After this, the processes are forcibly halted with a kill signal. Ignored if KUBERNETES_TERMINATIONGRACEPERIODSECONDS is specified."`
	CleanupGracePeriodSeconds            *int64                 `toml:"cleanup_grace_period_seconds" json:"cleanup_grace_period_seconds,omitempty" long:"cleanup_grace_period_seconds" env:"KUBERNETES_CLEANUP_GRACE_PERIOD_SECONDS" description:"When cleaning up a pod on completion of a job, the duration in seconds which the pod has to terminate gracefully. After this, the processes are forcibly halted with a kill signal. Ignored if KUBERNETES_TERMINATIONGRACEPERIODSECONDS is specified."`
	CleanupResourcesTimeout              *time.Duration         `toml:"cleanup_resources_timeout,omitzero" json:"cleanup_resources_timeout,omitempty" long:"cleanup_resources_timeout" env:"KUBERNETES_CLEANUP_RESOURCES_TIMEOUT" description:"The total amount of time for Kubernetes resources to be cleaned up after the job completes. Supported syntax: '1h30m', '300s', '10m'. Default is 5 minutes ('5m')."`
	PollInterval                         int                    `toml:"poll_interval,omitzero" json:"poll_interval" long:"poll-interval" env:"KUBERNETES_POLL_INTERVAL" description:"How frequently, in seconds, the runner will poll the Kubernetes pod it has just created to check its status"`
	PollTimeout                          int                    `toml:"poll_timeout,omitzero" json:"poll_timeout" long:"poll-timeout" env:"KUBERNETES_POLL_TIMEOUT" description:"The total amount of time, in seconds, that needs to pass before the runner will timeout attempting to connect to the pod it has just created (useful for queueing more builds that the cluster can handle at a time)"`
	ResourceAvailabilityCheckMaxAttempts int                    `toml:"resource_availability_check_max_attempts,omitzero" json:"resource_availability_check_max_attempts" long:"resource-availability-check-max-attempts" env:"KUBERNETES_RESOURCE_AVAILABILITY_CHECK_MAX_ATTEMPTS" default:"5" description:"The maximum number of attempts to check if a resource (service account and/or pull secret) set is available before giving up. There is 5 seconds interval between each attempt"`
	RequestRetryLimit                    RequestRetryLimit      `toml:"retry_limit,omitzero" json:"retry_limit" long:"retry-limit" env:"KUBERNETES_REQUEST_RETRY_LIMIT" default:"5" description:"The maximum number of attempts to communicate with Kubernetes API. The retry interval between each attempt is based on a backoff algorithm starting at 500 ms"`
	RequestRetryBackoffMax               RequestRetryBackoffMax `toml:"retry_backoff_max,omitzero" json:"retry_backoff_max" long:"retry-backoff-max" env:"KUBERNETES_REQUEST_RETRY_BACKOFF_MAX" default:"2000" description:"The max backoff interval value in milliseconds that can be reached for retry attempts to communicate with Kubernetes API"`
	RequestRetryLimits                   RequestRetryLimits     `toml:"retry_limits" json:"retry_limits,omitempty" long:"retry-limits" env:"KUBERNETES_RETRY_LIMITS" description:"How many times each request error is to be retried"`
	// Pod metadata, scheduler and service account.
	PodLabels                      map[string]string `toml:"pod_labels,omitempty" json:"pod_labels,omitempty" long:"pod-labels" description:"A toml table/json object of key-value. Value is expected to be a string. When set, this will create pods with the given pod labels. Environment variables will be substituted for values here."`
	PodLabelsOverwriteAllowed      string            `toml:"pod_labels_overwrite_allowed" json:"pod_labels_overwrite_allowed" long:"pod_labels_overwrite_allowed" env:"KUBERNETES_POD_LABELS_OVERWRITE_ALLOWED" description:"Regex to validate 'KUBERNETES_POD_LABELS_*' values"`
	SchedulerName                  string            `toml:"scheduler_name,omitempty" json:"scheduler_name" long:"scheduler-name" env:"KUBERNETES_SCHEDULER_NAME" description:"Pods will be scheduled using this scheduler, if it exists"`
	ServiceAccount                 string            `toml:"service_account,omitempty" json:"service_account" long:"service-account" env:"KUBERNETES_SERVICE_ACCOUNT" description:"Executor pods will use this Service Account to talk to kubernetes API"`
	ServiceAccountOverwriteAllowed string            `toml:"service_account_overwrite_allowed" json:"service_account_overwrite_allowed" long:"service_account_overwrite_allowed" env:"KUBERNETES_SERVICE_ACCOUNT_OVERWRITE_ALLOWED" description:"Regex to validate 'KUBERNETES_SERVICE_ACCOUNT' value"`
	AutomountServiceAccountToken   *bool             `toml:"automount_service_account_token,omitzero" json:"automount_service_account_token,omitempty" long:"automount-service-account-token" env:"KUBERNETES_AUTOMOUNT_SERVICE_ACCOUNT_TOKEN" description:"Boolean to control the automount of the service account token in the build pod."`
	PodAnnotations                 map[string]string `toml:"pod_annotations,omitempty" json:"pod_annotations,omitempty" long:"pod-annotations" description:"A toml table/json object of key-value. Value is expected to be a string. When set, this will create pods with the given annotations. Can be overwritten in build with KUBERNETES_POD_ANNOTATION_* variables"`
	PodAnnotationsOverwriteAllowed string            `toml:"pod_annotations_overwrite_allowed" json:"pod_annotations_overwrite_allowed" long:"pod_annotations_overwrite_allowed" env:"KUBERNETES_POD_ANNOTATIONS_OVERWRITE_ALLOWED" description:"Regex to validate 'KUBERNETES_POD_ANNOTATIONS_*' values"`
	// Security contexts for the pod and each container role.
	PodSecurityContext                      KubernetesPodSecurityContext       `toml:"pod_security_context,omitempty" namespace:"pod-security-context" description:"A security context attached to each build pod"`
	InitPermissionsContainerSecurityContext KubernetesContainerSecurityContext `toml:"init_permissions_container_security_context,omitempty" namespace:"init_permissions_container_security_context" description:"A security context attached to the init-permissions container inside the build pod"`
	BuildContainerSecurityContext           KubernetesContainerSecurityContext `toml:"build_container_security_context,omitempty" namespace:"build_container_security_context" description:"A security context attached to the build container inside the build pod"`
	HelperContainerSecurityContext          KubernetesContainerSecurityContext `toml:"helper_container_security_context,omitempty" namespace:"helper_container_security_context" description:"A security context attached to the helper container inside the build pod"`
	ServiceContainerSecurityContext         KubernetesContainerSecurityContext `toml:"service_container_security_context,omitempty" namespace:"service_container_security_context" description:"A security context attached to the service containers inside the build pod"`
	// Volumes, services, networking and miscellaneous pod settings.
	Volumes            KubernetesVolumes           `toml:"volumes"`
	HostAliases        KubernetesHostAliasesFlag   `toml:"host_aliases,omitempty" json:"host_aliases,omitempty" long:"host_aliases" description:"Add a custom host-to-IP mapping"`
	Services           []Service                   `toml:"services,omitempty" json:"services,omitempty" description:"Add service that is started with container"`
	CapAdd             []string                    `toml:"cap_add" json:"cap_add,omitempty" long:"cap-add" env:"KUBERNETES_CAP_ADD" description:"Add Linux capabilities"`
	CapDrop            []string                    `toml:"cap_drop" json:"cap_drop,omitempty" long:"cap-drop" env:"KUBERNETES_CAP_DROP" description:"Drop Linux capabilities"`
	DNSPolicy          KubernetesDNSPolicy         `toml:"dns_policy,omitempty" json:"dns_policy" long:"dns-policy" env:"KUBERNETES_DNS_POLICY" description:"How Kubernetes should try to resolve DNS from the created pods. If unset, Kubernetes will use the default 'ClusterFirst'. Valid values are: none, default, cluster-first, cluster-first-with-host-net"`
	DNSConfig          KubernetesDNSConfig         `toml:"dns_config" json:"dns_config" description:"Pod DNS config"`
	ContainerLifecycle KubernetesContainerLifecyle `toml:"container_lifecycle,omitempty" json:"container_lifecycle,omitempty" description:"Actions that the management system should take in response to container lifecycle events"`
	PriorityClassName  string                      `toml:"priority_class_name,omitempty" json:"priority_class_name" long:"priority_class_name" env:"KUBERNETES_PRIORITY_CLASS_NAME" description:"If set, the Kubernetes Priority Class to be set to the Pods"`
	// NOTE(review): the json tag below has an empty name (`json:",omitempty"`),
	// so the JSON key is the field name "PodSpec" — confirm this is intended.
	PodSpec            []KubernetesPodSpec         `toml:"pod_spec" json:",omitempty"`
	LogsBaseDir        string                      `toml:"logs_base_dir,omitempty" json:"logs_base_dir" long:"logs-base-dir" env:"KUBERNETES_LOGS_BASE_DIR" description:"Base directory for the path where build logs are stored. This directory is prepended to the final generated path. For example, /logs--."`
	ScriptsBaseDir     string                      `toml:"scripts_base_dir,omitempty" json:"scripts_base_dir" long:"scripts-base-dir" env:"KUBERNETES_SCRIPTS_BASE_DIR" description:"Base directory for the path where build scripts are stored. This directory is prepended to the final generated path. For example, /scripts--."`
	// Diagnostics and availability toggles; pointers distinguish "unset" from false.
	PrintPodWarningEvents *bool                       `toml:"print_pod_warning_events,omitempty" json:"print_pod_warning_events,omitempty" long:"print-pod-warning-events" env:"KUBERNETES_PRINT_POD_WARNING_EVENTS" description:"When enabled, all warning events associated with the pod are retrieved when the job fails. Enabled by default."`
	PodDisruptionBudget   *bool                       `toml:"pod_disruption_budget,omitzero" json:"pod_disruption_budget,omitempty" long:"pod-disruption-budget" env:"KUBERNETES_POD_DISRUPTION_BUDGET" description:"When enabled, a PodDisruptionBudget is created for each job pod to prevent eviction during node drains. Disabled by default."`
	Autoscaler            *KubernetesAutoscalerConfig `toml:"autoscaler,omitempty" json:"autoscaler,omitempty" description:"Autoscaler configuration for pause pods"`
}

// KubernetesAutoscalerConfig defines autoscaling configuration for pause pods in the Kubernetes executor.
type KubernetesAutoscalerConfig struct {
	// MaxPausePods is the maximum number of pause pods that can be created. 0 means unlimited.
	MaxPausePods int `toml:"max_pause_pods,omitempty" json:"max_pause_pods,omitempty" description:"Maximum number of pause pods to create. 0 means unlimited."`
	// PausePodImage is the image to use for pause pods. Defaults to registry.k8s.io/pause:3.10.
	PausePodImage string `toml:"pause_pod_image,omitempty" json:"pause_pod_image,omitempty" description:"Image to use for pause pods. Defaults to registry.k8s.io/pause:3.10."`
	// PausePodPriorityClassName is the priority class for pause pods. Should be lower than job pods.
	// PausePodPriorityClassName is the priority class for pause pods. Should be lower than job pods.
	PausePodPriorityClassName string `toml:"pause_pod_priority_class_name,omitempty" json:"pause_pod_priority_class_name,omitempty" description:"Priority class for pause pods. Should be lower priority than job pods to enable preemption."`
	// Policy defines the scaling policies for pause pods.
	Policy []AutoscalerPolicyConfig `toml:"policy,omitempty" json:"policy,omitempty" description:"Scaling policies for pause pods"`
}

// RequestRetryLimit is the configured number of retry attempts for failed
// Kubernetes API requests.
type RequestRetryLimit int

// Get returns the configured retry limit, falling back to
// DefaultRequestRetryLimit when the value is unset or not positive.
func (r RequestRetryLimit) Get() int {
	if r > 0 {
		return int(r)
	}
	return DefaultRequestRetryLimit
}

// RequestRetryLimits maps an error key to the number of times a request
// failing with that error is to be retried.
// NOTE(review): the exact key format is consumed elsewhere — presumably an
// error-message match; confirm against the retry implementation.
type RequestRetryLimits map[string]int

// RequestRetryBackoffMax is the configured upper bound, in milliseconds,
// of the backoff interval used between request retry attempts.
type RequestRetryBackoffMax int

// Get returns the backoff ceiling as a time.Duration. Non-positive values
// fall back to DefaultRequestRetryBackoffMax; configured values at or
// below RequestRetryBackoffMin are clamped up to RequestRetryBackoffMin.
func (r RequestRetryBackoffMax) Get() time.Duration {
	switch {
	case r <= 0:
		return DefaultRequestRetryBackoffMax
	case time.Duration(r)*time.Millisecond <= RequestRetryBackoffMin:
		return RequestRetryBackoffMin
	default:
		return time.Duration(r) * time.Millisecond
	}
}

// KubernetesPodSpec describes a user-supplied patch to the generated build
// pod spec: either an inline patch or a path to a patch file, plus the
// patch strategy used to apply it.
type KubernetesPodSpec struct {
	// Name identifies this patch entry; it is included in error messages.
	Name string `toml:"name"`
	// PatchPath is a path to a file containing the patch. Mutually
	// exclusive with Patch.
	PatchPath string `toml:"patch_path"`
	// Patch is the inline patch content (YAML or JSON).
	Patch string `toml:"patch"`
	// PatchType selects the patch strategy; empty means strategic merge.
	PatchType KubernetesPodSpecPatchType `toml:"patch_type"`
}

// PodSpecPatch returns the patch data (JSON encoded) and type.
// The patch is read from PatchPath when set (it is an error to provide
// both Patch and PatchPath), converted from YAML to JSON (JSON input is
// valid YAML, so both are accepted), and returned with the effective
// patch type.
func (s *KubernetesPodSpec) PodSpecPatch() ([]byte, KubernetesPodSpecPatchType, error) {
	patchBytes := []byte(s.Patch)
	patchType := s.PatchType
	if patchType == "" {
		patchType = PatchTypeStrategicMergePatchType
	}

	if s.PatchPath != "" {
		// Inline patch and patch file are mutually exclusive.
		if s.Patch != "" {
			return nil, "", fmt.Errorf("%w (%s)", errPatchAmbiguous, s.Name)
		}

		var err error
		patchBytes, err = os.ReadFile(s.PatchPath)
		if err != nil {
			return nil, "", fmt.Errorf("%w (%s): %w", errPatchFileFail, s.Name, err)
		}
	}

	patchBytes, err := yaml.YAMLToJSON(patchBytes)
	if err != nil {
		return nil, "", fmt.Errorf("%w (%s): %w", errPatchConversion, s.Name, err)
	}

	return patchBytes, patchType, nil
}

// KubernetesPodSpecPatchType selects the strategy used to apply a pod
// spec patch.
type KubernetesPodSpecPatchType string

const (
	// PatchTypeJSONPatchType selects JSON-patch semantics.
	PatchTypeJSONPatchType = KubernetesPodSpecPatchType("json")
	// PatchTypeMergePatchType selects merge-patch semantics.
	PatchTypeMergePatchType = KubernetesPodSpecPatchType("merge")
	// PatchTypeStrategicMergePatchType selects strategic-merge-patch
	// semantics. This is the default (see PodSpecPatch).
	PatchTypeStrategicMergePatchType =
KubernetesPodSpecPatchType("strategic")
)

// KubernetesDNSConfig configures DNS for the build pod.
type KubernetesDNSConfig struct {
	Nameservers []string                    `toml:"nameservers" json:",omitempty" description:"A list of IP addresses that will be used as DNS servers for the Pod."`
	Options     []KubernetesDNSConfigOption `toml:"options" json:",omitempty" description:"An optional list of objects where each object may have a name property (required) and a value property (optional)."`
	Searches    []string                    `toml:"searches" json:",omitempty" description:"A list of DNS search domains for hostname lookup in the Pod."`
}

// KubernetesDNSConfigOption is a single DNS resolver option: a name with
// an optional value.
type KubernetesDNSConfigOption struct {
	Name  string  `toml:"name"`
	Value *string `toml:"value,omitempty"`
}

// KubernetesVolumes aggregates every volume type that can be mounted into
// the build pod.
// Fix: the NFSVolumes description previously read "The NSF volumes" — a
// typo for "NFS" that surfaced in generated help text.
type KubernetesVolumes struct {
	HostPaths  []KubernetesHostPath  `toml:"host_path" json:",omitempty" description:"The host paths which will be mounted"`
	PVCs       []KubernetesPVC       `toml:"pvc" json:",omitempty" description:"The persistent volume claims that will be mounted"`
	ConfigMaps []KubernetesConfigMap `toml:"config_map" json:",omitempty" description:"The config maps which will be mounted as volumes"`
	Secrets    []KubernetesSecret    `toml:"secret" json:",omitempty" description:"The secret maps which will be mounted"`
	EmptyDirs  []KubernetesEmptyDir  `toml:"empty_dir" json:",omitempty" description:"The empty dirs which will be mounted"`
	CSIs       []KubernetesCSI       `toml:"csi" json:",omitempty" description:"The CSI volumes which will be mounted"`
	NFSVolumes []KubernetesNFS       `toml:"nfs" json:",omitempty" description:"The NFS volumes which will be mounted"`
}

// KubernetesConfigMap describes a ConfigMap-backed volume mount.
type KubernetesConfigMap struct {
	Name      string `toml:"name" json:"name" description:"The name of the volume and ConfigMap to use"`
	MountPath string `toml:"mount_path" description:"Path where volume should be mounted inside of container"`
	SubPath   string `toml:"sub_path,omitempty" description:"The sub-path of the volume to mount (defaults to volume root)"`
	ReadOnly  bool   `toml:"read_only,omitempty" description:"If this volume should be mounted read only"`
	Items     map[string]string `toml:"items,omitempty" json:",omitempty" description:"Key-to-path mapping for keys from the config map that should be used."`
}

// KubernetesHostPath describes a hostPath-backed volume mount.
type KubernetesHostPath struct {
	Name             string  `toml:"name" json:"name" description:"The name of the volume"`
	MountPath        string  `toml:"mount_path" description:"Path where volume should be mounted inside of container"`
	SubPath          string  `toml:"sub_path,omitempty" description:"The sub-path of the volume to mount (defaults to volume root)"`
	ReadOnly         bool    `toml:"read_only,omitempty" description:"If this volume should be mounted read only"`
	HostPath         string  `toml:"host_path,omitempty" description:"Path from the host that should be mounted as a volume"`
	MountPropagation *string `toml:"mount_propagation,omitempty" description:"Mount propagation mode for the volume"`
}

// KubernetesPVC describes a PersistentVolumeClaim-backed volume mount.
type KubernetesPVC struct {
	Name             string  `toml:"name" json:"name" description:"The name of the volume and PVC to use"`
	MountPath        string  `toml:"mount_path" description:"Path where volume should be mounted inside of container"`
	SubPath          string  `toml:"sub_path,omitempty" description:"The sub-path of the volume to mount (defaults to volume root)"`
	ReadOnly         bool    `toml:"read_only,omitempty" description:"If this volume should be mounted read only"`
	MountPropagation *string `toml:"mount_propagation,omitempty" description:"Mount propagation mode for the volume"`
}

// KubernetesSecret describes a Secret-backed volume mount.
type KubernetesSecret struct {
	Name      string `toml:"name" json:"name" description:"The name of the volume and Secret to use"`
	MountPath string `toml:"mount_path" description:"Path where volume should be mounted inside of container"`
	SubPath   string `toml:"sub_path,omitempty" description:"The sub-path of the volume to mount (defaults to volume root)"`
	ReadOnly  bool   `toml:"read_only,omitempty" description:"If this volume should be mounted read only"`
	Items     map[string]string `toml:"items,omitempty" json:",omitempty" description:"Key-to-path mapping for keys from the secret that should be used."`
}

// KubernetesEmptyDir describes an emptyDir-backed volume mount.
type KubernetesEmptyDir struct {
	Name string
`toml:"name" json:"name" description:"The name of the volume and EmptyDir to use"`
	MountPath        string  `toml:"mount_path" description:"Path where volume should be mounted inside of container"`
	SubPath          string  `toml:"sub_path,omitempty" description:"The sub-path of the volume to mount (defaults to volume root)"`
	Medium           string  `toml:"medium,omitempty" description:"Set to 'Memory' to have a tmpfs"`
	SizeLimit        string  `toml:"size_limit,omitempty" description:"Total amount of local storage required."`
	MountPropagation *string `toml:"mount_propagation,omitempty" description:"Mount propagation mode for the volume"`
}

// KubernetesCSI describes a CSI-driver-backed volume mount.
type KubernetesCSI struct {
	Name             string            `toml:"name" json:"name" description:"The name of the CSI volume and volumeMount to use"`
	MountPath        string            `toml:"mount_path" description:"Path where volume should be mounted inside of container"`
	SubPath          string            `toml:"sub_path,omitempty" description:"The sub-path of the volume to mount (defaults to volume root)"`
	Driver           string            `toml:"driver" description:"A string value that specifies the name of the volume driver to use."`
	FSType           string            `toml:"fs_type" description:"Filesystem type to mount. If not provided, the empty value is passed to the associated CSI driver which will determine the default filesystem to apply."`
	ReadOnly         bool              `toml:"read_only,omitempty" description:"If this volume should be mounted read only"`
	VolumeAttributes map[string]string `toml:"volume_attributes,omitempty" json:",omitempty" description:"Key-value pair mapping for attributes of the CSI volume."`
}

// KubernetesNFS describes an NFS-backed volume mount.
type KubernetesNFS struct {
	Name      string `toml:"name" json:"name" description:"The name of the NFS volume and volumeMount to use"`
	MountPath string `toml:"mount_path" description:"Path where volume should be mounted inside of container"`
	SubPath   string `toml:"sub_path,omitempty" description:"The sub-path of the volume to mount (defaults to volume root)"`
	Server    string `toml:"server" description:"The NFS server that should be mounted"`
	Path      string `toml:"path" description:"The path of the NFS share to mount"`
	ReadOnly  bool   `toml:"read_only,omitempty" description:"If this volume should be mounted read only"`
}

// UnmarshalTOML implements custom TOML decoding for KubernetesNFS so that
// all missing required fields (name, mount_path, server, path) are
// reported together in a single error.
// Values of an unexpected type are silently skipped by the type
// assertions below and then surface as "missing" when required.
func (n *KubernetesNFS) UnmarshalTOML(data any) error {
	m, ok := data.(map[string]interface{})
	if !ok {
		return fmt.Errorf("nfs volume: expected a table, got %T", data)
	}

	if v, ok := m["name"].(string); ok {
		n.Name = v
	}
	if v, ok := m["mount_path"].(string); ok {
		n.MountPath = v
	}
	if v, ok := m["sub_path"].(string); ok {
		n.SubPath = v
	}
	if v, ok := m["server"].(string); ok {
		n.Server = v
	}
	if v, ok := m["path"].(string); ok {
		n.Path = v
	}
	if v, ok := m["read_only"].(bool); ok {
		n.ReadOnly = v
	}

	// Collect every missing required field before failing, so one error
	// message lists them all.
	var missing []string
	if n.Name == "" {
		missing = append(missing, "name")
	}
	if n.MountPath == "" {
		missing = append(missing, "mount_path")
	}
	if n.Server == "" {
		missing = append(missing, "server")
	}
	if n.Path == "" {
		missing = append(missing, "path")
	}
	if len(missing) > 0 {
		return fmt.Errorf("nfs volume: missing required fields: %s", strings.Join(missing, ", "))
	}
	return nil
}

// KubernetesSeccompProfile configures a seccomp profile: a profile type
// plus an optional localhost profile path.
type KubernetesSeccompProfile struct {
	Type string `toml:"type,omitempty" json:",omitempty" long:"type" env:"@TYPE"
description:"The seccomp profile type. Valid values: RuntimeDefault, Localhost, Unconfined"`
	LocalhostProfile string `toml:"localhost_profile,omitempty" json:",omitempty" long:"localhost-profile" env:"@LOCALHOST_PROFILE" description:"The path to a seccomp profile on the node. Required when type is Localhost"`
}

// KubernetesAppArmorProfile configures an AppArmor profile: a profile
// type plus an optional localhost profile name.
type KubernetesAppArmorProfile struct {
	Type             string `toml:"type,omitempty" json:",omitempty" long:"type" env:"@TYPE" description:"The AppArmor profile type. Valid values: RuntimeDefault, Localhost, Unconfined. Requires Kubernetes >= 1.30"`
	LocalhostProfile string `toml:"localhost_profile,omitempty" json:",omitempty" long:"localhost-profile" env:"@LOCALHOST_PROFILE" description:"The name of an AppArmor profile on the node. Required when type is Localhost"`
}

// KubernetesPodSecurityContext holds pod-level security settings applied
// to the build pod (see the pod_security_context config section).
type KubernetesPodSecurityContext struct {
	FSGroup            *int64  `toml:"fs_group,omitempty" json:",omitempty" long:"fs-group" env:"KUBERNETES_POD_SECURITY_CONTEXT_FS_GROUP" description:"A special supplemental group that applies to all containers in a pod"`
	RunAsGroup         *int64  `toml:"run_as_group,omitempty" json:",omitempty" long:"run-as-group" env:"KUBERNETES_POD_SECURITY_CONTEXT_RUN_AS_GROUP" description:"The GID to run the entrypoint of the container process"`
	RunAsNonRoot       *bool   `toml:"run_as_non_root,omitempty" json:",omitempty" long:"run-as-non-root" env:"KUBERNETES_POD_SECURITY_CONTEXT_RUN_AS_NON_ROOT" description:"Indicates that the container must run as a non-root user"`
	RunAsUser          *int64  `toml:"run_as_user,omitempty" json:",omitempty" long:"run-as-user" env:"KUBERNETES_POD_SECURITY_CONTEXT_RUN_AS_USER" description:"The UID to run the entrypoint of the container process"`
	SupplementalGroups []int64 `toml:"supplemental_groups,omitempty" json:",omitempty" long:"supplemental-groups" description:"A list of groups applied to the first process run in each container, in addition to the container's primary GID"`
	SELinuxType        string  `toml:"selinux_type,omitempty" long:"selinux-type" description:"The SELinux type label that applies to all containers in a pod"`
	SeccompProfile     *KubernetesSeccompProfile  `toml:"seccomp_profile,omitempty" json:",omitempty" namespace:"seccomp_profile" description:"The seccomp profile for all containers in a pod"`
	AppArmorProfile    *KubernetesAppArmorProfile `toml:"app_armor_profile,omitempty" json:",omitempty" namespace:"app_armor_profile" description:"The AppArmor profile for all containers in a pod. Requires Kubernetes >= 1.30"`
}

// KubernetesContainerCapabilities lists Linux capabilities to add to and
// drop from a container.
type KubernetesContainerCapabilities struct {
	Add  []api.Capability `toml:"add" json:",omitempty" long:"add" env:"@ADD" description:"List of capabilities to add to the build container"`
	Drop []api.Capability `toml:"drop" json:",omitempty" long:"drop" env:"@DROP" description:"List of capabilities to drop from the build container"`
}

// KubernetesContainerSecurityContext holds per-container security
// settings; unset pointer fields fall back to the executor-level values
// (see GetContainerSecurityContext).
type KubernetesContainerSecurityContext struct {
	Capabilities *KubernetesContainerCapabilities `toml:"capabilities,omitempty" json:",omitempty" namespace:"capabilities" description:"The capabilities to add/drop when running the container"`
	Privileged   *bool   `toml:"privileged" json:",omitempty" long:"privileged" env:"@PRIVILEGED" description:"Run container in privileged mode"`
	RunAsUser    *int64  `toml:"run_as_user,omitempty" json:",omitempty" long:"run-as-user" env:"@RUN_AS_USER" description:"The UID to run the entrypoint of the container process" `
	RunAsGroup   *int64  `toml:"run_as_group,omitempty" json:",omitempty" long:"run-as-group" env:"@RUN_AS_GROUP" description:"The GID to run the entrypoint of the container process" `
	RunAsNonRoot *bool   `toml:"run_as_non_root,omitempty" json:",omitempty" long:"run-as-non-root" env:"@RUN_AS_NON_ROOT" description:"Indicates that the container must run as a non-root user"`
	ReadOnlyRootFilesystem *bool `toml:"read_only_root_filesystem" json:",omitempty" long:"read-only-root-filesystem" env:"@READ_ONLY_ROOT_FILESYSTEM" description:" Whether this container has a read-only root filesystem."`
	AllowPrivilegeEscalation *bool `toml:"allow_privilege_escalation"
json:",omitempty" long:"allow-privilege-escalation" env:"@ALLOW_PRIVILEGE_ESCALATION" description:"AllowPrivilegeEscalation controls whether a process can gain more privileges than its parent process"`
	SELinuxType string `toml:"selinux_type,omitempty" long:"selinux-type" description:"The SELinux type label that is associated with the container process"`
	ProcMount api.ProcMountType `toml:"proc_mount,omitempty" long:"proc-mount" env:"@PROC_MOUNT" description:"Denotes the type of proc mount to use for the container. Valid values: default | unmasked. Set to unmasked if this container will be used to build OCI images."`
	SeccompProfile *KubernetesSeccompProfile `toml:"seccomp_profile,omitempty" json:",omitempty" namespace:"seccomp_profile" description:"The seccomp profile for the container"`
	AppArmorProfile *KubernetesAppArmorProfile `toml:"app_armor_profile,omitempty" json:",omitempty" namespace:"app_armor_profile" description:"The AppArmor profile for the container. Requires Kubernetes >= 1.30"`
}

// getCapabilities merges executor-level cap_add/cap_drop with the supplied
// defaults. Defaults and CapDrop mark a capability as dropped, CapAdd as
// added; CapDrop is applied last, so it overrides CapAdd for the same
// capability. Returns nil when nothing is configured.
func (c *KubernetesConfig) getCapabilities(defaultCapDrop []string) *api.Capabilities {
	enabled := make(map[string]bool)

	for _, v := range defaultCapDrop {
		enabled[v] = false
	}
	for _, v := range c.CapAdd {
		enabled[v] = true
	}
	for _, v := range c.CapDrop {
		enabled[v] = false
	}

	if len(enabled) < 1 {
		return nil
	}
	return buildCapabilities(enabled)
}

// buildCapabilities converts the capability->enabled map into Add/Drop
// lists. Map iteration order is random, so list order is unspecified.
func buildCapabilities(enabled map[string]bool) *api.Capabilities {
	capabilities := new(api.Capabilities)
	for c, add := range enabled {
		if add {
			capabilities.Add = append(capabilities.Add, api.Capability(c))
			continue
		}
		capabilities.Drop = append(capabilities.Drop, api.Capability(c))
	}
	return capabilities
}

// getProcMount normalizes the configured proc_mount value (trimmed and
// title-cased) and returns it when it matches a known proc mount type.
// Invalid values are logged and ignored; an empty value returns nil.
func (c *KubernetesContainerSecurityContext) getProcMount() *api.ProcMountType {
	caser := cases.Title(language.English)
	pm := api.ProcMountType(caser.String(strings.TrimSpace(string(c.ProcMount))))

	switch pm {
	case api.DefaultProcMount, api.UnmaskedProcMount:
		return &pm
	case "":
		logrus.Debugf("proc-mount not set")
		return nil
	default:
		logrus.Errorf("invalid proc-mount value: %s", c.ProcMount)
		return nil
	}
}

// validateProfileType reports whether typ is one of the valid profile
// types for the given kind ("seccomp"/"apparmor"), logging an error when
// it is not.
func validateProfileType[T ~string](kind string, typ T, valid []T) bool {
	if !slices.Contains(valid, typ) {
		logrus.Errorf("invalid %s profile type value: %s", kind, typ)
		return false
	}
	return true
}

// requireLocalhostProfile returns a pointer to localhostProfile, or nil
// (logging an error) when it is empty although the Localhost profile type
// requires it.
func requireLocalhostProfile(kind, localhostProfile string) *string {
	if localhostProfile == "" {
		logrus.Errorf("%s profile type is Localhost but localhost_profile is not set", kind)
		return nil
	}
	return &localhostProfile
}

// Profile types accepted by the seccomp/AppArmor validation in toAPI.
var validSeccompProfileTypes = []api.SeccompProfileType{
	api.SeccompProfileTypeRuntimeDefault,
	api.SeccompProfileTypeUnconfined,
	api.SeccompProfileTypeLocalhost,
}

var validAppArmorProfileTypes = []api.AppArmorProfileType{
	api.AppArmorProfileTypeRuntimeDefault,
	api.AppArmorProfileTypeUnconfined,
	api.AppArmorProfileTypeLocalhost,
}

// toAPI converts the configured seccomp profile to the Kubernetes API
// type. Returns nil when the profile is unset, its type is invalid, or a
// Localhost profile lacks localhost_profile.
func (p *KubernetesSeccompProfile) toAPI() *api.SeccompProfile {
	if p == nil || p.Type == "" {
		return nil
	}

	typ := api.SeccompProfileType(p.Type)
	if !validateProfileType("seccomp", typ, validSeccompProfileTypes) {
		return nil
	}

	profile := &api.SeccompProfile{Type: typ}
	if typ == api.SeccompProfileTypeLocalhost {
		profile.LocalhostProfile = requireLocalhostProfile("seccomp", p.LocalhostProfile)
		if profile.LocalhostProfile == nil {
			return nil
		}
	}
	return profile
}

// toAPI converts the configured AppArmor profile to the Kubernetes API
// type, with the same nil semantics as the seccomp variant.
func (p *KubernetesAppArmorProfile) toAPI() *api.AppArmorProfile {
	if p == nil || p.Type == "" {
		return nil
	}

	typ := api.AppArmorProfileType(p.Type)
	if !validateProfileType("apparmor", typ, validAppArmorProfileTypes) {
		return nil
	}

	profile := &api.AppArmorProfile{Type: typ}
	if typ == api.AppArmorProfileTypeLocalhost {
		profile.LocalhostProfile = requireLocalhostProfile("apparmor", p.LocalhostProfile)
		if profile.LocalhostProfile == nil {
			return nil
		}
	}
	return profile
}

// GetContainerSecurityContext builds the effective api.SecurityContext
// for a container by overlaying the container-level settings on the
// executor-level (KubernetesConfig) fallbacks.
func (c *KubernetesConfig) GetContainerSecurityContext(
	securityContext KubernetesContainerSecurityContext,
	defaultCapDrop ...string,
) *api.SecurityContext {
	var seLinuxOptions *api.SELinuxOptions
	if
securityContext.SELinuxType != "" {
		seLinuxOptions = &api.SELinuxOptions{Type: securityContext.SELinuxType}
	}

	return &api.SecurityContext{
		Capabilities: mergeCapabilitiesAddDrop(
			c.getCapabilities(defaultCapDrop),
			securityContext.getCapabilities(),
		),
		// Container-level flags win over executor-level fallbacks when set.
		Privileged: getContainerSecurityContextEffectiveFlagValue(securityContext.Privileged, c.Privileged),
		AllowPrivilegeEscalation: getContainerSecurityContextEffectiveFlagValue(
			securityContext.AllowPrivilegeEscalation,
			c.AllowPrivilegeEscalation,
		),
		RunAsGroup:             securityContext.RunAsGroup,
		RunAsNonRoot:           securityContext.RunAsNonRoot,
		RunAsUser:              securityContext.RunAsUser,
		ReadOnlyRootFilesystem: securityContext.ReadOnlyRootFilesystem,
		ProcMount:              securityContext.getProcMount(),
		SELinuxOptions:         seLinuxOptions,
		SeccompProfile:         securityContext.SeccompProfile.toAPI(),
		AppArmorProfile:        securityContext.AppArmorProfile.toAPI(),
	}
}

// mergeCapabilitiesAddDrop merges capability sets in argument order: a
// later non-nil Add or Drop list replaces the earlier one wholesale (no
// element-wise union). Returns nil when neither list ends up set.
func mergeCapabilitiesAddDrop(capabilities ...*api.Capabilities) *api.Capabilities {
	merged := &api.Capabilities{}
	for _, c := range capabilities {
		if c == nil {
			continue
		}
		if c.Add != nil {
			merged.Add = c.Add
		}
		if c.Drop != nil {
			merged.Drop = c.Drop
		}
	}
	if merged.Add == nil && merged.Drop == nil {
		return nil
	}
	return merged
}

// getContainerSecurityContextEffectiveFlagValue returns the
// container-level flag when set, otherwise the executor-level fallback.
func getContainerSecurityContextEffectiveFlagValue(containerValue, fallbackValue *bool) *bool {
	if containerValue == nil {
		return fallbackValue
	}
	return containerValue
}

// getCapabilities copies the container-level capability lists into an
// api.Capabilities value, or returns nil when none are configured.
func (c *KubernetesContainerSecurityContext) getCapabilities() *api.Capabilities {
	capabilities := c.Capabilities
	if capabilities == nil {
		return nil
	}

	return &api.Capabilities{
		Add:  capabilities.Add,
		Drop: capabilities.Drop,
	}
}

// KubernetesAffinity groups node/pod (anti-)affinity settings for the
// build pod.
type KubernetesAffinity struct {
	NodeAffinity    *KubernetesNodeAffinity    `toml:"node_affinity,omitempty" json:"node_affinity,omitempty" long:"node-affinity" description:"Node affinity is conceptually similar to nodeSelector -- it allows you to constrain which nodes your pod is eligible to be scheduled on, based on labels on the node."`
	PodAffinity     *KubernetesPodAffinity     `toml:"pod_affinity,omitempty" json:"pod_affinity,omitempty" description:"Pod affinity allows to constrain which nodes your pod is eligible to be scheduled on based on the labels on other pods."`
	PodAntiAffinity *KubernetesPodAntiAffinity `toml:"pod_anti_affinity,omitempty" json:"pod_anti_affinity,omitempty" description:"Pod anti-affinity allows to constrain which nodes your pod is eligible to be scheduled on based on the labels on other pods."`
}

// KubernetesNodeAffinity mirrors the Kubernetes nodeAffinity structure.
type KubernetesNodeAffinity struct {
	RequiredDuringSchedulingIgnoredDuringExecution  *NodeSelector             `toml:"required_during_scheduling_ignored_during_execution,omitempty" json:"required_during_scheduling_ignored_during_execution,omitempty"`
	PreferredDuringSchedulingIgnoredDuringExecution []PreferredSchedulingTerm `toml:"preferred_during_scheduling_ignored_during_execution,omitempty" json:"preferred_during_scheduling_ignored_during_execution,omitempty"`
}

// KubernetesPodAffinity mirrors the Kubernetes podAffinity structure.
type KubernetesPodAffinity struct {
	RequiredDuringSchedulingIgnoredDuringExecution  []PodAffinityTerm         `toml:"required_during_scheduling_ignored_during_execution,omitempty" json:"required_during_scheduling_ignored_during_execution,omitempty"`
	PreferredDuringSchedulingIgnoredDuringExecution []WeightedPodAffinityTerm `toml:"preferred_during_scheduling_ignored_during_execution,omitempty" json:"preferred_during_scheduling_ignored_during_execution,omitempty"`
}

// KubernetesPodAntiAffinity mirrors the Kubernetes podAntiAffinity
// structure.
type KubernetesPodAntiAffinity struct {
	RequiredDuringSchedulingIgnoredDuringExecution  []PodAffinityTerm         `toml:"required_during_scheduling_ignored_during_execution,omitempty" json:"required_during_scheduling_ignored_during_execution,omitempty"`
	PreferredDuringSchedulingIgnoredDuringExecution []WeightedPodAffinityTerm `toml:"preferred_during_scheduling_ignored_during_execution,omitempty" json:"preferred_during_scheduling_ignored_during_execution,omitempty"`
}

// KubernetesHostAliases adds a custom host-to-IP mapping to the build pod.
type KubernetesHostAliases struct {
	IP        string `toml:"ip" json:"ip" long:"ip" description:"The IP address you want to attach hosts to"`
	Hostnames []string
`toml:"hostnames" json:"hostnames,omitempty" long:"hostnames" description:"A list of hostnames that will be attached to the IP"`
}

// KubernetesContainerLifecyle configures container lifecycle hooks.
// (The type name keeps its historical "Lifecyle" spelling; renaming it
// would break external references.)
// https://kubernetes.io/docs/reference/generated/kubernetes-api/v1.21/#lifecycle-v1-core
type KubernetesContainerLifecyle struct {
	PostStart *KubernetesLifecycleHandler `toml:"post_start,omitempty" json:"post_start,omitempty" description:"PostStart is called immediately after a container is created. If the handler fails, the container is terminated and restarted according to its restart policy. Other management of the container blocks until the hook completes"`
	PreStop   *KubernetesLifecycleHandler `toml:"pre_stop,omitempty" json:"pre_stop,omitempty" description:"PreStop is called immediately before a container is terminated due to an API request or management event such as liveness/startup probe failure, preemption, resource contention, etc. The handler is not called if the container crashes or exits. The reason for termination is passed to the handler. The Pod's termination grace period countdown begins before the PreStop hooked is executed. Regardless of the outcome of the handler, the container will eventually terminate within the Pod's termination grace period. Other management of the container blocks until the hook completes or until the termination grace period is reached"`
}

// KubernetesLifecycleHandler selects one of the supported hook actions.
type KubernetesLifecycleHandler struct {
	Exec      *KubernetesLifecycleExecAction `toml:"exec" json:"exec,omitempty" description:"Exec specifies the action to take"`
	HTTPGet   *KubernetesLifecycleHTTPGet    `toml:"http_get" json:"http_get,omitempty" description:"HTTPGet specifies the http request to perform."`
	TCPSocket *KubernetesLifecycleTCPSocket  `toml:"tcp_socket" json:"tcp_socket,omitempty" description:"TCPSocket specifies an action involving a TCP port"`
}

// KubernetesLifecycleExecAction runs a command inside the container.
type KubernetesLifecycleExecAction struct {
	Command []string `toml:"command" json:"command,omitempty" description:"Command is the command line to execute inside the container, the working directory for the command is root ('/') in the container's filesystem. The command is simply exec'd, it is not run inside a shell, so traditional shell instructions ('|', etc) won't work. To use a shell, you need to explicitly call out to that shell. Exit status of 0 is treated as live/healthy and non-zero is unhealthy"`
}

// KubernetesLifecycleHTTPGet performs an HTTP GET against the container.
type KubernetesLifecycleHTTPGet struct {
	Host        string                             `toml:"host" json:"host" description:"Host name to connect to, defaults to the pod IP. You probably want to set \"Host\" in httpHeaders instead"`
	HTTPHeaders []KubernetesLifecycleHTTPGetHeader `toml:"http_headers" json:"http_headers,omitempty" description:"Custom headers to set in the request. HTTP allows repeated headers"`
	Path        string                             `toml:"path" json:"path" description:"Path to access on the HTTP server"`
	Port        int                                `toml:"port" json:"port" description:"Number of the port to access on the container. Number must be in the range 1 to 65535"`
	Scheme      string                             `toml:"scheme" json:"scheme" description:"Scheme to use for connecting to the host. Defaults to HTTP"`
}

// KubernetesLifecycleHTTPGetHeader is one custom HTTP request header.
type KubernetesLifecycleHTTPGetHeader struct {
	Name  string `toml:"name" json:"name" description:"The header field name"`
	Value string `toml:"value" json:"value" description:"The header field value"`
}

// KubernetesLifecycleTCPSocket opens a TCP connection to the container.
type KubernetesLifecycleTCPSocket struct {
	Host string `toml:"host" json:"host" description:"Host name to connect to, defaults to the pod IP. You probably want to set \"Host\" in httpHeaders instead"`
	Port int    `toml:"port" json:"port" description:"Number of the port to access on the container. Number must be in the range 1 to 65535"`
}

// ToKubernetesLifecycleHandler converts our lifecycle structs to the ones from the Kubernetes API.
// We can't use them directly since they don't support toml.
func (h *KubernetesLifecycleHandler) ToKubernetesLifecycleHandler() *api.LifecycleHandler {
	kubeHandler := &api.LifecycleHandler{}

	if h.Exec != nil {
		kubeHandler.Exec = &api.ExecAction{
			Command: h.Exec.Command,
		}
	}
	if h.HTTPGet != nil {
		httpHeaders := []api.HTTPHeader{}
		for _, e := range h.HTTPGet.HTTPHeaders {
			httpHeaders = append(httpHeaders, api.HTTPHeader{
				Name:  e.Name,
				Value: e.Value,
			})
		}
		kubeHandler.HTTPGet = &api.HTTPGetAction{
			Host:        h.HTTPGet.Host,
			Port:        intstr.FromInt32(int32(h.HTTPGet.Port)),
			Path:        h.HTTPGet.Path,
			Scheme:      api.URIScheme(h.HTTPGet.Scheme),
			HTTPHeaders: httpHeaders,
		}
	}
	if h.TCPSocket != nil {
		kubeHandler.TCPSocket = &api.TCPSocketAction{
			Host: h.TCPSocket.Host,
			Port: intstr.FromInt32(int32(h.TCPSocket.Port)),
		}
	}
	return kubeHandler
}

// NodeSelector mirrors the Kubernetes nodeSelector structure used by
// required node affinity.
type NodeSelector struct {
	NodeSelectorTerms []NodeSelectorTerm `toml:"node_selector_terms" json:"node_selector_terms,omitempty"`
}

// PreferredSchedulingTerm is a weighted node selector preference.
type PreferredSchedulingTerm struct {
	Weight     int32            `toml:"weight" json:"weight"`
	Preference NodeSelectorTerm `toml:"preference" json:"preference"`
}

// WeightedPodAffinityTerm is a weighted pod (anti-)affinity preference.
type WeightedPodAffinityTerm struct {
	Weight          int32           `toml:"weight" json:"weight"`
	PodAffinityTerm PodAffinityTerm `toml:"pod_affinity_term" json:"pod_affinity_term"`
}

// NodeSelectorTerm combines match expressions and match fields.
type NodeSelectorTerm struct {
	MatchExpressions
[]NodeSelectorRequirement `toml:"match_expressions,omitempty" json:"match_expressions,omitempty"`
	MatchFields      []NodeSelectorRequirement `toml:"match_fields,omitempty" json:"match_fields,omitempty"`
}

// NodeSelectorRequirement is a single key/operator/values selector entry.
type NodeSelectorRequirement struct {
	Key      string   `toml:"key,omitempty" json:"key"`
	Operator string   `toml:"operator,omitempty" json:"operator"`
	Values   []string `toml:"values,omitempty" json:"values,omitempty"`
}

// PodAffinityTerm mirrors the Kubernetes podAffinityTerm structure.
type PodAffinityTerm struct {
	LabelSelector     *LabelSelector `toml:"label_selector,omitempty" json:"label_selector,omitempty"`
	Namespaces        []string       `toml:"namespaces,omitempty" json:"namespaces,omitempty"`
	TopologyKey       string         `toml:"topology_key,omitempty" json:"topology_key"`
	NamespaceSelector *LabelSelector `toml:"namespace_selector,omitempty" json:"namespace_selector,omitempty"`
}

// LabelSelector mirrors the Kubernetes labelSelector structure.
type LabelSelector struct {
	MatchLabels      map[string]string         `toml:"match_labels,omitempty" json:"match_labels,omitempty"`
	MatchExpressions []NodeSelectorRequirement `toml:"match_expressions,omitempty" json:"match_expressions,omitempty"`
}

// Service describes an additional service container started alongside the
// build container.
type Service struct {
	Name        string   `toml:"name" long:"name" description:"The image path for the service"`
	Alias       string   `toml:"alias,omitempty" long:"alias" description:"Space or comma-separated aliases of the service."`
	Command     []string `toml:"command" json:",omitempty" long:"command" description:"Command or script that should be used as the container’s command. Syntax is similar to https://docs.docker.com/engine/reference/builder/#cmd"`
	Entrypoint  []string `toml:"entrypoint" json:",omitempty" long:"entrypoint" description:"Command or script that should be executed as the container’s entrypoint. syntax is similar to https://docs.docker.com/engine/reference/builder/#entrypoint"`
	Environment []string `toml:"environment,omitempty" json:"environment,omitempty" long:"env" description:"Custom environment variables injected to service environment"`
}

// Aliases splits the Alias field on commas and whitespace into the list
// of service aliases.
func (s *Service) Aliases() []string {
	return strings.Fields(strings.ReplaceAll(s.Alias, ",", " "))
}

// ToImageDefinition converts the service config into a spec.Image.
// Parseable Environment entries become internal image variables; entries
// that fail to parse are skipped silently.
func (s *Service) ToImageDefinition() spec.Image {
	image := spec.Image{
		Name:       s.Name,
		Alias:      s.Alias,
		Command:    s.Command,
		Entrypoint: s.Entrypoint,
	}

	for _, environment := range s.Environment {
		if variable, err := parseVariable(environment); err == nil {
			variable.Internal = true
			image.Variables = append(image.Variables, variable)
		}
	}

	return image
}

// RunnerCredentials contains the endpoint, token, and TLS material used
// to talk to the GitLab instance.
type RunnerCredentials struct {
	URL             string    `toml:"url" json:"url" short:"u" long:"url" env:"CI_SERVER_URL" required:"true" description:"GitLab instance URL" jsonschema:"minLength=1"`
	ID              int64     `toml:"id" json:"id" description:"Runner ID"`
	Token           string    `toml:"token" json:"token" short:"t" long:"token" env:"CI_SERVER_TOKEN" required:"true" description:"Runner token" jsonschema:"minLength=1"`
	TokenObtainedAt time.Time `toml:"token_obtained_at" json:"token_obtained_at" description:"When the runner authentication token was obtained"`
	TokenExpiresAt  time.Time `toml:"token_expires_at" json:"token_expires_at" description:"Runner token expiration time"`
	TLSCAFile       string    `toml:"tls-ca-file,omitempty" json:"tls-ca-file" long:"tls-ca-file" env:"CI_SERVER_TLS_CA_FILE" description:"File containing the certificates to verify the peer when using HTTPS"`
	TLSCertFile     string    `toml:"tls-cert-file,omitempty" json:"tls-cert-file" long:"tls-cert-file" env:"CI_SERVER_TLS_CERT_FILE" description:"File containing certificate for TLS client auth when using HTTPS"`
	TLSKeyFile      string    `toml:"tls-key-file,omitempty" json:"tls-key-file" long:"tls-key-file" env:"CI_SERVER_TLS_KEY_FILE" description:"File containing private key for TLS client auth when using HTTPS"`
	// Logger is runtime-only state, excluded from TOML serialization.
	Logger logrus.FieldLogger `toml:"-"
json:",omitempty"` } type ArtifactConfig struct { UploadTimeout *time.Duration `toml:"upload_timeout,omitempty" json:"upload_timeout,omitempty"` ResponseHeaderTimeout *time.Duration `toml:"response_header_timeout,omitempty" json:"response_header_timeout,omitempty"` } func (a ArtifactConfig) GetUploadTimeout() time.Duration { if a.UploadTimeout == nil { return DefaultArtifactUploadTimeout } return *a.UploadTimeout } func (a ArtifactConfig) GetResponseHeaderTimeout() time.Duration { if a.ResponseHeaderTimeout == nil { return DefaultArtifactResponseHeaderTimeout } return *a.ResponseHeaderTimeout } // RunnerSettings contains the configuration fields for a runner worker. type RunnerSettings struct { Labels Labels `toml:"labels,omitempty" json:"labels,omitempty" description:"Custom labels for the runner worker. Duplicate keys will override any global defaults in this scope."` Executor string `toml:"executor" json:"executor" long:"executor" env:"RUNNER_EXECUTOR" required:"true" description:"Select executor, eg. 
shell, docker, etc."`
	BuildsDir string `toml:"builds_dir,omitempty" json:"builds_dir" long:"builds-dir" env:"RUNNER_BUILDS_DIR" description:"Directory where builds are stored"`
	CacheDir string `toml:"cache_dir,omitempty" json:"cache_dir" long:"cache-dir" env:"RUNNER_CACHE_DIR" description:"Directory where build cache is stored"`
	CloneURL string `toml:"clone_url,omitempty" json:"clone_url" long:"clone-url" env:"CLONE_URL" description:"Overwrite the default URL used to clone or fetch the git ref"`
	Environment []string `toml:"environment,omitempty" json:"environment,omitempty" long:"env" env:"RUNNER_ENV" description:"Custom environment variables injected to build environment"`
	ProxyExec *bool `toml:"proxy_exec,omitempty" json:"proxy_exec,omitempty" long:"proxy-exec" env:"RUNNER_PROXY_EXEC" description:"(Experimental) Proxy execution via helper binary"`
	PreGetSourcesScript string `toml:"pre_get_sources_script,omitempty" json:"pre_get_sources_script" long:"pre-get-sources-script" env:"RUNNER_PRE_GET_SOURCES_SCRIPT" description:"Runner-specific commands to be executed on the runner before updating the Git repository and updating submodules."`
	PostGetSourcesScript string `toml:"post_get_sources_script,omitempty" json:"post_get_sources_script" long:"post-get-sources-script" env:"RUNNER_POST_GET_SOURCES_SCRIPT" description:"Runner-specific commands to be executed on the runner after updating the Git repository and updating submodules."`
	PreBuildScript string `toml:"pre_build_script,omitempty" json:"pre_build_script" long:"pre-build-script" env:"RUNNER_PRE_BUILD_SCRIPT" description:"Runner-specific command script executed just before build executes"`
	PostBuildScript string `toml:"post_build_script,omitempty" json:"post_build_script" long:"post-build-script" env:"RUNNER_POST_BUILD_SCRIPT" description:"Runner-specific command script executed just after build executes"`
	PrepareTimeout *time.Duration `toml:"prepare_timeout,omitempty" json:"prepare_timeout,omitempty" 
long:"prepare-timeout" env:"RUNNER_PREPARE_TIMEOUT" description:"Timeout for the prepare stage of a job. Accepts duration strings like \"30s\" and \"1h30m\". Must not exceed the job timeout. Defaults to the job timeout."`
	DebugTraceDisabled bool `toml:"debug_trace_disabled,omitempty" json:"debug_trace_disabled" long:"debug-trace-disabled" env:"RUNNER_DEBUG_TRACE_DISABLED" description:"When set to true Runner will disable the possibility of using the CI_DEBUG_TRACE feature"`
	SafeDirectoryCheckout *bool `toml:"safe_directory_checkout,omitempty" json:"safe_directory_checkout,omitempty" long:"safe-directory-checkout" env:"RUNNER_SAFE_DIRECTORY_CHECKOUT" description:"When set to true, Git global configuration will get a safe.directory directive pointing the job's working directory'"`
	CleanGitConfig *bool `toml:"clean_git_config,omitempty" json:"clean_git_config,omitempty" long:"clean-git-config" env:"RUNNER_CLEAN_GIT_CONFIG" description:"Clean git configuration before and after the build. Defaults to true, except the shell executor is used or the git strategy is \"none\""`
	Shell string `toml:"shell,omitempty" json:"shell" long:"shell" env:"RUNNER_SHELL" description:"Select bash, sh, cmd, pwsh or powershell" jsonschema:"enum=bash,enum=sh,enum=cmd,enum=pwsh,enum=powershell,enum="`

	CustomBuildDir CustomBuildDir `toml:"custom_build_dir,omitempty" json:"custom_build_dir,omitempty" group:"custom build dir configuration" namespace:"custom_build_dir"`
	Referees *referees.Config `toml:"referees,omitempty" json:"referees,omitempty" group:"referees configuration" namespace:"referees"`
	Cache *cacheconfig.Config `toml:"cache,omitempty" json:"cache,omitempty" group:"cache configuration" namespace:"cache"`
	Artifact ArtifactConfig `toml:"artifact,omitempty" json:"artifact,omitempty"`

	// GracefulKillTimeout and ForceKillTimeout aren't exposed to the users yet
	// because not every executor supports it. We also have to keep in mind that
	// the CustomConfig has its configuration fields for termination so when
	// every executor supports graceful termination we should expose this single
	// configuration for all executors.
	GracefulKillTimeout *int `toml:"-" json:",omitempty"`
	ForceKillTimeout    *int `toml:"-" json:",omitempty"`

	FeatureFlags map[string]bool `toml:"feature_flags" json:"feature_flags,omitempty" long:"feature-flags" env:"FEATURE_FLAGS" description:"Enable/Disable feature flags https://docs.gitlab.com/runner/configuration/feature-flags/"`
	Monitoring *runner.Monitoring `toml:"monitoring,omitempty" json:"monitoring,omitempty" long:"runner-monitoring" description:"(Experimental) Monitoring configuration specific to this runner"`

	// Slot-based cgroup configuration
	UseSlotCgroups bool `toml:"use_slot_cgroups,omitempty" json:"use_slot_cgroups" long:"use-slot-cgroups" env:"RUNNER_USE_SLOT_CGROUPS" description:"Use slot-derived cgroup names for resource isolation"`
	SlotCgroupTemplate string `toml:"slot_cgroup_template,omitempty" json:"slot_cgroup_template" long:"slot-cgroup-template" env:"RUNNER_SLOT_CGROUP_TEMPLATE" description:"Template for slot-derived cgroup names (use ${slot} placeholder)"`

	// Per-executor sub-sections; the group tags tie each one to its executor.
	Instance *InstanceConfig `toml:"instance,omitempty" json:"instance,omitempty"`
	SSH *SshConfig `toml:"ssh,omitempty" json:"ssh,omitempty" group:"ssh executor" namespace:"ssh"`
	Docker *DockerConfig `toml:"docker,omitempty" json:"docker,omitempty" group:"docker executor" namespace:"docker"`
	Parallels *ParallelsConfig `toml:"parallels,omitempty" json:"parallels,omitempty" group:"parallels executor" namespace:"parallels"`
	VirtualBox *VirtualBoxConfig `toml:"virtualbox,omitempty" json:"virtualbox,omitempty" group:"virtualbox executor" namespace:"virtualbox"`
	Machine *DockerMachine `toml:"machine,omitempty" json:"machine,omitempty" group:"docker machine provider" namespace:"machine"`
	Kubernetes *KubernetesConfig `toml:"kubernetes,omitempty" json:"kubernetes,omitempty" 
group:"kubernetes executor" namespace:"kubernetes"`
	Custom *CustomConfig `toml:"custom,omitempty" json:"custom,omitempty" group:"custom executor" namespace:"custom"`
	Autoscaler *AutoscalerConfig `toml:"autoscaler,omitempty" json:",omitempty"`

	StepRunnerImage string `toml:"step_runner_image,omitempty" json:"step_runner_image" long:"step-runner-image" env:"STEP_RUNNER_IMAGE" description:"[ADVANCED] Override the default step-runner image used to inject the step-runner binary into the build container"`

	// this is the combined labels from global defaults and this specific runner's labels
	labels Labels
}

// RunnerConfig is the complete runtime representation of a runner worker,
// loaded from one [[runners]] entry in config.toml.
type RunnerConfig struct {
	Name string `toml:"name" json:"name" short:"name" long:"description" env:"RUNNER_NAME" description:"Runner name"`
	Limit int `toml:"limit,omitzero" json:"limit" long:"limit" env:"RUNNER_LIMIT" description:"Maximum number of builds processed by this runner"`
	OutputLimit int `toml:"output_limit,omitzero" long:"output-limit" env:"RUNNER_OUTPUT_LIMIT" description:"Maximum build trace size in kilobytes"`
	RequestConcurrency int `toml:"request_concurrency,omitzero" long:"request-concurrency" env:"RUNNER_REQUEST_CONCURRENCY" description:"Maximum concurrency for job requests" jsonschema:"min=1"`
	StrictCheckInterval *bool `toml:"strict_check_interval,omitzero" json:",omitempty" long:"strict-check-interval" env:"RUNNER_STRICT_CHECK_INTERVAL" description:"When you set StrictCheckInterval to true, the runner disables the faster-than-check_interval re-polling loop that occurs when a runner receives a job. 
Instead, the runner waits seconds before it polls again, even if additional jobs are available."`
	UnhealthyRequestsLimit int `toml:"unhealthy_requests_limit,omitzero" long:"unhealthy-requests-limit" env:"RUNNER_UNHEALTHY_REQUESTS_LIMIT" description:"The number of unhealthy responses to new job requests after which a runner worker is turned off."`
	// NOTE(review): the tag key below is uppercase "ENV" while the rest of
	// the file uses lowercase "env". If tag keys are matched case-sensitively
	// this environment binding is inert — confirm before relying on it.
	UnhealthyInterval *time.Duration `toml:"unhealthy_interval,omitzero" json:",omitempty" long:"unhealthy-interval" ENV:"RUNNER_UNHEALTHY_INTERVAL" description:"Duration that the runner worker is turned off after it exceeds the unhealthy requests limit. Supports syntax like '3600s' and '1h30min'."`
	// NOTE(review): the env name below is lowercase, unlike every other
	// RUNNER_* variable — looks like a typo; changing it would change the
	// accepted environment variable, so it is flagged rather than fixed here.
	JobStatusFinalUpdateRetryLimit int `toml:"job_status_final_update_retry_limit,omitzero" json:"job_status_final_update_retry_limit,omitzero" long:"job-status-final-update-retry-limit" env:"RUNNER_job_status_final_update_retry_limit" description:"The maximum number of times GitLab Runner can retry to push the final job status to the GitLab instance."`

	// Runtime-only metadata, excluded from config.toml (toml:"-").
	SystemID       string    `toml:"-" json:",omitempty"`
	ConfigLoadedAt time.Time `toml:"-" json:",omitempty"`
	ConfigDir      string    `toml:"-" json:",omitempty"`

	RunnerCredentials
	RunnerSettings
}

// SessionServer configures the interactive terminal session server.
type SessionServer struct {
	ListenAddress string `toml:"listen_address,omitempty" json:"listen_address" description:"Address that the runner will communicate directly with"`
	AdvertiseAddress string `toml:"advertise_address,omitempty" json:"advertise_address" description:"Address the runner will expose to the world to connect to the session server"`
	SessionTimeout int `toml:"session_timeout,omitempty" json:"session_timeout" description:"How long a terminal session can be active after a build completes, in seconds"`
}

// Config is the root of config.toml: global settings plus all runner entries.
type Config struct {
	ListenAddress string `toml:"listen_address,omitempty" json:"listen_address"`
	SessionServer SessionServer `toml:"session_server,omitempty" json:"session_server"`

	Labels Labels `toml:"labels,omitempty" json:"labels,omitempty" description:"Default custom labels for all runners."`
Concurrent int `toml:"concurrent" json:"concurrent"`
	CheckInterval int `toml:"check_interval" json:"check_interval" description:"Define active checking interval of jobs"`
	LogLevel *string `toml:"log_level" json:"log_level,omitempty" description:"Define log level (one of: panic, fatal, error, warning, info, debug)"`
	LogFormat *string `toml:"log_format" json:"log_format,omitempty" description:"Define log format (one of: runner, text, json)"`
	User string `toml:"user,omitempty" json:"user"`
	Runners []*RunnerConfig `toml:"runners" json:"runners,omitempty"`
	SentryDSN *string `toml:"sentry_dsn" json:",omitempty"`
	ConnectionMaxAge *time.Duration `toml:"connection_max_age,omitempty" json:"connection_max_age,omitempty"`
	// ModTime and Loaded are runtime bookkeeping set by LoadConfig.
	ModTime time.Time `toml:"-"`
	Loaded bool `toml:"-"`
	Machine *MachineConfig `toml:"machine,omitempty" json:"machine,omitempty"`
	Experimental *Experimental `toml:"experimental" json:"experimental,omitempty"`
	ShutdownTimeout int `toml:"shutdown_timeout,omitempty" json:"shutdown_timeout" description:"Number of seconds until the forceful shutdown operation times out and exits the process"`
	ConfigSaver ConfigSaver `toml:"-"`
}

// MachineConfig contains global configuration for the docker+machine executor provider.
type MachineConfig struct {
	ShutdownDrain *DockerMachineShutdownDrain `toml:"shutdown_drain,omitempty" json:"shutdown_drain,omitempty" description:"Configuration for draining idle machines on shutdown"`
}

// Experimental groups opt-in, unstable configuration.
type Experimental struct {
	UsageLogger UsageLogger `toml:"usage_logger" json:"usage_logger,omitempty"`
}

// UsageLogger configures the experimental usage logger and its rotation.
type UsageLogger struct {
	Enabled        bool              `toml:"enabled" json:"enabled"`
	LogDir         string            `toml:"log_dir,omitempty" json:"log_dir,omitempty"`
	MaxBackupFiles *int64            `toml:"max_backup_files,omitempty" json:"max_backup_files,omitempty"`
	MaxRotationAge *time.Duration    `toml:"max_rotation_age,omitempty" json:"max_rotation_age,omitempty"`
	Labels         map[string]string `toml:"labels,omitempty" json:"labels,omitempty"`
}

// ConfigSaver abstracts persisting serialized configuration data, allowing
// tests (or alternative backends) to intercept writes.
type ConfigSaver interface {
	Save(filePath string, data []byte) error
}

// defaultConfigSaver writes the configuration straight to disk.
type defaultConfigSaver struct{}

// Save writes data to filePath, creating the parent directory (mode 0700)
// when needed; the file itself is written with mode 0600.
func (s *defaultConfigSaver) Save(filePath string, data []byte) error {
	// create directory to store configuration
	err := os.MkdirAll(filepath.Dir(filePath), 0700)
	if err != nil {
		return fmt.Errorf("creating directory: %w", err)
	}

	// write config file
	err = os.WriteFile(filePath, data, 0o600)
	if err != nil {
		return fmt.Errorf("saving the file: %w", err)
	}

	return nil
}

// CustomBuildDir controls whether jobs may choose their own build directory.
type CustomBuildDir struct {
	Enabled *bool `toml:"enabled,omitempty" json:"enabled,omitempty" long:"enabled" env:"CUSTOM_BUILD_DIR_ENABLED" description:"Enable job specific build directories"`
}

// ComputeLabels merges the global default labels with this runner's own
// labels (runner-level keys win) and caches the result for ComputedLabels.
func (r *RunnerSettings) ComputeLabels(globalDefaults Labels) {
	r.labels = make(Labels)
	for k, v := range globalDefaults {
		r.labels[k] = v
	}
	for k, v := range r.Labels {
		r.labels[k] = v
	}
}

// ComputedLabels returns the merged label set built by ComputeLabels.
func (r *RunnerSettings) ComputedLabels() Labels {
	return r.labels
}

// GetGracefulKillTimeout returns GracefulKillTimeout (seconds) as a
// duration, defaulting to process.GracefulTimeout when unset/non-positive.
func (r *RunnerSettings) GetGracefulKillTimeout() time.Duration {
	return getDuration(r.GracefulKillTimeout, process.GracefulTimeout)
}

// GetForceKillTimeout returns ForceKillTimeout (seconds) as a duration,
// defaulting to process.KillTimeout when unset/non-positive.
func (r *RunnerSettings) GetForceKillTimeout() time.Duration {
	return getDuration(r.ForceKillTimeout, process.KillTimeout)
}

// IsFeatureFlagOn checks if the specified feature flag is on. If the feature
// flag is not configured it will return the default value.
func (r *RunnerSettings) IsFeatureFlagOn(name string) bool {
	if r.IsFeatureFlagDefined(name) {
		return r.FeatureFlags[name]
	}

	for _, ff := range featureflags.GetAll() {
		if ff.Name == name {
			return ff.DefaultValue
		}
	}

	// Unknown flag name: treated as off.
	return false
}

// IsFeatureFlagDefined checks if the feature flag is defined in the runner
// configuration.
func (r *RunnerSettings) IsFeatureFlagDefined(name string) bool {
	_, ok := r.FeatureFlags[name]
	return ok
}

// getDuration converts an optional seconds count to a time.Duration,
// falling back to defaultValue when nil or non-positive.
func getDuration(source *int, defaultValue time.Duration) time.Duration {
	if source == nil {
		return defaultValue
	}

	timeout := *source
	if timeout <= 0 {
		return defaultValue
	}

	return time.Duration(timeout) * time.Second
}

// GetSessionTimeout returns the configured terminal session timeout, or
// DefaultSessionTimeout when unset or non-positive.
func (c *SessionServer) GetSessionTimeout() time.Duration {
	if c.SessionTimeout > 0 {
		return time.Duration(c.SessionTimeout) * time.Second
	}

	return DefaultSessionTimeout
}

// SshConfig holds connection settings for the ssh executor.
type SshConfig struct {
	User string `toml:"user,omitempty" json:"user,omitempty" long:"user" env:"SSH_USER" description:"User name"`
	Password string `toml:"password,omitempty" json:"password,omitempty" long:"password" env:"SSH_PASSWORD" description:"User password"`
	Host string `toml:"host,omitempty" json:"host,omitempty" long:"host" env:"SSH_HOST" description:"Remote host"`
	Port string `toml:"port,omitempty" json:"port,omitempty" long:"port" env:"SSH_PORT" description:"Remote host port"`
	IdentityFile string `toml:"identity_file,omitempty" json:"identity_file,omitempty" long:"identity-file" env:"SSH_IDENTITY_FILE" description:"Identity file to be used"`
	DisableStrictHostKeyChecking *bool `toml:"disable_strict_host_key_checking,omitempty" json:"disable_strict_host_key_checking,omitempty" long:"disable-strict-host-key-checking" env:"DISABLE_STRICT_HOST_KEY_CHECKING" description:"Disable SSH strict host key checking"`
	KnownHostsFile string `toml:"known_hosts_file,omitempty" json:"known_hosts_file,omitempty" long:"known-hosts-file" env:"KNOWN_HOSTS_FILE" description:"Location of 
known_hosts file. Defaults to ~/.ssh/known_hosts"`
}

// ShouldDisableStrictHostKeyChecking reports whether strict host key
// checking was explicitly disabled in the configuration.
func (c *SshConfig) ShouldDisableStrictHostKeyChecking() bool {
	return c.DisableStrictHostKeyChecking != nil && *c.DisableStrictHostKeyChecking
}

// computeNanoCPUs parses a (possibly fractional) CPU count such as "0.5"
// and converts it to Docker's NanoCPU unit (1 CPU == 1e9). Empty input
// yields 0 with no error.
func (c *DockerConfig) computeNanoCPUs(value string) (int64, error) {
	if value == "" {
		return 0, nil
	}

	cpu, ok := new(big.Rat).SetString(value)
	if !ok {
		return 0, fmt.Errorf("failed to parse %s as a rational number", value)
	}

	nano, _ := cpu.Mul(cpu, big.NewRat(1e9, 1)).Float64()

	return int64(nano), nil
}

// GetNanoCPUs converts the build-container CPUS setting to NanoCPUs.
func (c *DockerConfig) GetNanoCPUs() (int64, error) {
	return c.computeNanoCPUs(c.CPUS)
}

// GetServiceNanoCPUs converts the service-container CPUS setting to NanoCPUs.
func (c *DockerConfig) GetServiceNanoCPUs() (int64, error) {
	return c.computeNanoCPUs(c.ServiceCPUS)
}

// getMemoryBytes parses a human-readable RAM size ("512m", "4g") into bytes.
// NOTE: a malformed value terminates the process via logrus.Fatalf.
func (c *DockerConfig) getMemoryBytes(size string, fieldName string) int64 {
	if size == "" {
		return 0
	}

	bytes, err := units.RAMInBytes(size)
	if err != nil {
		logrus.Fatalf("Error parsing docker %s: %s", fieldName, err)
	}

	return bytes
}

// GetMemory returns the build-container memory limit in bytes.
func (c *DockerConfig) GetMemory() int64 {
	return c.getMemoryBytes(c.Memory, "memory")
}

// GetMemorySwap returns the build-container memory+swap limit in bytes.
func (c *DockerConfig) GetMemorySwap() int64 {
	return c.getMemoryBytes(c.MemorySwap, "memory_swap")
}

// GetMemoryReservation returns the build-container soft memory limit in bytes.
func (c *DockerConfig) GetMemoryReservation() int64 {
	return c.getMemoryBytes(c.MemoryReservation, "memory_reservation")
}

// GetServiceMemory returns the service-container memory limit in bytes.
func (c *DockerConfig) GetServiceMemory() int64 {
	return c.getMemoryBytes(c.ServiceMemory, "service_memory")
}

// GetServiceMemorySwap returns the service-container memory+swap limit in bytes.
func (c *DockerConfig) GetServiceMemorySwap() int64 {
	return c.getMemoryBytes(c.ServiceMemorySwap, "service_memory_swap")
}

// GetServiceMemoryReservation returns the service-container soft memory
// limit in bytes.
func (c *DockerConfig) GetServiceMemoryReservation() int64 {
	return c.getMemoryBytes(c.ServiceMemoryReservation, "service_memory_reservation")
}

// GetOomKillDisable returns a pointer to the OomKillDisable flag.
func (c *DockerConfig) GetOomKillDisable() *bool {
	return &c.OomKillDisable
}

// getExpandedServices expands job variables in each service's Name and
// Alias, returning a new slice (range copies mean the input is not mutated).
func getExpandedServices(services []Service, vars spec.Variables) []Service {
	result := []Service{}
	for _, s := range services {
		s.Name = vars.ExpandValue(s.Name)
		s.Alias = vars.ExpandValue(s.Alias)
		result = append(result, s)
	}
	return result
}

// GetExpandedServices returns the executor-configured services, with the values expanded. This is necessary because
// some of the values in service definition can point to job variables, so the final value is job-dependant.
// See: https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29499
func (c *DockerConfig) GetExpandedServices(vars spec.Variables) []Service {
	return getExpandedServices(c.Services, vars)
}

// GetServicesLimit returns ServicesLimit, or -1 when unset.
func (c *DockerConfig) GetServicesLimit() int {
	if c.ServicesLimit == nil {
		return -1
	}
	return *c.ServicesLimit
}

// GetLogConfig returns the LogConfig for build containers
func (c *DockerConfig) GetLogConfig() (container.LogConfig, error) {
	logConfig := container.LogConfig{
		Type: "json-file",
	}

	if c == nil || len(c.LogOptions) == 0 {
		return logConfig, nil
	}

	// Only the "env" and "labels" json-file options are accepted; anything
	// else is reported as an error.
	var invalidKeys []string
	var allowedKeys = []string{"env", "labels"}
	for key := range c.LogOptions {
		if !slices.Contains(allowedKeys, key) {
			invalidKeys = append(invalidKeys, key)
		}
	}
	slices.Sort(invalidKeys) // to get stable error outputs
	if len(invalidKeys) > 0 {
		return logConfig, fmt.Errorf("invalid log options: only %q are allowed, but found: %q", allowedKeys, invalidKeys)
	}

	logConfig.Config = c.LogOptions
	return logConfig, nil
}

// GetPollTimeout returns PollTimeout, lazily replacing non-positive values
// with the KubernetesPollTimeout default (mutates the receiver).
func (c *KubernetesConfig) GetPollTimeout() int {
	if c.PollTimeout <= 0 {
		c.PollTimeout = KubernetesPollTimeout
	}
	return c.PollTimeout
}

// GetPollInterval returns PollInterval, lazily replacing non-positive
// values with the KubernetesPollInterval default (mutates the receiver).
func (c *KubernetesConfig) GetPollInterval() int {
	if c.PollInterval <= 0 {
		c.PollInterval = KubernetesPollInterval
	}
	return c.PollInterval
}

// GetPollAttempts derives the number of poll attempts from timeout/interval.
func (c *KubernetesConfig) GetPollAttempts() int {
	return c.GetPollTimeout() / c.GetPollInterval()
}

// GetCleanupResourcesTimeout returns the resource-cleanup timeout,
// defaulting to KubernetesCleanupResourcesTimeout when unset/non-positive.
func (c *KubernetesConfig) GetCleanupResourcesTimeout() time.Duration {
	if c.CleanupResourcesTimeout == nil || c.CleanupResourcesTimeout.Seconds() <= 0 {
		return KubernetesCleanupResourcesTimeout
	}
	return *c.CleanupResourcesTimeout
}

// GetResourceAvailabilityCheckMaxAttempts returns the configured attempt
// count, lazily replacing negative values with the default (mutates the
// receiver). Note that zero is returned as-is.
func (c *KubernetesConfig) GetResourceAvailabilityCheckMaxAttempts() int {
	if c.ResourceAvailabilityCheckMaxAttempts < 0 {
		c.ResourceAvailabilityCheckMaxAttempts = KubernetesResourceAvailabilityCheckMaxAttempts
	}
	return c.ResourceAvailabilityCheckMaxAttempts
}

// GetNodeTolerations converts the configured toleration map (key or
// "key=value" mapped to an effect) into Kubernetes API tolerations. Keys
// containing "=" use the Equal operator; bare keys use Exists.
func (c *KubernetesConfig) GetNodeTolerations() []api.Toleration {
	var tolerations []api.Toleration

	for toleration, effect := range c.NodeTolerations {
		newToleration := api.Toleration{
			Effect: api.TaintEffect(effect),
		}

		if strings.Contains(toleration, "=") {
			parts := strings.Split(toleration, "=")
			newToleration.Key = parts[0]
			if len(parts) > 1 {
				newToleration.Value = parts[1]
			}
			newToleration.Operator = api.TolerationOpEqual
		} else {
			newToleration.Key = toleration
			newToleration.Operator = api.TolerationOpExists
		}

		tolerations = append(tolerations, newToleration)
	}

	return tolerations
}

// GetPodSecurityContext maps the configured pod security context to the
// Kubernetes API type, returning nil when nothing was configured.
func (c *KubernetesConfig) GetPodSecurityContext() *api.PodSecurityContext {
	podSecurityContext := c.PodSecurityContext

	if podSecurityContext.FSGroup == nil &&
		podSecurityContext.RunAsGroup == nil &&
		podSecurityContext.RunAsNonRoot == nil &&
		podSecurityContext.RunAsUser == nil &&
		len(podSecurityContext.SupplementalGroups) == 0 &&
		podSecurityContext.SELinuxType == "" &&
		podSecurityContext.SeccompProfile == nil &&
		podSecurityContext.AppArmorProfile == nil {
		return nil
	}

	var seLinuxOptions *api.SELinuxOptions
	if podSecurityContext.SELinuxType != "" {
		seLinuxOptions = &api.SELinuxOptions{Type: podSecurityContext.SELinuxType}
	}

	return &api.PodSecurityContext{
		FSGroup:            podSecurityContext.FSGroup,
		RunAsGroup:         podSecurityContext.RunAsGroup,
		RunAsNonRoot:       podSecurityContext.RunAsNonRoot,
		RunAsUser:          podSecurityContext.RunAsUser,
		SupplementalGroups: podSecurityContext.SupplementalGroups,
		SELinuxOptions:     seLinuxOptions,
		SeccompProfile:     podSecurityContext.SeccompProfile.toAPI(),
		AppArmorProfile:    podSecurityContext.AppArmorProfile.toAPI(),
	}
}

// GetAffinity assembles the job pod's node/pod/anti-affinity rules from the
// configured affinity sections.
func (c *KubernetesConfig) GetAffinity() *api.Affinity {
	var affinity api.Affinity

	if c.Affinity.NodeAffinity != nil {
		affinity.NodeAffinity = c.GetNodeAffinity()
	}
	if c.Affinity.PodAffinity != nil {
		affinity.PodAffinity = c.GetPodAffinity()
	}
	if
c.Affinity.PodAntiAffinity != nil {
		affinity.PodAntiAffinity = c.GetPodAntiAffinity()
	}

	return &affinity
}

// GetDNSConfig maps the configured pod DNS settings to the Kubernetes API
// type, returning nil when nothing was configured.
func (c *KubernetesConfig) GetDNSConfig() *api.PodDNSConfig {
	if len(c.DNSConfig.Nameservers) == 0 && len(c.DNSConfig.Searches) == 0 && len(c.DNSConfig.Options) == 0 {
		return nil
	}

	var config api.PodDNSConfig
	config.Nameservers = c.DNSConfig.Nameservers
	config.Searches = c.DNSConfig.Searches

	for _, opt := range c.DNSConfig.Options {
		config.Options = append(config.Options, api.PodDNSConfigOption{
			Name:  opt.Name,
			Value: opt.Value,
		})
	}

	return &config
}

// GetNodeAffinity converts the configured node affinity (required and
// preferred scheduling terms) to the Kubernetes API type.
func (c *KubernetesConfig) GetNodeAffinity() *api.NodeAffinity {
	var nodeAffinity api.NodeAffinity

	if c.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution != nil {
		nodeSelector := c.Affinity.NodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.GetNodeSelector()
		nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution = nodeSelector
	}

	for _, preferred := range c.Affinity.NodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution {
		nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution = append(
			nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution,
			preferred.GetPreferredSchedulingTerm(),
		)
	}

	return &nodeAffinity
}

// GetContainerLifecycle returns the container lifecycle configuration
func (c *KubernetesConfig) GetContainerLifecycle() KubernetesContainerLifecyle {
	return c.ContainerLifecycle
}

// GetNodeSelector converts all configured node selector terms to the
// Kubernetes API node selector.
func (c *NodeSelector) GetNodeSelector() *api.NodeSelector {
	var nodeSelector api.NodeSelector
	for _, selector := range c.NodeSelectorTerms {
		nodeSelector.NodeSelectorTerms = append(nodeSelector.NodeSelectorTerms, selector.GetNodeSelectorTerm())
	}
	return &nodeSelector
}

// GetNodeSelectorRequirement converts one requirement to the API type.
func (c *NodeSelectorRequirement) GetNodeSelectorRequirement() api.NodeSelectorRequirement {
	return api.NodeSelectorRequirement{
		Key:      c.Key,
		Operator: api.NodeSelectorOperator(c.Operator),
		Values:   c.Values,
	}
}

// GetLabelSelectorMatchExpressions converts the configured match
// expressions to metav1 label selector requirements.
func (c *LabelSelector) GetLabelSelectorMatchExpressions() []metav1.LabelSelectorRequirement {
	var labelSelectorRequirement []metav1.LabelSelectorRequirement

	for _, label := range c.MatchExpressions {
		expression := metav1.LabelSelectorRequirement{
			Key:      label.Key,
			Operator: metav1.LabelSelectorOperator(label.Operator),
			Values:   label.Values,
		}
		labelSelectorRequirement = append(labelSelectorRequirement, expression)
	}

	return labelSelectorRequirement
}

// GetPodAffinity converts required and preferred pod affinity terms to the
// Kubernetes API type.
func (c *KubernetesConfig) GetPodAffinity() *api.PodAffinity {
	var podAffinity api.PodAffinity

	for _, required := range c.Affinity.PodAffinity.RequiredDuringSchedulingIgnoredDuringExecution {
		podAffinity.RequiredDuringSchedulingIgnoredDuringExecution = append(
			podAffinity.RequiredDuringSchedulingIgnoredDuringExecution,
			required.GetPodAffinityTerm(),
		)
	}

	for _, preferred := range c.Affinity.PodAffinity.PreferredDuringSchedulingIgnoredDuringExecution {
		podAffinity.PreferredDuringSchedulingIgnoredDuringExecution = append(
			podAffinity.PreferredDuringSchedulingIgnoredDuringExecution,
			preferred.GetWeightedPodAffinityTerm(),
		)
	}

	return &podAffinity
}

// GetPodAntiAffinity converts required and preferred pod anti-affinity
// terms to the Kubernetes API type.
func (c *KubernetesConfig) GetPodAntiAffinity() *api.PodAntiAffinity {
	var podAntiAffinity api.PodAntiAffinity

	for _, required := range c.Affinity.PodAntiAffinity.RequiredDuringSchedulingIgnoredDuringExecution {
		podAntiAffinity.RequiredDuringSchedulingIgnoredDuringExecution = append(
			podAntiAffinity.RequiredDuringSchedulingIgnoredDuringExecution,
			required.GetPodAffinityTerm(),
		)
	}

	for _, preferred := range c.Affinity.PodAntiAffinity.PreferredDuringSchedulingIgnoredDuringExecution {
		podAntiAffinity.PreferredDuringSchedulingIgnoredDuringExecution = append(
			podAntiAffinity.PreferredDuringSchedulingIgnoredDuringExecution,
			preferred.GetWeightedPodAffinityTerm(),
		)
	}

	return &podAntiAffinity
}

// GetPodAffinityTerm converts one pod affinity term to the API type.
func (c *PodAffinityTerm) GetPodAffinityTerm() api.PodAffinityTerm {
	return api.PodAffinityTerm{
		LabelSelector:     c.GetLabelSelector(),
		Namespaces:        c.Namespaces,
		TopologyKey:       c.TopologyKey,
		NamespaceSelector: c.GetNamespaceSelector(),
	}
}

// GetWeightedPodAffinityTerm converts one weighted term to the API type.
func (c *WeightedPodAffinityTerm) GetWeightedPodAffinityTerm() api.WeightedPodAffinityTerm {
	return api.WeightedPodAffinityTerm{
		Weight:          c.Weight,
		PodAffinityTerm: c.PodAffinityTerm.GetPodAffinityTerm(),
	}
}

// GetNodeSelectorTerm converts match expressions and match fields to the
// Kubernetes API node selector term.
func (c *NodeSelectorTerm) GetNodeSelectorTerm() api.NodeSelectorTerm {
	nodeSelectorTerm := api.NodeSelectorTerm{}

	for _, expression := range c.MatchExpressions {
		nodeSelectorTerm.MatchExpressions = append(
			nodeSelectorTerm.MatchExpressions,
			expression.GetNodeSelectorRequirement(),
		)
	}
	for _, fields := range c.MatchFields {
		nodeSelectorTerm.MatchFields = append(
			nodeSelectorTerm.MatchFields,
			fields.GetNodeSelectorRequirement(),
		)
	}

	return nodeSelectorTerm
}

// GetPreferredSchedulingTerm converts a weighted node preference to the API type.
func (c *PreferredSchedulingTerm) GetPreferredSchedulingTerm() api.PreferredSchedulingTerm {
	return api.PreferredSchedulingTerm{
		Weight:     c.Weight,
		Preference: c.Preference.GetNodeSelectorTerm(),
	}
}

// GetLabelSelector converts the term's label selector, or nil when unset.
func (c *PodAffinityTerm) GetLabelSelector() *metav1.LabelSelector {
	if c.LabelSelector == nil {
		return nil
	}

	return &metav1.LabelSelector{
		MatchLabels:      c.LabelSelector.MatchLabels,
		MatchExpressions: c.LabelSelector.GetLabelSelectorMatchExpressions(),
	}
}

// GetNamespaceSelector converts the term's namespace selector, or nil when unset.
func (c *PodAffinityTerm) GetNamespaceSelector() *metav1.LabelSelector {
	if c.NamespaceSelector == nil {
		return nil
	}

	return &metav1.LabelSelector{
		MatchLabels:      c.NamespaceSelector.MatchLabels,
		MatchExpressions: c.NamespaceSelector.GetLabelSelectorMatchExpressions(),
	}
}

// GetHostAliases converts configured host aliases to the API type.
func (c *KubernetesConfig) GetHostAliases() []api.HostAlias {
	var hostAliases []api.HostAlias

	for _, hostAlias := range c.HostAliases {
		hostAliases = append(
			hostAliases,
			api.HostAlias{
				IP:        hostAlias.IP,
				Hostnames: hostAlias.Hostnames,
			},
		)
	}

	return hostAliases
}

// GetExpandedServices returns the executor-configured services, with the values expanded. This is necessary because
// some of the values in service definition can point to job variables, so the final value is job-dependant.
// See: https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29499
func (c *KubernetesConfig) GetExpandedServices(vars spec.Variables) []Service {
	return getExpandedServices(c.Services, vars)
}

// GetPrintPodWarningEvents defaults to true when the option is unset.
func (c *KubernetesConfig) GetPrintPodWarningEvents() bool {
	if c.PrintPodWarningEvents == nil {
		return true
	}
	return *c.PrintPodWarningEvents
}

// GetPodDisruptionBudget defaults to false when the option is unset.
func (c *KubernetesConfig) GetPodDisruptionBudget() bool {
	if c.PodDisruptionBudget == nil {
		return false
	}
	return *c.PodDisruptionBudget
}

// GetIdleCount returns IdleCount from the currently-active autoscaling
// section, or the top-level value when no section matches.
func (c *DockerMachine) GetIdleCount() int {
	autoscaling := c.getActiveAutoscalingConfig()
	if autoscaling != nil {
		return autoscaling.IdleCount
	}

	return c.IdleCount
}

// GetIdleCountMin behaves like GetIdleCount, for IdleCountMin.
func (c *DockerMachine) GetIdleCountMin() int {
	autoscaling := c.getActiveAutoscalingConfig()
	if autoscaling != nil {
		return autoscaling.IdleCountMin
	}

	return c.IdleCountMin
}

// GetIdleScaleFactor behaves like GetIdleCount, for IdleScaleFactor.
func (c *DockerMachine) GetIdleScaleFactor() float64 {
	autoscaling := c.getActiveAutoscalingConfig()
	if autoscaling != nil {
		return autoscaling.IdleScaleFactor
	}

	return c.IdleScaleFactor
}

// GetIdleTime behaves like GetIdleCount, for IdleTime.
func (c *DockerMachine) GetIdleTime() int {
	autoscaling := c.getActiveAutoscalingConfig()
	if autoscaling != nil {
		return autoscaling.IdleTime
	}

	return c.IdleTime
}

// getActiveAutoscalingConfig returns the autoscaling config matching the current time.
// It goes through the [[docker.machine.autoscaling]] entries and returns the last one to match.
// Returns nil on no matching entries.
func (c *DockerMachine) getActiveAutoscalingConfig() *DockerMachineAutoscaling { var activeConf *DockerMachineAutoscaling for _, conf := range c.AutoscalingConfigs { if conf.compiledPeriods.InPeriod() { activeConf = conf } } return activeConf } func (c *DockerMachine) CompilePeriods() error { var err error for _, a := range c.AutoscalingConfigs { err = a.compilePeriods() if err != nil { return err } } return nil } var periodTimer = time.Now func (a *DockerMachineAutoscaling) compilePeriods() error { periods, err := timeperiod.TimePeriodsWithTimer(a.Periods, a.Timezone, periodTimer) if err != nil { return NewInvalidTimePeriodsError(a.Periods, err) } a.compiledPeriods = periods return nil } func (c *DockerMachine) logDeprecationWarning() { if len(c.OffPeakPeriods) != 0 { logrus.Warning("OffPeak docker machine configuration is deprecated and has been removed since 14.0. " + "Please convert the setting into a [[docker.machine.autoscaling]] configuration instead: " + "https://docs.gitlab.com/runner/configuration/autoscale/#off-peak-time-mode-configuration-deprecated") } } const ( defaultShutdownDrainConcurrency = 3 defaultShutdownDrainMaxRetries = 3 defaultShutdownDrainRetryBackoff = 5 * time.Second ) func (c DockerMachineShutdownDrain) IsEnabled() bool { return c.Enabled } func (c DockerMachineShutdownDrain) GetConcurrency() int { if c.Concurrency <= 0 { return defaultShutdownDrainConcurrency } return c.Concurrency } func (c DockerMachineShutdownDrain) GetMaxRetries() int { if c.MaxRetries <= 0 { return defaultShutdownDrainMaxRetries } return c.MaxRetries } func (c DockerMachineShutdownDrain) GetRetryBackoff() time.Duration { if c.RetryBackoff <= 0 { return defaultShutdownDrainRetryBackoff } return c.RetryBackoff } func (c *RunnerCredentials) GetURL() string { return c.URL } func (c *RunnerCredentials) GetTLSCAFile() string { return c.TLSCAFile } func (c *RunnerCredentials) GetTLSCertFile() string { return c.TLSCertFile } func (c *RunnerCredentials) GetTLSKeyFile() 
string { return c.TLSKeyFile } func (c *RunnerCredentials) GetToken() string { return c.Token } func (c *RunnerCredentials) ShortDescription() string { return helpers.ShortenToken(c.Token) } func (c *RunnerCredentials) UniqueID() string { // Shorten the token to ensure that it won't be exposed in logged messages. token := helpers.ShortenToken(c.Token) return c.URL + token } func (c *RunnerCredentials) SameAs(other *RunnerCredentials) bool { if c.Token != other.Token { return false } if wildcardURL(c.URL) || wildcardURL(other.URL) { return true } return c.URL == other.URL } func (c *RunnerConfig) String() string { return fmt.Sprintf("%v url=%v token=%v executor=%v", c.Name, c.URL, c.Token, c.Executor) } func (c *RunnerConfig) WarnOnLegacyCIURL() { if strings.HasSuffix(strings.TrimRight(c.URL, "/"), "/ci") { c.Log().Warning("The runner URL contains a legacy '/ci' suffix.\n" + " This suffix is deprecated and should be removed from the configuration.\n" + " Git submodules may fail to clone with authentication errors if this suffix is present.\n" + " Please update the 'url' field in your config.toml to remove the '/ci' suffix.\n" + " See https://docs.gitlab.com/runner/configuration/advanced-configuration.html#legacy-ci-url-suffix") } } func (c *RunnerConfig) GetSystemID() string { if c.SystemID == "" { return UnknownSystemID } return c.SystemID } func (c *RunnerConfig) GetUnhealthyRequestsLimit() int { if c.UnhealthyRequestsLimit < 1 { return DefaultUnhealthyRequestsLimit } return c.UnhealthyRequestsLimit } func (c *RunnerConfig) GetJobStatusFinalUpdateRetryLimit() int { if c.JobStatusFinalUpdateRetryLimit < 1 { return DefaultFinalUpdateRetryLimit } return c.JobStatusFinalUpdateRetryLimit } func (c *RunnerConfig) GetUnhealthyInterval() time.Duration { if c.UnhealthyInterval == nil { return DefaultUnhealthyInterval } return *c.UnhealthyInterval } func (c *RunnerConfig) GetRequestConcurrency() int { return max(1, c.RequestConcurrency) } func (c *RunnerConfig) 
GetStrictCheckInterval() bool { if c.StrictCheckInterval == nil { return false } return *c.StrictCheckInterval } func (c *RunnerConfig) GetVariables() spec.Variables { variables := spec.Variables{ {Key: "CI_RUNNER_SHORT_TOKEN", Value: c.ShortDescription(), Public: true, Internal: true, File: false}, } for _, environment := range c.Environment { if variable, err := parseVariable(environment); err == nil { variable.Internal = true variables = append(variables, variable) } } return variables } func (c *RunnerConfig) IsProxyExec() bool { if c.ProxyExec != nil { return *c.ProxyExec } return false } func (c *RunnerConfig) Log() *logrus.Entry { logger := c.Logger if logger == nil { logger = logrus.StandardLogger() } entry := logger.WithFields(logrus.Fields{}) if c.ShortDescription() != "" { entry = entry.WithField("runner", c.ShortDescription()) } if c.Name != "" { entry = entry.WithField("runner_name", c.Name) } return entry } // DeepCopy attempts to make a deep clone of the object func (c *RunnerConfig) DeepCopy() (*RunnerConfig, error) { var r RunnerConfig bytes, err := json.Marshal(c) if err != nil { return nil, fmt.Errorf("serialization of runner config failed: %w", err) } err = json.Unmarshal(bytes, &r) if err != nil { return nil, fmt.Errorf("deserialization of runner config failed: %w", err) } r.SystemID = c.SystemID r.ConfigLoadedAt = c.ConfigLoadedAt r.ConfigDir = c.ConfigDir if r.Monitoring != nil { err = r.Monitoring.Compile() if err != nil { return nil, fmt.Errorf("compiling monitoring sections: %w", err) } } return &r, err } // mask masks all sensitive fields on a Runner. // This should only run against a deep copy of a RunnerConfig. 
func (r *RunnerConfig) mask() { if r == nil { return } maskField(&r.Token) if k8s := r.Kubernetes; k8s != nil { maskField(&k8s.BearerToken) } if cache := r.Cache; cache != nil { if s3 := cache.S3; s3 != nil { maskField(&s3.AccessKey) maskField(&s3.SecretKey) maskField(&s3.SessionToken) } if gcs := cache.GCS; gcs != nil { maskField(&gcs.PrivateKey) } if azure := cache.Azure; azure != nil { maskField(&azure.AccountKey) } } } func NewConfigWithSaver(s ConfigSaver) *Config { c := NewConfig() c.ConfigSaver = s return c } func NewConfig() *Config { return &Config{ Concurrent: 1, SessionServer: SessionServer{ SessionTimeout: int(DefaultSessionTimeout.Seconds()), }, } } // DeepCopy returns a deep clone of the config struct. func (c *Config) DeepCopy() (*Config, error) { var d Config b, err := json.Marshal(c) if err != nil { return nil, fmt.Errorf("serialize config: %w", err) } if err = json.Unmarshal(b, &d); err != nil { return nil, fmt.Errorf("deserialize config: %w", err) } return &d, nil } // Masked returns a copy of the config struct with sensitive fields masked. 
func (c *Config) Masked() (*Config, error) { m, err := c.DeepCopy() if err != nil { return nil, fmt.Errorf("deep copy config: %w", err) } for _, r := range m.Runners { r.mask() } return m, nil } func (c *Config) StatConfig(configFile string) error { _, err := os.Stat(configFile) if err != nil { return err } return nil } func (c *Config) LoadConfig(configFile string) error { info, err := os.Stat(configFile) // permission denied is soft error if os.IsNotExist(err) { return nil } else if err != nil { return err } if _, err = toml.DecodeFile(configFile, c); err != nil { return fmt.Errorf("decoding configuration file: %w", err) } for _, r := range c.Runners { err := r.loadConfig(c) if err != nil { return fmt.Errorf("loading coniguration for %s runner: %w", r.Name, err) } } // config built-in validation is blocking when doesn't pass err = c.Validate() if err != nil { return fmt.Errorf("invalid config: %w", err) } c.ModTime = info.ModTime() if c.ConnectionMaxAge == nil { defaultValue := DefaultConnectionMaxAge c.ConnectionMaxAge = &defaultValue } c.Loaded = true return nil } func (c *RunnerConfig) loadConfig(globalCfg *Config) error { // Expand environment variables in credentials c.Token = os.ExpandEnv(c.Token) c.URL = os.ExpandEnv(c.URL) if c.Machine != nil { err := c.Machine.CompilePeriods() if err != nil { return fmt.Errorf("compiling docker machine autoscaling periods: %w", err) } c.Machine.logDeprecationWarning() } if c.Monitoring != nil { err := c.Monitoring.Compile() if err != nil { return fmt.Errorf("compiling monitoring sections: %w", err) } } c.RunnerSettings.ComputeLabels(globalCfg.Labels) return nil } func (c *Config) SaveConfig(configFile string) error { var newConfig bytes.Buffer newBuffer := bufio.NewWriter(&newConfig) if err := toml.NewEncoder(newBuffer).Encode(c); err != nil { logrus.Fatalf("Error encoding TOML: %s", err) return err } if err := newBuffer.Flush(); err != nil { return err } if c.ConfigSaver == nil { c.ConfigSaver = new(defaultConfigSaver) 
} if err := c.ConfigSaver.Save(configFile, newConfig.Bytes()); err != nil { return err } c.ModTime = time.Now() c.Loaded = true return nil } func (c *Config) GetCheckInterval() time.Duration { if c.CheckInterval > 0 { return time.Duration(c.CheckInterval) * time.Second } return CheckInterval } func (c *Config) GetShutdownTimeout() time.Duration { if c.ShutdownTimeout > 0 { return time.Duration(c.ShutdownTimeout) * time.Second } return DefaultShutdownTimeout } // maskField masks the content of a string field // if it is not empty. func maskField(field *string) { if field != nil && *field != "" { *field = mask } } func (c *Config) RunnerByName(name string) (*RunnerConfig, error) { for _, runner := range c.Runners { if runner.Name == name { return runner, nil } } return nil, fmt.Errorf("could not find a runner with the name '%s'", name) } func (c *Config) RunnerByToken(token string) (*RunnerConfig, error) { for _, runner := range c.Runners { if runner.Token == token { return runner, nil } } return nil, fmt.Errorf("could not find a runner with the token '%s'", helpers.ShortenToken(token)) } func (c *Config) RunnerByURLAndID(url string, id int64) (*RunnerConfig, error) { for _, runner := range c.Runners { if runner.URL == url && runner.ID == id { return runner, nil } } return nil, fmt.Errorf("could not find a runner with the URL %q and ID %d", url, id) } func (c *Config) RunnerByNameAndToken(name string, token string) (*RunnerConfig, error) { for _, runner := range c.Runners { if runner.Name == name && runner.Token == token { return runner, nil } } return nil, fmt.Errorf("could not find a runner with the Name '%s' and Token '%s'", name, token) } func (c *Config) Validate() error { for vn, v := range map[string]func() error{ "global labels": c.validateLabels, } { err := v() if err != nil { return fmt.Errorf("validating %s: %w", vn, err) } } for _, r := range c.Runners { err := r.Validate() if err != nil { return fmt.Errorf("validating runner %s: %w", r.Name, err) } } 
return nil } func (c *Config) validateLabels() error { return c.Labels.validatePatterns() } func (c *RunnerConfig) Validate() error { for vn, v := range map[string]func() error{ "labels": c.validateLabels, "computed labels": c.validateComputedLabels, "slot cgroups": c.validateSlotCgroups, "machine options with name": c.validateMachineOptionsWithName, } { err := v() if err != nil { return fmt.Errorf("validating %s: %w", vn, err) } } return nil } func (c *RunnerConfig) validateLabels() error { return c.Labels.validatePatterns() } func (c *RunnerConfig) validateComputedLabels() error { return c.labels.validateCount() } func (c *RunnerConfig) validateSlotCgroups() error { if !c.UseSlotCgroups { return nil } // Validate main slot cgroup template template := c.SlotCgroupTemplate if template == "" { template = DefaultSlotCgroupTemplate } validateSlotCgroupTemplate(template, "slot_cgroup_template") // Validate service slot cgroup template if configured if c.Docker != nil && c.Docker.ServiceSlotCgroupTemplate != "" { validateSlotCgroupTemplate(c.Docker.ServiceSlotCgroupTemplate, "service_slot_cgroup_template") } return nil } func (c *RunnerConfig) validateMachineOptionsWithName() error { if c.Machine == nil { return nil } for _, opt := range c.Machine.MachineOptionsWithName { if !strings.Contains(opt, "%s") { return fmt.Errorf("machine option with name %q must contain %%s placeholder", opt) } } return nil } const DefaultSlotCgroupTemplate = "gitlab-runner/slot-${slot}" // GetSlot extracts the slot number from ExecutorData if available, otherwise returns -1 func GetSlot(data ExecutorData) int { if s, ok := data.(interface{ AcquisitionSlot() int }); ok { return s.AcquisitionSlot() } logrus.WithField("data_type", fmt.Sprintf("%T", data)). 
Debug("ExecutorData does not implement AcquisitionSlot() interface") return -1 } // GetSlotCgroupPath returns the cgroup path for the given slot and ExecutorData func (c *RunnerConfig) GetSlotCgroupPath(data ExecutorData) string { if !c.UseSlotCgroups { return "" } slot := GetSlot(data) if slot < 0 { return "" } template := c.SlotCgroupTemplate if template == "" { template = DefaultSlotCgroupTemplate } return expandSlotTemplate(template, slot) } // GetServiceSlotCgroupPath returns the cgroup path for service containers func (c *RunnerConfig) GetServiceSlotCgroupPath(data ExecutorData) string { if !c.UseSlotCgroups { return "" } slot := GetSlot(data) if slot < 0 { return "" } var template string if c.Docker != nil && c.Docker.ServiceSlotCgroupTemplate != "" { template = c.Docker.ServiceSlotCgroupTemplate } else { template = c.SlotCgroupTemplate if template == "" { template = DefaultSlotCgroupTemplate } } return expandSlotTemplate(template, slot) } // validateSlotCgroupTemplate checks if the template contains the ${slot} placeholder and logs a warning if not func validateSlotCgroupTemplate(template string, configName string) { if !strings.Contains(template, "${slot}") && !strings.Contains(template, "$slot") { logrus.WithFields(logrus.Fields{ "template": template, "config_name": configName, }).Warning("Slot cgroup template does not contain ${slot} placeholder. " + "All jobs will use the same cgroup, defeating the purpose of slot-based isolation. 
" + "Consider using a template like 'gitlab-runner/slot-${slot}'") } } // expandSlotTemplate replaces ${slot} placeholder with actual slot number using os.Expand func expandSlotTemplate(template string, slot int) string { slotStr := strconv.Itoa(slot) return os.Expand(template, func(name string) string { if name == "slot" { return slotStr } return "" }) } func parseVariable(text string) (variable spec.Variable, err error) { keyValue := strings.SplitN(text, "=", 2) if len(keyValue) != 2 { err = errors.New("missing =") return } variable = spec.Variable{ Key: keyValue[0], Value: keyValue[1], } return } // wildcardURL checks if the URL is a wildcard URL func wildcardURL(url string) bool { switch url { case "", "*": return true default: return false } } ================================================ FILE: common/config_log_options_test.go ================================================ //go:build !integration package common import ( "testing" "github.com/stretchr/testify/assert" ) func TestDockerConfig_ValidateLogOptions(t *testing.T) { tests := []struct { name string logOptions map[string]string expectedErrMsg string }{ { name: "nil config", }, { name: "empty log options", logOptions: map[string]string{}, }, { name: "valid env option", logOptions: map[string]string{ "env": "GITLAB_CI_JOB_ID,GITLAB_CI_JOB_NAME", }, }, { name: "valid labels option", logOptions: map[string]string{ "labels": "com.gitlab.gitlab-runner.type", }, }, { name: "valid env and labels options", logOptions: map[string]string{ "env": "GITLAB_CI_JOB_ID,GITLAB_CI_JOB_NAME", "labels": "com.gitlab.gitlab-runner.type", }, }, { name: "invalid single option", logOptions: map[string]string{ "max-size": "10m", }, expectedErrMsg: `invalid log options: only ["env" "labels"] are allowed, but found: ["max-size"]`, }, { name: "invalid multiple options", logOptions: map[string]string{ "max-size": "10m", "max-file": "3", }, expectedErrMsg: `invalid log options: only ["env" "labels"] are allowed, but found: 
["max-file" "max-size"]`, }, { name: "mixed valid and invalid options", logOptions: map[string]string{ "env": "CI_JOB_ID", "max-size": "10m", "labels": "job_name", }, expectedErrMsg: `invalid log options: only ["env" "labels"] are allowed, but found: ["max-size"]`, }, { name: "unknown option", logOptions: map[string]string{ "unknown-option": "value", }, expectedErrMsg: `invalid log options: only ["env" "labels"] are allowed, but found: ["unknown-option"]`, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { dockerConfig := &DockerConfig{ LogOptions: tt.logOptions, } logConfig, err := dockerConfig.GetLogConfig() if tt.expectedErrMsg != "" { assert.Error(t, err) assert.Contains(t, err.Error(), tt.expectedErrMsg) } else { assert.NoError(t, err) assertMapMatches(t, tt.logOptions, logConfig.Config) } }) } } func assertMapMatches(t *testing.T, expected, actual map[string]string) { t.Helper() if len(expected) == 0 { assert.Len(t, actual, 0) return } assert.Equal(t, expected, actual) } ================================================ FILE: common/config_test.go ================================================ //go:build !integration package common import ( "flag" "fmt" "os" "testing" "time" "github.com/BurntSushi/toml" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" api "k8s.io/api/core/v1" clihelpers "gitlab.com/gitlab-org/golang-cli-helpers" "gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" "gitlab.com/gitlab-org/gitlab-runner/helpers/process" ) func TestConfigParse(t *testing.T) { httpHeaders := []KubernetesLifecycleHTTPGetHeader{ {Name: "header_name_1", Value: "header_value_1"}, {Name: "header_name_2", Value: "header_value_2"}, } tests := map[string]struct { config string validateConfig func(t *testing.T, config *Config) expectedErr string }{ "parse Service as table with only 
name": { config: ` [[runners]] [[runners.docker.services]] name = "svc1" [[runners.docker.services]] name = "svc2" `, validateConfig: func(t *testing.T, config *Config) { require.Equal(t, 1, len(config.Runners)) require.Equal(t, 2, len(config.Runners[0].Docker.Services)) assert.Equal(t, "svc1", config.Runners[0].Docker.Services[0].Name) assert.Equal(t, "", config.Runners[0].Docker.Services[0].Alias) assert.Equal(t, "svc2", config.Runners[0].Docker.Services[1].Name) assert.Equal(t, "", config.Runners[0].Docker.Services[1].Alias) }, }, "parse Service as table with only alias": { config: ` [[runners]] [[runners.docker.services]] alias = "svc1" [[runners.docker.services]] alias = "svc2" `, validateConfig: func(t *testing.T, config *Config) { require.Equal(t, 1, len(config.Runners)) require.Equal(t, 2, len(config.Runners[0].Docker.Services)) assert.Equal(t, "", config.Runners[0].Docker.Services[0].Name) assert.Equal(t, "svc1", config.Runners[0].Docker.Services[0].Alias) assert.Equal(t, "", config.Runners[0].Docker.Services[1].Name) assert.Equal(t, "svc2", config.Runners[0].Docker.Services[1].Alias) }, }, "parse Service as table": { config: ` [[runners]] [[runners.docker.services]] name = "svc1" alias = "svc1_alias" [[runners.docker.services]] name = "svc2" alias = "svc2_alias" `, validateConfig: func(t *testing.T, config *Config) { require.Equal(t, 1, len(config.Runners)) require.Equal(t, 2, len(config.Runners[0].Docker.Services)) assert.Equal(t, "svc1", config.Runners[0].Docker.Services[0].Name) assert.Equal(t, "svc1_alias", config.Runners[0].Docker.Services[0].Alias) assert.Equal(t, "svc2", config.Runners[0].Docker.Services[1].Name) assert.Equal(t, "svc2_alias", config.Runners[0].Docker.Services[1].Alias) }, }, "parse Service as table int value name": { config: ` [[runners]] [[runners.docker.services]] name = 5 `, expectedErr: "incompatible types: TOML value has type int64; destination has type string", }, "parse Service as table int value alias": { config: ` 
[[runners]] [[runners.docker.services]] name = "svc1" alias = 5 `, expectedErr: "incompatible types: TOML value has type int64; destination has type string", }, "parse Service runners.docker and runners.docker.services": { config: ` [[runners]] [runners.docker] image = "image" [[runners.docker.services]] name = "svc1" [[runners.docker.services]] name = "svc2" `, validateConfig: func(t *testing.T, config *Config) { require.Equal(t, 1, len(config.Runners)) require.Equal(t, 2, len(config.Runners[0].Docker.Services)) assert.Equal(t, "image", config.Runners[0].Docker.Image) }, }, "parse Service runners.docker.services environment": { config: ` [[runners]] [runners.docker] [[runners.docker.services]] name = "svc1" environment = ["ENV1=value1", "ENV2=value2"] `, validateConfig: func(t *testing.T, config *Config) { require.Equal(t, 1, len(config.Runners)) require.Equal(t, 1, len(config.Runners[0].Docker.Services)) require.Equal(t, 2, len(config.Runners[0].Docker.Services[0].Environment)) assert.Equal(t, "ENV1=value1", config.Runners[0].Docker.Services[0].Environment[0]) assert.Equal(t, "ENV2=value2", config.Runners[0].Docker.Services[0].Environment[1]) }, }, "parse Docker Container Labels with string key and value": { config: ` [[runners]] [runners.docker] image = "image" [runners.docker.container_labels] "my.docker.TestContainerlabel1" = "TestContainerlabel-1" `, validateConfig: func(t *testing.T, config *Config) { require.Equal(t, 1, len(config.Runners)) runner := config.Runners[0] require.NotNil(t, runner.RunnerSettings.Docker.ContainerLabels) require.NotNil(t, runner.RunnerSettings.Docker.ContainerLabels["my.docker.TestContainerlabel1"]) require.Equal( t, "TestContainerlabel-1", runner.RunnerSettings.Docker.ContainerLabels["my.docker.TestContainerlabel1"], ) }, }, "parse Docker Container Labels with integer key and value": { config: ` [[runners]] [runners.docker] image = "image" [runners.docker.container_labels] 5 = 5 `, expectedErr: "incompatible types: TOML value has 
type int64; destination has type string", }, "parse Docker Container Labels with integer value": { config: ` [[runners]] [runners.docker] image = "image" [runners.docker.container_labels] "my.docker.TestContainerlabel1" = 5 `, expectedErr: "incompatible types: TOML value has type int64; destination has type string", }, "parse Docker Container Labels with integer key": { config: ` [[runners]] [runners.docker] image = "image" [runners.docker.container_labels] 5 = "TestContainerlabel-1" `, }, "check node affinities": { config: ` [[runners]] [runners.kubernetes] [runners.kubernetes.affinity] [runners.kubernetes.affinity.node_affinity] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution]] weight = 100 [runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference.match_expressions]] key = "cpu_speed" operator = "In" values = ["fast"] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution]] weight = 50 [runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference.match_expressions]] key = "core_count" operator = "In" values = ["high", "32"] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference.match_expressions]] key = "cpu_type" operator = "In" values = ["x86, arm", "i386"] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution]] weight = 20 [runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference.match_fields]] key = "zone" operator 
= "In" values = ["us-east"] [runners.kubernetes.affinity.node_affinity.required_during_scheduling_ignored_during_execution] [[runners.kubernetes.affinity.node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms]] [[runners.kubernetes.affinity.node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms.match_expressions]] key = "kubernetes.io/e2e-az-name" operator = "In" values = [ "e2e-az1", "e2e-az2" ] [[runners.kubernetes.affinity.node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms]] [[runners.kubernetes.affinity.node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms.match_fields]] key = "kubernetes.io/e2e-az-name/field" operator = "In" values = [ "e2e-az1" ] `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) require.NotNil(t, config.Runners[0].Kubernetes.Affinity) require.NotNil(t, config.Runners[0].Kubernetes.Affinity.NodeAffinity) nodeAffinity := config.Runners[0].Kubernetes.Affinity.NodeAffinity require.Len(t, nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution, 3) assert.Equal(t, int32(100), nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[0].Weight) require.NotNil(t, nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[0].Preference) require.Len(t, nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[0].Preference.MatchExpressions, 1) assert.Equal(t, "In", nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[0].Preference.MatchExpressions[0].Operator) assert.Equal(t, "cpu_speed", nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[0].Preference.MatchExpressions[0].Key) assert.Equal(t, "fast", nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[0].Preference.MatchExpressions[0].Values[0]) assert.Equal(t, int32(50), nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[1].Weight) require.NotNil(t, 
nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[1].Preference) require.Len(t, nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[1].Preference.MatchExpressions, 2) assert.Equal(t, "In", nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[1].Preference.MatchExpressions[0].Operator) assert.Equal(t, "core_count", nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[1].Preference.MatchExpressions[0].Key) assert.Equal(t, []string{"high", "32"}, nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[1].Preference.MatchExpressions[0].Values) assert.Equal(t, "In", nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[1].Preference.MatchExpressions[1].Operator) assert.Equal(t, "cpu_type", nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[1].Preference.MatchExpressions[1].Key) assert.Equal(t, []string{"x86, arm", "i386"}, nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[1].Preference.MatchExpressions[1].Values) assert.Equal(t, int32(20), nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[2].Weight) require.NotNil(t, nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[2].Preference) require.Len(t, nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[2].Preference.MatchFields, 1) assert.Equal(t, "zone", nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[2].Preference.MatchFields[0].Key) assert.Equal(t, "In", nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[2].Preference.MatchFields[0].Operator) assert.Equal(t, []string{"us-east"}, nodeAffinity.PreferredDuringSchedulingIgnoredDuringExecution[2].Preference.MatchFields[0].Values) require.NotNil(t, nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution) require.Len(t, nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms, 2) require.Len(t, nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions, 1) require.Len(t, 
nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchFields, 0) assert.Equal(t, "kubernetes.io/e2e-az-name", nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[0].Key) assert.Equal(t, "In", nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[0].Operator) assert.Equal(t, []string{"e2e-az1", "e2e-az2"}, nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[0].MatchExpressions[0].Values) assert.Equal(t, "kubernetes.io/e2e-az-name/field", nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[1].MatchFields[0].Key) assert.Equal(t, "In", nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[1].MatchFields[0].Operator) assert.Equal(t, []string{"e2e-az1"}, nodeAffinity.RequiredDuringSchedulingIgnoredDuringExecution.NodeSelectorTerms[1].MatchFields[0].Values) }, }, "check pod affinities": { config: ` [[runners]] [runners.kubernetes] [runners.kubernetes.affinity] [runners.kubernetes.affinity.pod_affinity] [[runners.kubernetes.affinity.pod_affinity.required_during_scheduling_ignored_during_execution]] topology_key = "failure-domain.beta.kubernetes.io/zone" namespaces = ["namespace_1", "namespace_2"] [runners.kubernetes.affinity.pod_affinity.required_during_scheduling_ignored_during_execution.label_selector] [[runners.kubernetes.affinity.pod_affinity.required_during_scheduling_ignored_during_execution.label_selector.match_expressions]] key = "security" operator = "In" values = ["S1"] [runners.kubernetes.affinity.pod_affinity.required_during_scheduling_ignored_during_execution.namespace_selector] [[runners.kubernetes.affinity.pod_affinity.required_during_scheduling_ignored_during_execution.namespace_selector.match_expressions]] key = "security" operator = "In" values = ["S1"] [[runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution]] weight = 
100 [runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term] topology_key = "failure-domain.beta.kubernetes.io/zone" [runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.label_selector] [[runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.label_selector.match_expressions]] key = "security_2" operator = "In" values = ["S2"] [runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.namespace_selector] [[runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.namespace_selector.match_expressions]] key = "security_2" operator = "In" values = ["S2"] `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) require.NotNil(t, config.Runners[0].Kubernetes.Affinity) require.NotNil(t, config.Runners[0].Kubernetes.Affinity.PodAffinity) podAffinity := config.Runners[0].Kubernetes.Affinity.PodAffinity require.Len(t, podAffinity.RequiredDuringSchedulingIgnoredDuringExecution, 1) required := podAffinity.RequiredDuringSchedulingIgnoredDuringExecution assert.Equal(t, "failure-domain.beta.kubernetes.io/zone", required[0].TopologyKey) assert.Equal(t, []string{"namespace_1", "namespace_2"}, required[0].Namespaces) require.NotNil(t, required[0].LabelSelector) require.Len(t, required[0].LabelSelector.MatchExpressions, 1) requiredMatchExp := required[0].LabelSelector.MatchExpressions[0] assert.Equal(t, "security", requiredMatchExp.Key) assert.Equal(t, "In", requiredMatchExp.Operator) assert.Equal(t, []string{"S1"}, requiredMatchExp.Values) require.NotNil(t, required[0].NamespaceSelector) require.Len(t, required[0].NamespaceSelector.MatchExpressions, 1) requiredMatchExp = required[0].NamespaceSelector.MatchExpressions[0] assert.Equal(t, "security", requiredMatchExp.Key) 
assert.Equal(t, "In", requiredMatchExp.Operator) assert.Equal(t, []string{"S1"}, requiredMatchExp.Values) require.Len(t, podAffinity.PreferredDuringSchedulingIgnoredDuringExecution, 1) preferred := podAffinity.PreferredDuringSchedulingIgnoredDuringExecution assert.Equal(t, int32(100), preferred[0].Weight) assert.Empty(t, preferred[0].PodAffinityTerm.Namespaces) assert.Equal(t, "failure-domain.beta.kubernetes.io/zone", preferred[0].PodAffinityTerm.TopologyKey) require.NotNil(t, preferred[0].PodAffinityTerm.LabelSelector) require.Len(t, preferred[0].PodAffinityTerm.LabelSelector.MatchExpressions, 1) preferredMatchExp := preferred[0].PodAffinityTerm.LabelSelector.MatchExpressions[0] assert.Equal(t, "security_2", preferredMatchExp.Key) assert.Equal(t, "In", preferredMatchExp.Operator) assert.Equal(t, []string{"S2"}, preferredMatchExp.Values) require.NotNil(t, preferred[0].PodAffinityTerm.NamespaceSelector) require.Len(t, preferred[0].PodAffinityTerm.NamespaceSelector.MatchExpressions, 1) preferredMatchExp = preferred[0].PodAffinityTerm.NamespaceSelector.MatchExpressions[0] assert.Equal(t, "security_2", preferredMatchExp.Key) assert.Equal(t, "In", preferredMatchExp.Operator) assert.Equal(t, []string{"S2"}, preferredMatchExp.Values) }, }, "check pod anti affinities": { config: ` [[runners]] [runners.kubernetes] [runners.kubernetes.affinity] [runners.kubernetes.affinity.pod_anti_affinity] [[runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution]] topology_key = "failure-domain.beta.kubernetes.io/zone" namespaces = ["namespace_1", "namespace_2"] [runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution.label_selector] [[runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution.label_selector.match_expressions]] key = "security" operator = "In" values = ["S1"] 
[runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution.namespace_selector] [[runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution.namespace_selector.match_expressions]] key = "security" operator = "In" values = ["S1"] [[runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution]] weight = 100 [runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term] topology_key = "failure-domain.beta.kubernetes.io/zone" [runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.label_selector] [[runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.label_selector.match_expressions]] key = "security_2" operator = "In" values = ["S2"] [runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.namespace_selector] [[runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.namespace_selector.match_expressions]] key = "security_2" operator = "In" values = ["S2"] `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) require.NotNil(t, config.Runners[0].Kubernetes.Affinity) require.NotNil(t, config.Runners[0].Kubernetes.Affinity.PodAntiAffinity) podAntiAffinity := config.Runners[0].Kubernetes.Affinity.PodAntiAffinity require.Len(t, podAntiAffinity.RequiredDuringSchedulingIgnoredDuringExecution, 1) required := podAntiAffinity.RequiredDuringSchedulingIgnoredDuringExecution[0] assert.Equal(t, "failure-domain.beta.kubernetes.io/zone", required.TopologyKey) assert.Equal(t, []string{"namespace_1", "namespace_2"}, required.Namespaces) require.NotNil(t, required.LabelSelector) require.Len(t, required.LabelSelector.MatchExpressions, 1) 
requiredMatchExp := required.LabelSelector.MatchExpressions[0] assert.Equal(t, "security", requiredMatchExp.Key) assert.Equal(t, "In", requiredMatchExp.Operator) assert.Equal(t, []string{"S1"}, requiredMatchExp.Values) require.NotNil(t, required.NamespaceSelector) require.Len(t, required.NamespaceSelector.MatchExpressions, 1) requiredMatchExp = required.NamespaceSelector.MatchExpressions[0] assert.Equal(t, "security", requiredMatchExp.Key) assert.Equal(t, "In", requiredMatchExp.Operator) assert.Equal(t, []string{"S1"}, requiredMatchExp.Values) require.Len(t, podAntiAffinity.PreferredDuringSchedulingIgnoredDuringExecution, 1) preferred := podAntiAffinity.PreferredDuringSchedulingIgnoredDuringExecution[0] assert.Equal(t, int32(100), preferred.Weight) assert.Empty(t, preferred.PodAffinityTerm.Namespaces) assert.Equal(t, "failure-domain.beta.kubernetes.io/zone", preferred.PodAffinityTerm.TopologyKey) require.NotNil(t, preferred.PodAffinityTerm.LabelSelector) require.Len(t, preferred.PodAffinityTerm.LabelSelector.MatchExpressions, 1) preferredMatchExp := preferred.PodAffinityTerm.LabelSelector.MatchExpressions[0] assert.Equal(t, "security_2", preferredMatchExp.Key) assert.Equal(t, "In", preferredMatchExp.Operator) assert.Equal(t, []string{"S2"}, preferredMatchExp.Values) require.NotNil(t, preferred.PodAffinityTerm.NamespaceSelector) require.Len(t, preferred.PodAffinityTerm.NamespaceSelector.MatchExpressions, 1) preferredMatchExp = preferred.PodAffinityTerm.NamespaceSelector.MatchExpressions[0] assert.Equal(t, "security_2", preferredMatchExp.Key) assert.Equal(t, "In", preferredMatchExp.Operator) assert.Equal(t, []string{"S2"}, preferredMatchExp.Values) }, }, "check that GracefulKillTimeout and ForceKillTimeout can't be set": { config: ` [[runners]] GracefulKillTimeout = 30 ForceKillTimeout = 10 `, validateConfig: func(t *testing.T, config *Config) { assert.Nil(t, config.Runners[0].GracefulKillTimeout) assert.Nil(t, config.Runners[0].ForceKillTimeout) }, }, "setting DNS 
policy to none": { config: ` [[runners]] [runners.kubernetes] dns_policy = 'none' `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) dnsPolicy, err := config.Runners[0].Kubernetes.DNSPolicy.Get() assert.NoError(t, err) assert.Equal(t, api.DNSNone, dnsPolicy) }, }, "setting DNS policy to default": { config: ` [[runners]] [runners.kubernetes] dns_policy = 'default' `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) dnsPolicy, err := config.Runners[0].Kubernetes.DNSPolicy.Get() assert.NoError(t, err) assert.Equal(t, api.DNSDefault, dnsPolicy) }, }, "setting DNS policy to cluster-first": { config: ` [[runners]] [runners.kubernetes] dns_policy = 'cluster-first' `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) dnsPolicy, err := config.Runners[0].Kubernetes.DNSPolicy.Get() assert.NoError(t, err) assert.Equal(t, api.DNSClusterFirst, dnsPolicy) }, }, "setting DNS policy to cluster-first-with-host-net": { config: ` [[runners]] [runners.kubernetes] dns_policy = 'cluster-first-with-host-net' `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) dnsPolicy, err := config.Runners[0].Kubernetes.DNSPolicy.Get() assert.NoError(t, err) assert.Equal(t, api.DNSClusterFirstWithHostNet, dnsPolicy) }, }, "fail setting DNS policy to invalid value": { config: ` [[runners]] [runners.kubernetes] dns_policy = 'some-invalid-policy' `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) dnsPolicy, err := config.Runners[0].Kubernetes.DNSPolicy.Get() assert.Error(t, err) assert.Empty(t, dnsPolicy) }, }, "fail setting DNS policy to empty value returns default value": { config: ` [[runners]] [runners.kubernetes] dns_policy = '' `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) dnsPolicy, err := config.Runners[0].Kubernetes.DNSPolicy.Get() assert.NoError(t, err) 
assert.Equal(t, api.DNSClusterFirst, dnsPolicy) }, }, "check empty container lifecycle": { config: ` [[runners]] [runners.kubernetes] namespace = "default" `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) lifecycleCfg := config.Runners[0].Kubernetes.GetContainerLifecycle() assert.Nil(t, lifecycleCfg.PostStart) assert.Nil(t, lifecycleCfg.PreStop) }, }, "check postStart execAction configuration": { config: ` [[runners]] [runners.kubernetes] namespace = "default" [runners.kubernetes.container_lifecycle.post_start.exec] command = ["ls", "-l"] `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) lifecycleCfg := config.Runners[0].Kubernetes.GetContainerLifecycle() assert.NotNil(t, lifecycleCfg.PostStart) assert.Equal(t, []string{"ls", "-l"}, lifecycleCfg.PostStart.Exec.Command) assert.Nil(t, nil, lifecycleCfg.PostStart.HTTPGet) assert.Nil(t, nil, lifecycleCfg.PostStart.TCPSocket) }, }, "check postStart httpGetAction configuration": { config: ` [[runners]] [runners.kubernetes] namespace = "default" [runners.kubernetes.container_lifecycle.post_start.http_get] port = 8080 host = "localhost" path = "/test" [[runners.kubernetes.container_lifecycle.post_start.http_get.http_headers]] name = "header_name_1" value = "header_value_1" [[runners.kubernetes.container_lifecycle.post_start.http_get.http_headers]] name = "header_name_2" value = "header_value_2" `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) lifecycleCfg := config.Runners[0].Kubernetes.GetContainerLifecycle() assert.NotNil(t, lifecycleCfg.PostStart) assert.Equal(t, 8080, lifecycleCfg.PostStart.HTTPGet.Port) assert.Equal(t, "localhost", lifecycleCfg.PostStart.HTTPGet.Host) assert.Equal(t, "/test", lifecycleCfg.PostStart.HTTPGet.Path) assert.Equal(t, httpHeaders, lifecycleCfg.PostStart.HTTPGet.HTTPHeaders) }, }, "check postStart tcpSocketAction configuration": { config: ` [[runners]] [runners.kubernetes] 
namespace = "default" [runners.kubernetes.container_lifecycle.post_start.tcp_socket] port = 8080 host = "localhost" `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) lifecycleCfg := config.Runners[0].Kubernetes.GetContainerLifecycle() assert.NotNil(t, lifecycleCfg.PostStart) assert.Equal(t, 8080, lifecycleCfg.PostStart.TCPSocket.Port) assert.Equal(t, "localhost", lifecycleCfg.PostStart.TCPSocket.Host) }, }, "check preStop execAction configuration": { config: ` [[runners]] [runners.kubernetes] namespace = "default" [runners.kubernetes.container_lifecycle.pre_stop.exec] command = ["ls", "-l"] `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) lifecycleCfg := config.Runners[0].Kubernetes.GetContainerLifecycle() assert.NotNil(t, lifecycleCfg.PreStop) assert.Equal(t, []string{"ls", "-l"}, lifecycleCfg.PreStop.Exec.Command) assert.Nil(t, nil, lifecycleCfg.PreStop.HTTPGet) assert.Nil(t, nil, lifecycleCfg.PreStop.TCPSocket) }, }, "check preStop httpGetAction configuration": { config: ` [[runners]] [runners.kubernetes] namespace = "default" [runners.kubernetes.container_lifecycle.pre_stop.http_get] port = 8080 host = "localhost" path = "/test" [[runners.kubernetes.container_lifecycle.pre_stop.http_get.http_headers]] name = "header_name_1" value = "header_value_1" [[runners.kubernetes.container_lifecycle.pre_stop.http_get.http_headers]] name = "header_name_2" value = "header_value_2" `, validateConfig: func(t *testing.T, config *Config) { require.Len(t, config.Runners, 1) lifecycleCfg := config.Runners[0].Kubernetes.GetContainerLifecycle() assert.NotNil(t, lifecycleCfg.PreStop) assert.Equal(t, 8080, lifecycleCfg.PreStop.HTTPGet.Port) assert.Equal(t, "localhost", lifecycleCfg.PreStop.HTTPGet.Host) assert.Equal(t, "/test", lifecycleCfg.PreStop.HTTPGet.Path) assert.Equal(t, httpHeaders, lifecycleCfg.PreStop.HTTPGet.HTTPHeaders) }, }, "check preStop tcpSocketAction configuration": { config: ` 
[[runners]]
[runners.kubernetes]
namespace = "default"
[runners.kubernetes.container_lifecycle.pre_stop.tcp_socket]
port = 8080
host = "localhost"
`,
			validateConfig: func(t *testing.T, config *Config) {
				require.Len(t, config.Runners, 1)
				lifecycleCfg := config.Runners[0].Kubernetes.GetContainerLifecycle()
				assert.NotNil(t, lifecycleCfg.PreStop)
				assert.Equal(t, 8080, lifecycleCfg.PreStop.TCPSocket.Port)
				assert.Equal(t, "localhost", lifecycleCfg.PreStop.TCPSocket.Host)
			},
		},
		"setting Priority Class to priority-1": {
			config: `
[[runners]]
[runners.kubernetes]
priority_class_name = 'priority-1'
`,
			validateConfig: func(t *testing.T, config *Config) {
				require.Len(t, config.Runners, 1)
				priorityClassName := config.Runners[0].Kubernetes.PriorityClassName
				assert.Equal(t, "priority-1", priorityClassName)
			},
		},
		"setting scheduler_name to foobar": {
			config: `
[[runners]]
[runners.kubernetes]
scheduler_name = 'foobar'
`,
			validateConfig: func(t *testing.T, config *Config) {
				require.Len(t, config.Runners, 1)
				schedulerName := config.Runners[0].Kubernetes.SchedulerName
				assert.Equal(t, "foobar", schedulerName)
			},
		},
	}

	// Decode each TOML snippet into a fresh Config; either the decode error
	// must contain expectedErr, or the decoded config is handed to the
	// case's validator.
	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			cfg := NewConfig()
			_, err := toml.Decode(tt.config, cfg)
			if tt.expectedErr != "" {
				assert.ErrorContains(t, err, tt.expectedErr)
				return
			}
			assert.NoError(t, err)
			if tt.validateConfig != nil {
				tt.validateConfig(t, cfg)
			}
		})
	}
}

// TestKubernetesHostAliases verifies that GetHostAliases maps the configured
// KubernetesHostAliases entries one-to-one onto api.HostAlias — duplicated
// IPs and duplicated hostnames are passed through, not deduplicated.
func TestKubernetesHostAliases(t *testing.T) {
	tests := map[string]struct {
		config              KubernetesConfig
		expectedHostAliases []api.HostAlias
	}{
		"parse Kubernetes HostAliases with empty list": {
			config:              KubernetesConfig{},
			expectedHostAliases: nil,
		},
		"parse Kubernetes HostAliases with unique ips": {
			config: KubernetesConfig{
				HostAliases: []KubernetesHostAliases{
					{
						IP:        "127.0.0.1",
						Hostnames: []string{"web1", "web2"},
					},
					{
						IP:        "192.168.1.1",
						Hostnames: []string{"web14", "web15"},
					},
				},
			},
			expectedHostAliases: []api.HostAlias{
				{
					IP:        "127.0.0.1",
					Hostnames: []string{"web1", "web2"},
				},
				{
					IP:        "192.168.1.1",
					Hostnames: []string{"web14", "web15"},
				},
			},
		},
		"parse Kubernetes HostAliases with duplicated ip": {
			config: KubernetesConfig{
				HostAliases: []KubernetesHostAliases{
					{
						IP:        "127.0.0.1",
						Hostnames: []string{"web1", "web2"},
					},
					{
						IP:        "127.0.0.1",
						Hostnames: []string{"web14", "web15"},
					},
				},
			},
			expectedHostAliases: []api.HostAlias{
				{
					IP:        "127.0.0.1",
					Hostnames: []string{"web1", "web2"},
				},
				{
					IP:        "127.0.0.1",
					Hostnames: []string{"web14", "web15"},
				},
			},
		},
		"parse Kubernetes HostAliases with duplicated hostname": {
			config: KubernetesConfig{
				HostAliases: []KubernetesHostAliases{
					{
						IP:        "127.0.0.1",
						Hostnames: []string{"web1", "web1", "web2"},
					},
					{
						IP:        "127.0.0.1",
						Hostnames: []string{"web1", "web15"},
					},
				},
			},
			expectedHostAliases: []api.HostAlias{
				{
					IP:        "127.0.0.1",
					Hostnames: []string{"web1", "web1", "web2"},
				},
				{
					IP:        "127.0.0.1",
					Hostnames: []string{"web1", "web15"},
				},
			},
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			assert.Equal(t, tt.expectedHostAliases, tt.config.GetHostAliases())
		})
	}
}

// TestService_ToImageDefinition checks the translation of a [[runners.*.services]]
// entry into the build image spec: name, alias(es), command, entrypoint and
// environment variables (the latter marked Internal).
func TestService_ToImageDefinition(t *testing.T) {
	tests := map[string]struct {
		service       Service
		expectedImage spec.Image
	}{
		"empty service": {
			service:       Service{},
			expectedImage: spec.Image{},
		},
		"only name": {
			service:       Service{Name: "name"},
			expectedImage: spec.Image{Name: "name"},
		},
		"only alias": {
			service:       Service{Alias: "alias"},
			expectedImage: spec.Image{Alias: "alias"},
		},
		"name and alias": {
			service:       Service{Name: "name", Alias: "alias"},
			expectedImage: spec.Image{Name: "name", Alias: "alias"},
		},
		"only aliases": {
			service:       Service{Alias: "alias-1 alias-2"},
			expectedImage: spec.Image{Alias: "alias-1 alias-2"},
		},
		"name and aliases": {
			service:       Service{Name: "name", Alias: "alias-1 alias-2"},
			expectedImage: spec.Image{Name: "name", Alias: "alias-1 alias-2"},
		},
		"command specified": {
			service:       Service{Name: "name", Command: []string{"executable", "param1", "param2"}},
			expectedImage: spec.Image{Name: "name", Command: []string{"executable",
"param1", "param2"}}, }, "entrypoint specified": { service: Service{Name: "name", Entrypoint: []string{"executable", "param3", "param4"}}, expectedImage: spec.Image{Name: "name", Entrypoint: []string{"executable", "param3", "param4"}}, }, "command and entrypoint specified": { service: Service{ Name: "name", Command: []string{"executable", "param1", "param2"}, Entrypoint: []string{"executable", "param3", "param4"}, }, expectedImage: spec.Image{ Name: "name", Command: []string{"executable", "param1", "param2"}, Entrypoint: []string{"executable", "param3", "param4"}, }, }, "environment specified": { service: Service{Name: "name", Environment: []string{"ENV1=value1", "ENV2=value2"}}, expectedImage: spec.Image{Name: "name", Variables: spec.Variables{ {Key: "ENV1", Value: "value1", Internal: true}, {Key: "ENV2", Value: "value2", Internal: true}, }}, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { assert.Equal(t, tt.expectedImage, tt.service.ToImageDefinition()) }) } } func TestDockerMachine(t *testing.T) { timeNow := func() time.Time { return time.Date(2020, 05, 05, 20, 00, 00, 0, time.Local) } activeTimePeriod := []string{fmt.Sprintf("* * %d * * * *", timeNow().Hour())} inactiveTimePeriod := []string{fmt.Sprintf("* * %d * * * *", timeNow().Add(2*time.Hour).Hour())} invalidTimePeriod := []string{"invalid period"} oldPeriodTimer := periodTimer defer func() { periodTimer = oldPeriodTimer }() periodTimer = timeNow tests := map[string]struct { config *DockerMachine expectedIdleCount int expectedIdleTime int expectedErr error }{ "global config only": { config: &DockerMachine{IdleCount: 1, IdleTime: 1000}, expectedIdleCount: 1, expectedIdleTime: 1000, }, "offpeak active ignored": { config: &DockerMachine{ IdleCount: 1, IdleTime: 1000, OffPeakPeriods: activeTimePeriod, OffPeakIdleCount: 2, OffPeakIdleTime: 2000, }, expectedIdleCount: 1, expectedIdleTime: 1000, }, "offpeak inactive ignored": { config: &DockerMachine{ IdleCount: 1, IdleTime: 1000, OffPeakPeriods: 
inactiveTimePeriod, OffPeakIdleCount: 2, OffPeakIdleTime: 2000, }, expectedIdleCount: 1, expectedIdleTime: 1000, }, "offpeak invalid format ignored": { config: &DockerMachine{ OffPeakPeriods: invalidTimePeriod, OffPeakIdleCount: 2, OffPeakIdleTime: 2000, }, expectedIdleCount: 0, expectedIdleTime: 0, }, "autoscaling config active": { config: &DockerMachine{ IdleCount: 1, IdleTime: 1000, AutoscalingConfigs: []*DockerMachineAutoscaling{ { Periods: activeTimePeriod, IdleCount: 2, IdleTime: 2000, }, }, }, expectedIdleCount: 2, expectedIdleTime: 2000, }, "autoscaling config inactive": { config: &DockerMachine{ IdleCount: 1, IdleTime: 1000, AutoscalingConfigs: []*DockerMachineAutoscaling{ { Periods: inactiveTimePeriod, IdleCount: 2, IdleTime: 2000, }, }, }, expectedIdleCount: 1, expectedIdleTime: 1000, }, "last matching autoscaling config is selected": { config: &DockerMachine{ IdleCount: 1, IdleTime: 1000, AutoscalingConfigs: []*DockerMachineAutoscaling{ { Periods: activeTimePeriod, IdleCount: 2, IdleTime: 2000, }, { Periods: activeTimePeriod, IdleCount: 3, IdleTime: 3000, }, }, }, expectedIdleCount: 3, expectedIdleTime: 3000, }, "autoscaling overrides offpeak config": { config: &DockerMachine{ IdleCount: 1, IdleTime: 1000, OffPeakPeriods: activeTimePeriod, OffPeakIdleCount: 2, OffPeakIdleTime: 2000, AutoscalingConfigs: []*DockerMachineAutoscaling{ { Periods: activeTimePeriod, IdleCount: 3, IdleTime: 3000, }, { Periods: activeTimePeriod, IdleCount: 4, IdleTime: 4000, }, { Periods: inactiveTimePeriod, IdleCount: 5, IdleTime: 5000, }, }, }, expectedIdleCount: 4, expectedIdleTime: 4000, }, "autoscaling invalid period config": { config: &DockerMachine{ IdleCount: 1, IdleTime: 1000, AutoscalingConfigs: []*DockerMachineAutoscaling{ { Periods: []string{"invalid period"}, IdleCount: 3, IdleTime: 3000, }, }, }, expectedIdleCount: 0, expectedIdleTime: 0, expectedErr: new(InvalidTimePeriodsError), }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { err := 
tt.config.CompilePeriods()
			if tt.expectedErr != nil {
				assert.ErrorIs(t, err, tt.expectedErr)
				return
			}
			assert.NoError(t, err, "should not return err on good period compile")
			assert.Equal(t, tt.expectedIdleCount, tt.config.GetIdleCount())
			assert.Equal(t, tt.expectedIdleTime, tt.config.GetIdleTime())
		})
	}
}

// TestRunnerSettings_GetGracefulKillTimeout_GetForceKillTimeout verifies that
// unset or negative kill timeouts fall back to the process package defaults,
// while positive values are converted to seconds.
func TestRunnerSettings_GetGracefulKillTimeout_GetForceKillTimeout(t *testing.T) {
	tests := map[string]struct {
		config                      RunnerSettings
		expectedGracefulKillTimeout time.Duration
		expectedForceKillTimeout    time.Duration
	}{
		"undefined": {
			config:                      RunnerSettings{},
			expectedGracefulKillTimeout: process.GracefulTimeout,
			expectedForceKillTimeout:    process.KillTimeout,
		},
		"timeouts lower than 0": {
			config: RunnerSettings{
				GracefulKillTimeout: func(i int) *int { return &i }(-10),
				ForceKillTimeout:    func(i int) *int { return &i }(-10),
			},
			expectedGracefulKillTimeout: process.GracefulTimeout,
			expectedForceKillTimeout:    process.KillTimeout,
		},
		"timeouts greater than 0": {
			config: RunnerSettings{
				GracefulKillTimeout: func(i int) *int { return &i }(30),
				ForceKillTimeout:    func(i int) *int { return &i }(15),
			},
			expectedGracefulKillTimeout: 30 * time.Second,
			expectedForceKillTimeout:    15 * time.Second,
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			assert.Equal(t, tt.expectedGracefulKillTimeout, tt.config.GetGracefulKillTimeout())
			assert.Equal(t, tt.expectedForceKillTimeout, tt.config.GetForceKillTimeout())
		})
	}
}

// TestDockerConfig_GetPullPolicies: nil/empty pull_policy defaults to
// "always"; empty-string or unknown values are rejected.
func TestDockerConfig_GetPullPolicies(t *testing.T) {
	tests := map[string]struct {
		config               DockerConfig
		expectedPullPolicies []DockerPullPolicy
		expectedErr          bool
	}{
		"nil pull_policy": {
			config:               DockerConfig{},
			expectedPullPolicies: []DockerPullPolicy{PullPolicyAlways},
			expectedErr:          false,
		},
		"empty pull_policy": {
			config:               DockerConfig{PullPolicy: StringOrArray{}},
			expectedPullPolicies: []DockerPullPolicy{PullPolicyAlways},
			expectedErr:          false,
		},
		"empty string pull_policy": {
			config:      DockerConfig{PullPolicy: StringOrArray{""}},
			expectedErr: true,
		},
		"known elements in pull_policy": {
			config: DockerConfig{
				PullPolicy: StringOrArray{PullPolicyAlways, PullPolicyIfNotPresent, PullPolicyNever},
			},
			expectedPullPolicies: []DockerPullPolicy{PullPolicyAlways, PullPolicyIfNotPresent, PullPolicyNever},
			expectedErr:          false,
		},
		"invalid pull_policy": {
			config:      DockerConfig{PullPolicy: StringOrArray{"invalid"}},
			expectedErr: true,
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			policies, err := tt.config.GetPullPolicies()
			if tt.expectedErr {
				assert.Error(t, err)
				return
			}
			assert.NoError(t, err)
			assert.Equal(t, tt.expectedPullPolicies, policies)
		})
	}
}

// TestDockerConfig_GetAllowedPullPolicies: nil/empty allowed_pull_policies
// defaults to "always"; empty-string or unknown values are rejected.
func TestDockerConfig_GetAllowedPullPolicies(t *testing.T) {
	tests := map[string]struct {
		config               DockerConfig
		expectedPullPolicies []DockerPullPolicy
		expectedErr          bool
	}{
		"nil allowed_pull_policies": {
			config:               DockerConfig{},
			expectedPullPolicies: []DockerPullPolicy{PullPolicyAlways},
			expectedErr:          false,
		},
		"empty allowed_pull_policies": {
			config:               DockerConfig{AllowedPullPolicies: []DockerPullPolicy{}},
			expectedPullPolicies: []DockerPullPolicy{PullPolicyAlways},
			expectedErr:          false,
		},
		"empty string allowed_pull_policies": {
			config:      DockerConfig{AllowedPullPolicies: []DockerPullPolicy{""}},
			expectedErr: true,
		},
		"known elements in allowed_pull_policies": {
			config: DockerConfig{
				AllowedPullPolicies: []DockerPullPolicy{PullPolicyAlways, PullPolicyNever},
			},
			expectedPullPolicies: []DockerPullPolicy{PullPolicyAlways, PullPolicyNever},
			expectedErr:          false,
		},
		"invalid allowed_pull_policies": {
			config:      DockerConfig{AllowedPullPolicies: []DockerPullPolicy{"invalid"}},
			expectedErr: true,
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			policies, err := tt.config.GetAllowedPullPolicies()
			if tt.expectedErr {
				assert.Error(t, err)
				return
			}
			assert.NoError(t, err)
			assert.Equal(t, tt.expectedPullPolicies, policies)
		})
	}
}

// TestKubernetesConfig_GetAllowedPullPolicies: the Kubernetes variant maps
// Docker-style policies onto api.PullPolicy values; unset falls back to the
// cluster default (empty policy).
func TestKubernetesConfig_GetAllowedPullPolicies(t *testing.T) {
	tests := map[string]struct {
		config               KubernetesConfig
		expectedPullPolicies []api.PullPolicy
expectedErr bool }{ "nil allowed_pull_policies": { config: KubernetesConfig{}, expectedPullPolicies: []api.PullPolicy{""}, expectedErr: false, }, "empty allowed_pull_policies": { config: KubernetesConfig{ AllowedPullPolicies: []DockerPullPolicy{}, }, expectedPullPolicies: []api.PullPolicy{""}, expectedErr: false, }, "empty string allowed_pull_policies": { config: KubernetesConfig{ AllowedPullPolicies: []DockerPullPolicy{""}, }, expectedPullPolicies: []api.PullPolicy{""}, expectedErr: false, }, "known elements in allowed_pull_policies": { config: KubernetesConfig{ AllowedPullPolicies: []DockerPullPolicy{PullPolicyAlways, PullPolicyNever}, }, expectedPullPolicies: []api.PullPolicy{api.PullAlways, api.PullNever}, expectedErr: false, }, "invalid allowed_pull_policies": { config: KubernetesConfig{ AllowedPullPolicies: []DockerPullPolicy{"invalid"}, }, expectedErr: true, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { policies, err := tt.config.GetAllowedPullPolicies() if tt.expectedErr { assert.Error(t, err) return } assert.NoError(t, err) assert.Equal(t, tt.expectedPullPolicies, policies) }) } } func TestKubernetesConfig_GetPullPolicies(t *testing.T) { tests := map[string]struct { config KubernetesConfig expectedPullPolicies []api.PullPolicy expectedErr bool }{ "nil pull_policy": { config: KubernetesConfig{}, expectedPullPolicies: []api.PullPolicy{""}, expectedErr: false, }, "empty pull_policy": { config: KubernetesConfig{PullPolicy: StringOrArray{}}, expectedPullPolicies: []api.PullPolicy{""}, expectedErr: false, }, "empty string pull_policy": { config: KubernetesConfig{PullPolicy: StringOrArray{""}}, expectedPullPolicies: []api.PullPolicy{""}, expectedErr: false, }, "known elements in pull_policy": { config: KubernetesConfig{ PullPolicy: StringOrArray{PullPolicyAlways, PullPolicyIfNotPresent, PullPolicyNever}, }, expectedPullPolicies: []api.PullPolicy{api.PullAlways, api.PullIfNotPresent, api.PullNever}, expectedErr: false, }, "invalid pull_policy": { 
config: KubernetesConfig{PullPolicy: StringOrArray{"invalid"}}, expectedErr: true, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { policies, err := tt.config.GetPullPolicies() if tt.expectedErr { assert.Error(t, err) return } assert.NoError(t, err) assert.Equal(t, tt.expectedPullPolicies, policies) }) } } func TestKubernetesConfig_ConvertFromDockerPullPolicy(t *testing.T) { tests := map[string]struct { config KubernetesConfig dockerPullPolicies []DockerPullPolicy expectedPullPolicies []api.PullPolicy expectedErr bool }{ "valid list": { config: KubernetesConfig{}, dockerPullPolicies: []DockerPullPolicy{PullPolicyAlways, PullPolicyIfNotPresent, PullPolicyNever}, expectedPullPolicies: []api.PullPolicy{api.PullAlways, api.PullIfNotPresent, api.PullNever}, expectedErr: false, }, "has an invalid pull policy": { config: KubernetesConfig{}, dockerPullPolicies: []DockerPullPolicy{PullPolicyAlways, "invalid"}, expectedPullPolicies: []api.PullPolicy{""}, expectedErr: true, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { policies, err := tt.config.ConvertFromDockerPullPolicy(tt.dockerPullPolicies) if tt.expectedErr { assert.Error(t, err) return } assert.NoError(t, err) assert.Equal(t, tt.expectedPullPolicies, policies) }) } } func TestStringOrArray_UnmarshalTOML(t *testing.T) { tests := map[string]struct { toml string expectedResult StringOrArray expectedErr bool }{ "no fields": { toml: "", expectedResult: nil, expectedErr: false, }, "empty string_or_array": { toml: `string_or_array = ""`, expectedResult: StringOrArray{""}, expectedErr: false, }, "string": { toml: `string_or_array = "always"`, expectedResult: StringOrArray{"always"}, expectedErr: false, }, "slice with invalid single value": { toml: `string_or_array = 10`, expectedErr: true, }, "valid slice with multiple values": { toml: `string_or_array = ["unknown", "always"]`, expectedResult: StringOrArray{"unknown", "always"}, expectedErr: false, }, "slice with mixed values": { toml: 
`string_or_array = ["unknown", 10]`, expectedErr: true, }, "slice with invalid values": { toml: `string_or_array = [true, false]`, expectedErr: true, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { type Config struct { StringOrArray StringOrArray `toml:"string_or_array"` } var result Config _, err := toml.Decode(tt.toml, &result) if tt.expectedErr { assert.Error(t, err) return } assert.NoError(t, err) assert.Equal(t, tt.expectedResult, result.StringOrArray) }) } } func TestKubernetesNFS_UnmarshalTOML(t *testing.T) { tests := map[string]struct { toml string expectedResult KubernetesNFS expectedErr string }{ "all required fields present": { toml: ` name = "nfs-vol" mount_path = "/mnt/data" server = "nfs.example.com" path = "/exports/data" `, expectedResult: KubernetesNFS{ Name: "nfs-vol", MountPath: "/mnt/data", Server: "nfs.example.com", Path: "/exports/data", }, }, "optional fields set": { toml: ` name = "nfs-vol" mount_path = "/mnt/data" server = "nfs.example.com" path = "/exports/data" sub_path = "subdir" read_only = true `, expectedResult: KubernetesNFS{ Name: "nfs-vol", MountPath: "/mnt/data", Server: "nfs.example.com", Path: "/exports/data", SubPath: "subdir", ReadOnly: true, }, }, "missing name": { toml: ` mount_path = "/mnt/data" server = "nfs.example.com" path = "/exports/data" `, expectedErr: "name", }, "missing mount_path": { toml: ` name = "nfs-vol" server = "nfs.example.com" path = "/exports/data" `, expectedErr: "mount_path", }, "missing server": { toml: ` name = "nfs-vol" mount_path = "/mnt/data" path = "/exports/data" `, expectedErr: "server", }, "missing path": { toml: ` name = "nfs-vol" mount_path = "/mnt/data" server = "nfs.example.com" `, expectedErr: "path", }, "all required fields missing": { toml: `read_only = true`, expectedErr: "name, mount_path, server, path", }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { type Config struct { NFS KubernetesNFS `toml:"nfs"` } var result Config _, err := 
toml.Decode("[nfs]\n"+tt.toml, &result) if tt.expectedErr != "" { require.Error(t, err) assert.Contains(t, err.Error(), tt.expectedErr) return } require.NoError(t, err) assert.Equal(t, tt.expectedResult, result.NFS) }) } } func TestAutoscalerPolicyConfig_PreemptiveModeEnabled(t *testing.T) { tests := map[string]struct { internalValue *bool idleCount int expectedValue bool }{ "should return enabled when flag is true": { internalValue: ptr(true), expectedValue: true, }, "should return turned off when flag is false": { internalValue: ptr(false), expectedValue: false, }, "should return turned off when flag is false and idle count is greater than zero": { idleCount: 10, internalValue: ptr(false), expectedValue: false, }, "should return turned off when value is not set and the idle count is zero": { idleCount: 0, internalValue: nil, expectedValue: false, }, "should return enabled when value is not set and the idle count is greater than zero": { idleCount: 10, internalValue: nil, expectedValue: true, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { config := AutoscalerPolicyConfig{ PreemptiveMode: tt.internalValue, IdleCount: tt.idleCount, } result := config.PreemptiveModeEnabled() assert.Equal(t, tt.expectedValue, result) }) } } func TestRunnerSettings_IsFeatureFlagOn(t *testing.T) { tests := map[string]struct { featureFlags map[string]bool name string expectedValue bool }{ "feature flag not configured": { featureFlags: map[string]bool{}, name: t.Name(), expectedValue: false, }, "feature flag not configured but feature flag default is true": { featureFlags: map[string]bool{}, name: featureflags.UseDirectDownload, expectedValue: true, }, "feature flag on": { featureFlags: map[string]bool{ t.Name(): true, }, name: t.Name(), expectedValue: true, }, "feature flag off": { featureFlags: map[string]bool{ featureflags.UseDirectDownload: false, }, name: t.Name(), expectedValue: false, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { cfg := 
RunnerConfig{ RunnerSettings: RunnerSettings{ FeatureFlags: tt.featureFlags, }, } on := cfg.IsFeatureFlagOn(tt.name) assert.Equal(t, tt.expectedValue, on) }) } } func TestEffectivePrivilege(t *testing.T) { tests := map[string]struct { pod bool container bool expected bool }{ "pod and container privileged": { pod: true, container: true, expected: true, }, "pod privileged": { pod: true, container: false, expected: false, }, "container privileged": { pod: false, container: true, expected: true, }, "all unprivileged": { pod: false, container: false, expected: false, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { effectivePrivileged := getContainerSecurityContextEffectiveFlagValue(&tt.container, &tt.pod) require.NotNil(t, effectivePrivileged) assert.Equal(t, tt.expected, *effectivePrivileged) }) } } func TestContainerSecurityContext(t *testing.T) { tests := map[string]struct { getSecurityContext func(c *KubernetesConfig) *api.SecurityContext getExpectedContainerSecurityContext func() *api.SecurityContext }{ "no container security context": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{}) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return &api.SecurityContext{} }, }, "run as user - container security context": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ RunAsUser: Int64Ptr(1000), }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { runAsUser := int64(1000) return &api.SecurityContext{ RunAsUser: &runAsUser, } }, }, "privileged - container security context": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ Privileged: ptr(true), }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return 
&api.SecurityContext{ Privileged: ptr(true), } }, }, "container privileged override - container security context": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { c.Privileged = ptr(true) return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ Privileged: ptr(false), RunAsUser: Int64Ptr(65535), }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { runAsUser := int64(65535) return &api.SecurityContext{ Privileged: ptr(false), RunAsUser: &runAsUser, } }, }, "allow privilege escalation - not set on container security context": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ AllowPrivilegeEscalation: ptr(true), }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return &api.SecurityContext{ AllowPrivilegeEscalation: ptr(true), } }, }, "allow privilege escalation - set on container security context": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { c.AllowPrivilegeEscalation = ptr(true) return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ AllowPrivilegeEscalation: ptr(false), }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return &api.SecurityContext{ AllowPrivilegeEscalation: ptr(false), } }, }, "SELinux type label - container security context": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ SELinuxType: "spc_t", }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return &api.SecurityContext{ SELinuxOptions: &api.SELinuxOptions{Type: "spc_t"}, } }, }, "proc mount - blank": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ ProcMount: "", }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return 
&api.SecurityContext{ ProcMount: nil, } }, }, "proc mount - invalid": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ ProcMount: "invalid", }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return &api.SecurityContext{ ProcMount: nil, } }, }, "proc mount - default": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ ProcMount: "default", }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { pm := api.DefaultProcMount return &api.SecurityContext{ ProcMount: &pm, } }, }, "proc mount - unmasked": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ ProcMount: "unmasked", }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { pm := api.UnmaskedProcMount return &api.SecurityContext{ ProcMount: &pm, } }, }, "seccomp profile - Unconfined": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ SeccompProfile: &KubernetesSeccompProfile{Type: "Unconfined"}, }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return &api.SecurityContext{ SeccompProfile: &api.SeccompProfile{Type: api.SeccompProfileTypeUnconfined}, } }, }, "seccomp profile - RuntimeDefault": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ SeccompProfile: &KubernetesSeccompProfile{Type: "RuntimeDefault"}, }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return &api.SecurityContext{ SeccompProfile: &api.SeccompProfile{Type: api.SeccompProfileTypeRuntimeDefault}, } }, }, "seccomp profile - Localhost with profile": { getSecurityContext: func(c 
*KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ SeccompProfile: &KubernetesSeccompProfile{Type: "Localhost", LocalhostProfile: "profiles/my-profile.json"}, }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { localhostProfile := "profiles/my-profile.json" return &api.SecurityContext{ SeccompProfile: &api.SeccompProfile{ Type: api.SeccompProfileTypeLocalhost, LocalhostProfile: &localhostProfile, }, } }, }, "seccomp profile - Localhost without profile": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ SeccompProfile: &KubernetesSeccompProfile{Type: "Localhost"}, }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return &api.SecurityContext{ SeccompProfile: nil, } }, }, "seccomp profile - invalid type": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ SeccompProfile: &KubernetesSeccompProfile{Type: "InvalidValue"}, }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return &api.SecurityContext{ SeccompProfile: nil, } }, }, "apparmor profile - Unconfined": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ AppArmorProfile: &KubernetesAppArmorProfile{Type: "Unconfined"}, }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return &api.SecurityContext{ AppArmorProfile: &api.AppArmorProfile{Type: api.AppArmorProfileTypeUnconfined}, } }, }, "apparmor profile - RuntimeDefault": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ AppArmorProfile: &KubernetesAppArmorProfile{Type: "RuntimeDefault"}, }) }, getExpectedContainerSecurityContext: func() 
*api.SecurityContext { return &api.SecurityContext{ AppArmorProfile: &api.AppArmorProfile{Type: api.AppArmorProfileTypeRuntimeDefault}, } }, }, "apparmor profile - Localhost with profile": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ AppArmorProfile: &KubernetesAppArmorProfile{Type: "Localhost", LocalhostProfile: "my-apparmor-profile"}, }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { localhostProfile := "my-apparmor-profile" return &api.SecurityContext{ AppArmorProfile: &api.AppArmorProfile{ Type: api.AppArmorProfileTypeLocalhost, LocalhostProfile: &localhostProfile, }, } }, }, "apparmor profile - Localhost without profile": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ AppArmorProfile: &KubernetesAppArmorProfile{Type: "Localhost"}, }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return &api.SecurityContext{ AppArmorProfile: nil, } }, }, "apparmor profile - invalid type": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ AppArmorProfile: &KubernetesAppArmorProfile{Type: "BadValue"}, }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return &api.SecurityContext{ AppArmorProfile: nil, } }, }, "seccomp and apparmor combined": { getSecurityContext: func(c *KubernetesConfig) *api.SecurityContext { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ SeccompProfile: &KubernetesSeccompProfile{Type: "Unconfined"}, AppArmorProfile: &KubernetesAppArmorProfile{Type: "Unconfined"}, }) }, getExpectedContainerSecurityContext: func() *api.SecurityContext { return &api.SecurityContext{ SeccompProfile: &api.SeccompProfile{Type: api.SeccompProfileTypeUnconfined}, AppArmorProfile: &api.AppArmorProfile{Type: 
api.AppArmorProfileTypeUnconfined},
			}
		},
	},
}

	// Each case builds the security context from a fresh KubernetesConfig so
	// no state can leak between cases.
	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			config := new(KubernetesConfig)
			scExpected := tt.getExpectedContainerSecurityContext()
			scActual := tt.getSecurityContext(config)
			assert.Equal(t, scExpected, scActual)
		})
	}
}

// TestPodSecurityContextSeccompAppArmor verifies that GetPodSecurityContext
// maps the runner's pod-level seccomp/AppArmor profile configuration onto the
// Kubernetes api.PodSecurityContext type, and that nothing configured yields a
// nil pod security context.
func TestPodSecurityContextSeccompAppArmor(t *testing.T) {
	tests := map[string]struct {
		podSecurityContext         KubernetesPodSecurityContext
		expectedPodSecurityContext *api.PodSecurityContext
	}{
		// No profile configured: no PodSecurityContext is produced at all.
		"no seccomp or apparmor set": {
			podSecurityContext:         KubernetesPodSecurityContext{},
			expectedPodSecurityContext: nil,
		},
		"seccomp profile - Unconfined at pod level": {
			podSecurityContext: KubernetesPodSecurityContext{
				SeccompProfile: &KubernetesSeccompProfile{Type: "Unconfined"},
			},
			expectedPodSecurityContext: &api.PodSecurityContext{
				SeccompProfile: &api.SeccompProfile{Type: api.SeccompProfileTypeUnconfined},
			},
		},
		"seccomp profile - RuntimeDefault at pod level": {
			podSecurityContext: KubernetesPodSecurityContext{
				SeccompProfile: &KubernetesSeccompProfile{Type: "RuntimeDefault"},
			},
			expectedPodSecurityContext: &api.PodSecurityContext{
				SeccompProfile: &api.SeccompProfile{Type: api.SeccompProfileTypeRuntimeDefault},
			},
		},
		// Localhost profiles must carry the profile path through as a *string,
		// hence the immediately-invoked func to take the address of a local.
		"seccomp profile - Localhost at pod level": {
			podSecurityContext: KubernetesPodSecurityContext{
				SeccompProfile: &KubernetesSeccompProfile{Type: "Localhost", LocalhostProfile: "profiles/pod-profile.json"},
			},
			expectedPodSecurityContext: func() *api.PodSecurityContext {
				localhostProfile := "profiles/pod-profile.json"
				return &api.PodSecurityContext{
					SeccompProfile: &api.SeccompProfile{
						Type:             api.SeccompProfileTypeLocalhost,
						LocalhostProfile: &localhostProfile,
					},
				}
			}(),
		},
		"apparmor profile - Unconfined at pod level": {
			podSecurityContext: KubernetesPodSecurityContext{
				AppArmorProfile: &KubernetesAppArmorProfile{Type: "Unconfined"},
			},
			expectedPodSecurityContext: &api.PodSecurityContext{
				AppArmorProfile: &api.AppArmorProfile{Type: api.AppArmorProfileTypeUnconfined},
			},
		},
		// Seccomp and AppArmor are independent settings and may be combined.
		"seccomp and apparmor combined at pod level": {
			podSecurityContext: KubernetesPodSecurityContext{
				SeccompProfile:  &KubernetesSeccompProfile{Type: "RuntimeDefault"},
				AppArmorProfile: &KubernetesAppArmorProfile{Type: "Unconfined"},
			},
			expectedPodSecurityContext: &api.PodSecurityContext{
				SeccompProfile:  &api.SeccompProfile{Type: api.SeccompProfileTypeRuntimeDefault},
				AppArmorProfile: &api.AppArmorProfile{Type: api.AppArmorProfileTypeUnconfined},
			},
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			config := &KubernetesConfig{
				PodSecurityContext: tt.podSecurityContext,
			}
			actual := config.GetPodSecurityContext()
			assert.Equal(t, tt.expectedPodSecurityContext, actual)
		})
	}
}

// TestKubernetesPodSpecContents exercises KubernetesPodSpec.PodSpecPatch:
// converting the configured patch (inline YAML/JSON, or a file referenced by
// PatchPath) into JSON patch bytes plus the effective patch type, including
// the sentinel-error cases for bad input and ambiguous configuration.
func TestKubernetesPodSpecContents(t *testing.T) {
	tests := map[string]struct {
		patchPath        string
		patchContents    string
		patchType        KubernetesPodSpecPatchType
		expectedContents string
		expectedType     KubernetesPodSpecPatchType
		expectedErr      error
	}{
		// Inline YAML is converted to compact JSON; the default patch type is
		// the strategic merge patch.
		"yaml to json": {
			patchContents:    `hostname: "test"`,
			expectedContents: `{"hostname":"test"}`,
			expectedType:     PatchTypeStrategicMergePatchType,
		},
		"json without format to json": {
			patchContents:    `{"hostname":"test"}`,
			expectedContents: `{"hostname":"test"}`,
			expectedType:     PatchTypeStrategicMergePatchType,
		},
		"json to json": {
			patchContents:    `{"hostname": {"test": "value"}}`,
			expectedContents: `{"hostname":{"test":"value"}}`,
			expectedType:     PatchTypeStrategicMergePatchType,
		},
		"invalid json": {
			patchContents: `{"hostname": {{}"test": "value"}}`,
			expectedType:  PatchTypeStrategicMergePatchType,
			expectedErr:   errPatchConversion,
		},
		"invalid yaml": {
			patchContents: `[invalid yaml`,
			expectedErr:   errPatchConversion,
		},
		"missing file": {
			patchPath:   "missing/file",
			expectedErr: errPatchFileFail,
		},
		// Configuring both an inline patch and a patch file is rejected.
		"patch_path and patch ambiguous": {
			patchPath:     "missing/file",
			patchContents: `{"hostname": {"test": "value"}}`,
			expectedErr:   errPatchAmbiguous,
		},
		// An explicitly configured patch type overrides the default.
		"explicit patch type": {
			patchContents:    `hostname: "test"`,
			patchType:        PatchTypeMergePatchType,
			expectedContents: `{"hostname":"test"}`,
expectedType: PatchTypeMergePatchType, }, } for tn, tc := range tests { t.Run(tn, func(t *testing.T) { s := KubernetesPodSpec{ PatchPath: tc.patchPath, Patch: tc.patchContents, PatchType: tc.patchType, } patchBytes, patchType, err := s.PodSpecPatch() if tc.expectedErr != nil { require.ErrorIs(t, err, tc.expectedErr) } else { require.NoError(t, err) assert.Equal(t, tc.expectedContents, string(patchBytes)) assert.Equal(t, tc.expectedType, patchType) } }) } } func TestContainerSecurityCapabilities(t *testing.T) { tests := map[string]struct { getCapabilitiesFn func(c *KubernetesConfig) *api.Capabilities expectedCapabilities *api.Capabilities }{ "container add": { getCapabilitiesFn: func(c *KubernetesConfig) *api.Capabilities { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ Capabilities: &KubernetesContainerCapabilities{ Add: []api.Capability{"SYS_TIME"}, }, }).Capabilities }, expectedCapabilities: &api.Capabilities{ Add: []api.Capability{"SYS_TIME"}, Drop: nil, }, }, "container drop": { getCapabilitiesFn: func(c *KubernetesConfig) *api.Capabilities { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ Capabilities: &KubernetesContainerCapabilities{ Drop: []api.Capability{"SYS_TIME"}, }, }).Capabilities }, expectedCapabilities: &api.Capabilities{ Add: nil, Drop: []api.Capability{"SYS_TIME"}, }, }, "container add and drop": { getCapabilitiesFn: func(c *KubernetesConfig) *api.Capabilities { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ Capabilities: &KubernetesContainerCapabilities{ Add: []api.Capability{"SYS_TIME"}, Drop: []api.Capability{"SYS_TIME"}, }, }).Capabilities }, expectedCapabilities: &api.Capabilities{ Add: []api.Capability{"SYS_TIME"}, Drop: []api.Capability{"SYS_TIME"}, }, }, "container empty": { getCapabilitiesFn: func(c *KubernetesConfig) *api.Capabilities { return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{}).Capabilities }, }, "container when capAdd and 
capDrop exist": { getCapabilitiesFn: func(c *KubernetesConfig) *api.Capabilities { c.CapAdd = []string{"add"} c.CapDrop = []string{"drop"} return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{}).Capabilities }, expectedCapabilities: &api.Capabilities{ Add: []api.Capability{"add"}, Drop: []api.Capability{"drop"}, }, }, "container when capAdd and container capabilities exist": { getCapabilitiesFn: func(c *KubernetesConfig) *api.Capabilities { c.CapAdd = []string{"add"} c.CapDrop = []string{"drop"} return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ Capabilities: &KubernetesContainerCapabilities{ Add: []api.Capability{"add container"}, }, }).Capabilities }, expectedCapabilities: &api.Capabilities{ Add: []api.Capability{"add container"}, Drop: []api.Capability{"drop"}, }, }, "container when capDrop and container capabilities exist": { getCapabilitiesFn: func(c *KubernetesConfig) *api.Capabilities { c.CapAdd = []string{"add"} c.CapDrop = []string{"drop"} return c.GetContainerSecurityContext(KubernetesContainerSecurityContext{ Capabilities: &KubernetesContainerCapabilities{ Drop: []api.Capability{"drop container"}, }, }).Capabilities }, expectedCapabilities: &api.Capabilities{ Add: []api.Capability{"add"}, Drop: []api.Capability{"drop container"}, }, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { config := new(KubernetesConfig) c := tt.getCapabilitiesFn(config) assert.Equal(t, tt.expectedCapabilities, c) }) } } func TestGetCapabilities(t *testing.T) { tests := map[string]struct { defaultCapDrop []string capAdd []string capDrop []string assertCapabilities func(t *testing.T, a *api.Capabilities) }{ "no data provided": { assertCapabilities: func(t *testing.T, a *api.Capabilities) { assert.Nil(t, a) }, }, "only default_cap_drop provided": { defaultCapDrop: []string{"CAP_1", "CAP_2"}, assertCapabilities: func(t *testing.T, a *api.Capabilities) { require.NotNil(t, a) assert.Empty(t, a.Add) assert.Len(t, a.Drop, 2) 
assert.Contains(t, a.Drop, api.Capability("CAP_1")) assert.Contains(t, a.Drop, api.Capability("CAP_2")) }, }, "only custom cap_add provided": { capAdd: []string{"CAP_1", "CAP_2"}, assertCapabilities: func(t *testing.T, a *api.Capabilities) { require.NotNil(t, a) assert.Len(t, a.Add, 2) assert.Contains(t, a.Add, api.Capability("CAP_1")) assert.Contains(t, a.Add, api.Capability("CAP_2")) assert.Empty(t, a.Drop) }, }, "only custom cap_drop provided": { capDrop: []string{"CAP_1", "CAP_2"}, assertCapabilities: func(t *testing.T, a *api.Capabilities) { require.NotNil(t, a) assert.Empty(t, a.Add) assert.Len(t, a.Drop, 2) assert.Contains(t, a.Drop, api.Capability("CAP_1")) assert.Contains(t, a.Drop, api.Capability("CAP_2")) }, }, "default_cap_drop and custom cap_drop sums": { defaultCapDrop: []string{"CAP_1", "CAP_2"}, capDrop: []string{"CAP_3", "CAP_4"}, assertCapabilities: func(t *testing.T, a *api.Capabilities) { require.NotNil(t, a) assert.Empty(t, a.Add) assert.Len(t, a.Drop, 4) assert.Contains(t, a.Drop, api.Capability("CAP_1")) assert.Contains(t, a.Drop, api.Capability("CAP_2")) assert.Contains(t, a.Drop, api.Capability("CAP_3")) assert.Contains(t, a.Drop, api.Capability("CAP_4")) }, }, "default_cap_drop and custom cap_drop duplicate": { defaultCapDrop: []string{"CAP_1", "CAP_2"}, capDrop: []string{"CAP_2", "CAP_3"}, assertCapabilities: func(t *testing.T, a *api.Capabilities) { require.NotNil(t, a) assert.Empty(t, a.Add) assert.Len(t, a.Drop, 3) assert.Contains(t, a.Drop, api.Capability("CAP_1")) assert.Contains(t, a.Drop, api.Capability("CAP_2")) assert.Contains(t, a.Drop, api.Capability("CAP_3")) }, }, "default_cap_drop and custom cap_add intersect": { defaultCapDrop: []string{"CAP_1", "CAP_2"}, capAdd: []string{"CAP_2", "CAP_3"}, assertCapabilities: func(t *testing.T, a *api.Capabilities) { require.NotNil(t, a) assert.Len(t, a.Add, 2) assert.Contains(t, a.Add, api.Capability("CAP_2")) assert.Contains(t, a.Add, api.Capability("CAP_3")) assert.Len(t, a.Drop, 1) 
assert.Contains(t, a.Drop, api.Capability("CAP_1"))
			},
		},
		// A capability listed in both cap_add and cap_drop ends up dropped:
		// an explicit drop always wins over an add.
		"default_cap_drop and custom cap_add intersect and cap_drop forces": {
			defaultCapDrop: []string{"CAP_1", "CAP_2"},
			capAdd:         []string{"CAP_2", "CAP_3"},
			capDrop:        []string{"CAP_2", "CAP_4"},
			assertCapabilities: func(t *testing.T, a *api.Capabilities) {
				require.NotNil(t, a)
				assert.Len(t, a.Add, 1)
				assert.Contains(t, a.Add, api.Capability("CAP_3"))
				assert.Len(t, a.Drop, 3)
				assert.Contains(t, a.Drop, api.Capability("CAP_1"))
				assert.Contains(t, a.Drop, api.Capability("CAP_2"))
				assert.Contains(t, a.Drop, api.Capability("CAP_4"))
			},
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			c := KubernetesConfig{
				CapAdd:  tt.capAdd,
				CapDrop: tt.capDrop,
			}
			tt.assertCapabilities(t, c.getCapabilities(tt.defaultCapDrop))
		})
	}
}

// TestKubernetesTerminationPeriod documents that the two grace-period fields
// default to nil (unset) and round-trip unchanged when explicitly configured.
func TestKubernetesTerminationPeriod(t *testing.T) {
	tests := map[string]struct {
		cfg                                      KubernetesConfig
		expectedPodTerminationGracePeriodSeconds *int64
		expectedCleanupGracePeriodSeconds        *int64
	}{
		"all default values": {
			cfg:                                      KubernetesConfig{},
			expectedPodTerminationGracePeriodSeconds: nil,
			expectedCleanupGracePeriodSeconds:        nil,
		},
		"all specified": {
			cfg: KubernetesConfig{
				PodTerminationGracePeriodSeconds: Int64Ptr(3),
				CleanupGracePeriodSeconds:        Int64Ptr(5),
			},
			expectedPodTerminationGracePeriodSeconds: Int64Ptr(3),
			expectedCleanupGracePeriodSeconds:        Int64Ptr(5),
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			// EqualValues is used because both sides are *int64 pointers.
			assert.EqualValues(
				t,
				tt.expectedPodTerminationGracePeriodSeconds,
				tt.cfg.PodTerminationGracePeriodSeconds,
			)
			assert.EqualValues(
				t,
				tt.expectedCleanupGracePeriodSeconds,
				tt.cfg.CleanupGracePeriodSeconds,
			)
		})
	}
}

// TestConfig_SaveConfig verifies that SaveConfig writes through the injected
// ConfigSaver exactly once and refreshes the ModTime field afterwards.
func TestConfig_SaveConfig(t *testing.T) {
	const (
		configFileName = "config-file"
	)

	// Start with a ModTime clearly in the past so the update is observable.
	oldTime := time.Now().Add(-1 * time.Hour)

	cs := NewMockConfigSaver(t)
	cs.On("Save", configFileName, mock.Anything).Return(nil).Once()

	c := new(Config)
	c.ModTime = oldTime
	c.ConfigSaver = cs

	err := c.SaveConfig(configFileName)
	require.NoError(t, err)
	assert.NotEqual(t, oldTime,
c.ModTime, "Expected ModTime field of Config struct to be updated") } func TestConfig_Masked(t *testing.T) { tests := map[string]struct { input *Config expected *Config }{ "nil runner": { input: &Config{ Runners: nil, }, expected: &Config{ Runners: nil, }, }, "runner token": { input: &Config{ Runners: []*RunnerConfig{ nil, { RunnerCredentials: RunnerCredentials{ Token: "some token", }, }, }, }, expected: &Config{ Runners: []*RunnerConfig{ nil, { RunnerCredentials: RunnerCredentials{ Token: "[MASKED]", }, }, }, }, }, "kubernetes bearer token": { input: &Config{ Runners: []*RunnerConfig{ nil, { RunnerSettings: RunnerSettings{ Kubernetes: nil, }, }, { RunnerSettings: RunnerSettings{ Kubernetes: &KubernetesConfig{ BearerToken: "some bearer token", }, }, }, }, }, expected: &Config{ Runners: []*RunnerConfig{ nil, { RunnerSettings: RunnerSettings{ Kubernetes: nil, }, }, { RunnerSettings: RunnerSettings{ Kubernetes: &KubernetesConfig{ BearerToken: "[MASKED]", }, }, }, }, }, }, "cache s3 access key": { input: &Config{ Runners: []*RunnerConfig{ nil, { RunnerSettings: RunnerSettings{ Cache: nil, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: nil, }, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: &cacheconfig.CacheS3Config{ AccessKey: "some access key", }, }, }, }, }, }, expected: &Config{ Runners: []*RunnerConfig{ nil, { RunnerSettings: RunnerSettings{ Cache: nil, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: nil, }, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: &cacheconfig.CacheS3Config{ AccessKey: "[MASKED]", }, }, }, }, }, }, }, "cache s3 secret key": { input: &Config{ Runners: []*RunnerConfig{ nil, { RunnerSettings: RunnerSettings{ Cache: nil, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: nil, }, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: &cacheconfig.CacheS3Config{ SecretKey: "some secret key", }, }, }, }, }, }, 
expected: &Config{ Runners: []*RunnerConfig{ nil, { RunnerSettings: RunnerSettings{ Cache: nil, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: nil, }, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: &cacheconfig.CacheS3Config{ SecretKey: "[MASKED]", }, }, }, }, }, }, }, "cache s3 session token": { input: &Config{ Runners: []*RunnerConfig{ nil, { RunnerSettings: RunnerSettings{ Cache: nil, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: nil, }, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: &cacheconfig.CacheS3Config{ SessionToken: "some session token", }, }, }, }, }, }, expected: &Config{ Runners: []*RunnerConfig{ nil, { RunnerSettings: RunnerSettings{ Cache: nil, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: nil, }, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: &cacheconfig.CacheS3Config{ SessionToken: "[MASKED]", }, }, }, }, }, }, }, "cache gcs private key": { input: &Config{ Runners: []*RunnerConfig{ nil, { RunnerSettings: RunnerSettings{ Cache: nil, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ GCS: nil, }, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ GCS: &cacheconfig.CacheGCSConfig{ CacheGCSCredentials: cacheconfig.CacheGCSCredentials{ PrivateKey: "some private key", }, }, }, }, }, }, }, expected: &Config{ Runners: []*RunnerConfig{ nil, { RunnerSettings: RunnerSettings{ Cache: nil, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: nil, }, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ GCS: &cacheconfig.CacheGCSConfig{ CacheGCSCredentials: cacheconfig.CacheGCSCredentials{ PrivateKey: "[MASKED]", }, }, }, }, }, }, }, }, "cache gcs universe domain": { input: &Config{ Runners: []*RunnerConfig{ { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ GCS: &cacheconfig.CacheGCSConfig{ BucketName: "test-bucket", UniverseDomain: 
"googleapis.com", }, }, }, }, }, }, expected: &Config{ Runners: []*RunnerConfig{ { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ GCS: &cacheconfig.CacheGCSConfig{ BucketName: "test-bucket", UniverseDomain: "googleapis.com", }, }, }, }, }, }, }, "cache azure account key": { input: &Config{ Runners: []*RunnerConfig{ nil, { RunnerSettings: RunnerSettings{ Cache: nil, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: nil, }, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ Azure: &cacheconfig.CacheAzureConfig{ CacheAzureCredentials: cacheconfig.CacheAzureCredentials{ AccountKey: "some account key", }, }, }, }, }, }, }, expected: &Config{ Runners: []*RunnerConfig{ nil, { RunnerSettings: RunnerSettings{ Cache: nil, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ S3: nil, }, }, }, { RunnerSettings: RunnerSettings{ Cache: &cacheconfig.Config{ Azure: &cacheconfig.CacheAzureConfig{ CacheAzureCredentials: cacheconfig.CacheAzureCredentials{ AccountKey: "[MASKED]", }, }, }, }, }, }, }, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { got, err := tt.input.Masked() assert.NoError(t, err) assert.Equal(t, tt.expected, got) }) } } func TestConfig_GetCleanupResourcesTimeout(t *testing.T) { tests := map[string]struct { config string expected time.Duration expectError bool }{ "negative value": { config: ` [[runners]] name = "negative value" executor = "kubernetes" [runners.kubernetes] cleanup_resources_timeout = "-5m"`, expected: KubernetesCleanupResourcesTimeout, }, "zero value": { config: ` [[runners]] name = "zero value" executor = "kubernetes" [runners.kubernetes] cleanup_resources_timeout = "0m"`, expected: KubernetesCleanupResourcesTimeout, }, "no value": { config: ` [[runners]] name = "no value" executor = "kubernetes" [runners.kubernetes]`, expected: KubernetesCleanupResourcesTimeout, }, "valid value": { config: ` [[runners]] name = "valid value" executor = "kubernetes" 
[runners.kubernetes]
    cleanup_resources_timeout = "3m"`,
			expected: 3 * time.Minute,
		},
		// An unparseable duration surfaces as a TOML decode error rather than
		// silently falling back to the default.
		"invalid value": {
			config: `
[[runners]]
  name = "invalid value"
  executor = "kubernetes"
  [runners.kubernetes]
    cleanup_resources_timeout = "nothing"`,
			expected:    KubernetesCleanupResourcesTimeout,
			expectError: true,
		},
	}

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			cfg := NewConfig()
			_, e := toml.Decode(tt.config, cfg)
			if tt.expectError {
				assert.Error(t, e)
				return
			}
			assert.Equal(t, tt.expected.Seconds(), cfg.Runners[0].Kubernetes.GetCleanupResourcesTimeout().Seconds())
		})
	}
}

// Test_Docker_UserIsAllowed covers DockerConfig.IsUserAllowed. Per the table:
// with no allowlist, a job-level user is only rejected when a runner-level
// user is also set and they differ; with an allowlist, the job user must be
// on the list (an unlisted runner-level user alone does not fail the check).
func Test_Docker_UserIsAllowed(t *testing.T) {
	tests := map[string]struct {
		user, runnerUser string
		allowedUsers     []string
		want             bool
	}{
		"no allowed users, neither specified":     {want: true},
		"no allowed users, runner user specified": {runnerUser: "baba", want: true},
		"no allowed users, job user specified":    {user: "baba", want: true},
		"no allowed users, both specified":        {runnerUser: "baba", user: "yaga", want: false},
		"allowed users, neither specified":        {allowedUsers: []string{"baba"}, want: true},
		"allowed users, runner user specified":    {allowedUsers: []string{"baba"}, runnerUser: "yaga", want: true},
		"allowed users, job user specified":       {allowedUsers: []string{"baba"}, runnerUser: "yaga", user: "baba", want: true},
		"allowed users, both specified":           {allowedUsers: []string{"baba"}, runnerUser: "yaga", user: "yaga", want: false},
	}

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			cfg := DockerConfig{
				User:         tt.runnerUser,
				AllowedUsers: tt.allowedUsers,
			}
			assert.Equal(t, tt.want, cfg.IsUserAllowed(tt.user))
		})
	}
}

// Test_Kubernetes_GroupIsAllowed covers KubernetesConfig.IsGroupAllowed.
// Groups must be numeric, GID 0 (root) is blocked unless explicitly
// allowlisted, and — per the "bypass" cases — alternate spellings of zero
// ("00", "000", "-0") are treated as root too, i.e. comparisons are numeric
// rather than string-based.
func Test_Kubernetes_GroupIsAllowed(t *testing.T) {
	tests := map[string]struct {
		group         string
		allowedGroups []string
		expectError   bool
	}{
		"no allowed groups":         {group: "1000", allowedGroups: nil, expectError: false},
		"exact match":               {group: "1000", allowedGroups: []string{"1000"}, expectError: false},
		"exact match fails":         {group: "1000", allowedGroups: []string{"1001"}, expectError: true},
		"multiple groups":           {group: "1000", allowedGroups: []string{"1000", "1001"}, expectError: false},
		"empty group allowed":       {group: "", allowedGroups: []string{"1000"}, expectError: false},
		"non-numeric group rejected": {group: "wheel", expectError: true},
		// Root (GID 0) is denied by default and must be allowlisted explicitly.
		"root group blocked by default":   {group: "0", expectError: true},
		"root group explicitly allowed":   {group: "0", allowedGroups: []string{"0", "1000"}, expectError: false},
		"root group explicitly blocked":   {group: "0", allowedGroups: []string{"1000", "1001"}, expectError: true},
		// Alternate numeric spellings of zero must not bypass the root check.
		"root group bypass via 00":  {group: "00", expectError: true},
		"root group bypass via 000": {group: "000", expectError: true},
		"root group bypass via -0":  {group: "-0", expectError: true},
		// ...and allowlist matching is numeric, so "0" matches "00"/"000".
		"root group via 00 explicitly allowed":  {group: "00", allowedGroups: []string{"0"}, expectError: false},
		"root group via 000 explicitly allowed": {group: "000", allowedGroups: []string{"0"}, expectError: false},
		"root group via 0 with 00 in allowlist": {group: "0", allowedGroups: []string{"00"}, expectError: false},
		// Non-numeric allowlist entries are ignored, not fatal.
		"numeric group with mixed allowlist match":    {group: "1000", allowedGroups: []string{"wheel", "1000"}, expectError: false},
		"numeric group with mixed allowlist no match": {group: "1000", allowedGroups: []string{"wheel", "1001"}, expectError: true},
	}

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			cfg := KubernetesConfig{
				AllowedGroups: tt.allowedGroups,
			}
			err := cfg.IsGroupAllowed(tt.group)
			if tt.expectError {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
			}
		})
	}
}

// Test_Kubernetes_UserIsAllowed mirrors the group test for
// KubernetesConfig.IsUserAllowed: numeric UIDs only, UID 0 blocked unless
// explicitly allowlisted, with numeric (not string) comparison so "00",
// "000" and "-0" cannot bypass the root check.
func Test_Kubernetes_UserIsAllowed(t *testing.T) {
	tests := map[string]struct {
		user         string
		allowedUsers []string
		expectError  bool
	}{
		"empty user":                 {user: "", expectError: false},
		"no allowed users specified": {user: "1000", expectError: false},
		"user in allowed list":       {user: "1000", allowedUsers: []string{"1000", "1001"}, expectError: false},
		"user not in allowed list":   {user: "1002", allowedUsers: []string{"1000", "1001"}, expectError: true},
		"single user allowed list":   {user: "1000", allowedUsers: []string{"1000"}, expectError: false},
		"single user not in list":    {user: "1001", allowedUsers: []string{"1000"}, expectError: true},
		"non-numeric user rejected":  {user: "nobody", expectError: true},
		// Root (UID 0) is denied by default and must be allowlisted explicitly.
		"root user blocked by default": {user: "0", expectError: true},
		"root user explicitly allowed": {user: "0", allowedUsers: []string{"0", "1000"}, expectError: false},
		"root user explicitly blocked": {user: "0", allowedUsers: []string{"1000", "1001"}, expectError: true},
		// Alternate numeric spellings of zero must not bypass the root check.
		"root user bypass via 00":  {user: "00", expectError: true},
		"root user bypass via 000": {user: "000", expectError: true},
		"root user bypass via -0":  {user: "-0", expectError: true},
		// ...and allowlist matching is numeric, so "0" matches "00"/"000".
		"root user via 00 explicitly allowed":  {user: "00", allowedUsers: []string{"0"}, expectError: false},
		"root user via 000 explicitly allowed": {user: "000", allowedUsers: []string{"0"}, expectError: false},
		"root user via 0 with 00 in allowlist": {user: "0", allowedUsers: []string{"00"}, expectError: false},
		// Non-numeric allowlist entries are ignored, not fatal.
		"numeric user with mixed allowlist match":    {user: "1000", allowedUsers: []string{"wheel", "1000"}, expectError: false},
		"numeric user with mixed allowlist no match": {user: "1000", allowedUsers: []string{"wheel", "1001"}, expectError: true},
	}

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			cfg := KubernetesConfig{
				AllowedUsers: tt.allowedUsers,
			}
			err := cfg.IsUserAllowed(tt.user)
			if tt.expectError {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
			}
		})
	}
}

// TestLoadConfig loads TOML config snippets from a temp file and validates
// parsing defaults, connection_max_age handling, and global/runner label
// merging (including rejection of invalid label keys).
func TestLoadConfig(t *testing.T) {
	tests := map[string]struct {
		config         string
		validateConfig func(t *testing.T, config *Config)
		assertError    func(t *testing.T, err error)
	}{
		// Empty config: no runners, and connection_max_age defaults to 15m.
		"parse defaults": {
			config: ``,
			validateConfig: func(t *testing.T, config *Config) {
				require.Equal(t, 0, len(config.Runners))
				require.Equal(t, 15*time.Minute, *config.ConnectionMaxAge)
			},
		},
		"connection max age set": {
			config: `connection_max_age = "1s"`,
			validateConfig: func(t *testing.T, config *Config) {
				require.Equal(t,
0, len(config.Runners)) require.Equal(t, 1*time.Second, *config.ConnectionMaxAge) }, }, "invalid labels": { config: `[labels] # Global defaults "invalid/key" = "valid_value" [[runners]] name = "labels-test" [runners.labels] # Runner-specific data env = "prod" `, assertError: func(t *testing.T, err error) { assert.ErrorIs(t, err, ErrInvalidLabelKey) }, }, "valid labels": { config: ` concurrent = 1 [labels] "env" = "prod" test = "value" test_label = "value" test-label = "value" "test.label" = "value" [[runners]] name = "labels-test" [runners.labels] "shard" = "default" test = "override" test_label = "override" "test-label" = "override" "test.label" = "override" `, validateConfig: func(t *testing.T, config *Config) { globalLabels := Labels{ "env": "prod", "test": "value", "test_label": "value", "test.label": "value", "test-label": "value", } runnerLabels := Labels{ "shard": "default", "test": "override", "test_label": "override", "test.label": "override", "test-label": "override", } computedLabels := Labels{ "env": "prod", "shard": "default", "test": "override", "test_label": "override", "test.label": "override", "test-label": "override", } assert.Equal(t, globalLabels, config.Labels) if assert.GreaterOrEqual(t, len(config.Runners), 1) { assert.Equal(t, runnerLabels, config.Runners[0].Labels) assert.Equal(t, computedLabels, config.Runners[0].ComputedLabels()) } }, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { tempFile, err := os.CreateTemp(t.TempDir(), "test_config") require.NoError(t, err) defer tempFile.Close() _, err = tempFile.WriteString(tt.config) require.NoError(t, err) cfg := NewConfig() err = cfg.LoadConfig(tempFile.Name()) if tt.assertError != nil { tt.assertError(t, err) return } assert.NoError(t, err) if tt.validateConfig != nil { tt.validateConfig(t, cfg) } }) } } func TestLoadConfig_ExpandsEnvironmentVariables(t *testing.T) { t.Setenv("TEST_RUNNER_URL", "https://gitlab.example.com") t.Setenv("TEST_RUNNER_TOKEN_1", "glrt-token-one") 
t.Setenv("TEST_RUNNER_TOKEN_2", "glrt-token-two") configContent := ` [[runners]] name = "runner-1" url = "$TEST_RUNNER_URL" token = "${TEST_RUNNER_TOKEN_1}" [[runners]] name = "runner-2" url = "${TEST_RUNNER_URL}" token = "$TEST_RUNNER_TOKEN_2" [[runners]] name = "runner-literal" url = "https://literal.example.com" token = "glrt-literal-token" ` tempFile, err := os.CreateTemp(t.TempDir(), "test_config") require.NoError(t, err) defer tempFile.Close() _, err = tempFile.WriteString(configContent) require.NoError(t, err) cfg := NewConfig() err = cfg.LoadConfig(tempFile.Name()) require.NoError(t, err) require.Len(t, cfg.Runners, 3) // runner-1: both $VAR and ${VAR} syntax should work assert.Equal(t, "https://gitlab.example.com", cfg.Runners[0].URL) assert.Equal(t, "glrt-token-one", cfg.Runners[0].Token) // runner-2: same expansion assert.Equal(t, "https://gitlab.example.com", cfg.Runners[1].URL) assert.Equal(t, "glrt-token-two", cfg.Runners[1].Token) // runner-literal: literal values should remain unchanged assert.Equal(t, "https://literal.example.com", cfg.Runners[2].URL) assert.Equal(t, "glrt-literal-token", cfg.Runners[2].Token) } func Test_CommandLineFlags(t *testing.T) { tests := map[string]struct { args []string expectedError bool verifyArgs func(t *testing.T, config *RunnerConfig) }{ "Kubernetes host aliases": { args: []string{ "--request-concurrency", "10", "--kubernetes-host_aliases", `[{"ip":"192.168.1.100","hostnames":["myservice.local"]},{"ip":"192.168.1.101","hostnames":["otherservice.local"]}]`, }, verifyArgs: func(t *testing.T, config *RunnerConfig) { assert.Equal(t, 10, config.RequestConcurrency) assert.Len(t, config.Kubernetes.HostAliases, 2) assert.Equal(t, "192.168.1.100", config.Kubernetes.HostAliases[0].IP) assert.Len(t, config.Kubernetes.HostAliases[0].Hostnames, 1) assert.Equal(t, "myservice.local", config.Kubernetes.HostAliases[0].Hostnames[0]) assert.Len(t, config.Kubernetes.HostAliases[1].Hostnames, 1) assert.Equal(t, "otherservice.local", 
config.Kubernetes.HostAliases[1].Hostnames[0]) }, }, "Bad Kubernetes host aliases": { args: []string{ "--kubernetes-host_aliases", "{ bad", }, expectedError: true, }, } // Loop across tests for tn, tt := range tests { t.Run(tn, func(t *testing.T) { config := &RunnerConfig{} flags := clihelpers.GetFlagsFromStruct(config) flagSet := flag.NewFlagSet("test-flags", flag.ContinueOnError) for _, f := range flags { f.Apply(flagSet) } err := flagSet.Parse(tt.args) if tt.expectedError { assert.Error(t, err) } else { assert.NoError(t, err) if tt.verifyArgs != nil { tt.verifyArgs(t, config) } } }) } } func TestConfig_SaveConfig_CustomBuildDir(t *testing.T) { tests := map[string]struct { customBuildDir CustomBuildDir expectedTomlRE string notExpectedTomlRE string }{ "not explicitly set": { customBuildDir: CustomBuildDir{}, notExpectedTomlRE: "custom_build_dir", }, "explicitly enabled": { customBuildDir: CustomBuildDir{Enabled: ptr(true)}, expectedTomlRE: `(?m)\[runners\.custom_build_dir\]\n\s+enabled = true\n`, }, "explicitly disabled": { customBuildDir: CustomBuildDir{Enabled: ptr(false)}, expectedTomlRE: `(?m)\[runners\.custom_build_dir\]\n\s+enabled = false\n`, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { cs := NewMockConfigSaver(t) cs.On("Save", "", mock.MatchedBy(func(b []byte) bool { tomlBlob := string(b) if e := test.expectedTomlRE; e != "" { assert.Regexp(t, e, tomlBlob) } if ne := test.notExpectedTomlRE; ne != "" { assert.NotRegexp(t, ne, tomlBlob) } return true })).Return(nil).Once() c := &Config{ ConfigSaver: cs, Runners: []*RunnerConfig{ { Name: name, RunnerSettings: RunnerSettings{ CustomBuildDir: test.customBuildDir, }, }, }, } err := c.SaveConfig("") require.NoError(t, err) }) } } func ptr[T any](v T) *T { return &v } func TestRunnerByName(t *testing.T) { examples := map[string]struct { runners []*RunnerConfig runnerName string expectedIndex int expectedError error }{ "finds runner by name": { runners: []*RunnerConfig{ { Name: "runner1", 
}, { Name: "runner2", }, }, runnerName: "runner2", expectedIndex: 1, }, "does not find non-existent runner": { runners: []*RunnerConfig{ { Name: "runner1", }, { Name: "runner2", }, }, runnerName: "runner3", expectedIndex: -1, expectedError: fmt.Errorf("could not find a runner with the name 'runner3'"), }, } for tn, tt := range examples { t.Run(tn, func(t *testing.T) { config := &Config{ Runners: tt.runners, } runner, err := config.RunnerByName(tt.runnerName) if tt.expectedIndex == -1 { assert.Nil(t, runner) } else { assert.Equal(t, tt.runners[tt.expectedIndex], runner) } assert.Equal(t, tt.expectedError, err) }) } } func TestRunnerByToken(t *testing.T) { examples := map[string]struct { runners []*RunnerConfig runnerToken string expectedIndex int expectedError error }{ "finds runner by token": { runners: []*RunnerConfig{ { RunnerCredentials: RunnerCredentials{ Token: "runner1", }, }, { RunnerCredentials: RunnerCredentials{ Token: "runner2", }, }, }, runnerToken: "runner2", expectedIndex: 1, }, "does not find non-existent runner authentication token": { runners: []*RunnerConfig{ { RunnerCredentials: RunnerCredentials{ Token: "runner1", }, }, { RunnerCredentials: RunnerCredentials{ Token: "runner2", }, }, }, runnerToken: "runner3", expectedIndex: -1, expectedError: fmt.Errorf("could not find a runner with the token 'runner3'"), }, } for tn, tt := range examples { t.Run(tn, func(t *testing.T) { config := &Config{ Runners: tt.runners, } runner, err := config.RunnerByToken(tt.runnerToken) if tt.expectedIndex == -1 { assert.Nil(t, runner) } else { assert.Equal(t, tt.runners[tt.expectedIndex], runner) } assert.Equal(t, tt.expectedError, err) }) } } func TestRunnerByURLAndID(t *testing.T) { examples := map[string]struct { runners []*RunnerConfig runnerURL string runnerID int64 expectedIndex int expectedError error }{ "finds runner by name": { runners: []*RunnerConfig{ { RunnerCredentials: RunnerCredentials{ ID: 1, URL: "https://gitlab1.example.com/", }, }, { 
					RunnerCredentials: RunnerCredentials{
						ID:  2,
						URL: "https://gitlab1.example.com/",
					},
				},
			},
			runnerURL:     "https://gitlab1.example.com/",
			runnerID:      1,
			expectedIndex: 0,
		},
		"does not find runner with wrong ID": {
			runners: []*RunnerConfig{
				{
					RunnerCredentials: RunnerCredentials{
						ID:  1,
						URL: "https://gitlab1.example.com/",
					},
				},
				{
					RunnerCredentials: RunnerCredentials{
						ID:  2,
						URL: "https://gitlab1.example.com/",
					},
				},
			},
			runnerURL:     "https://gitlab1.example.com/",
			runnerID:      3,
			expectedIndex: -1,
			expectedError: fmt.Errorf(`could not find a runner with the URL "https://gitlab1.example.com/" and ID 3`),
		},
		"does not find runner with wrong URL": {
			runners: []*RunnerConfig{
				{
					RunnerCredentials: RunnerCredentials{
						ID:  1,
						URL: "https://gitlab1.example.com/",
					},
				},
				{
					RunnerCredentials: RunnerCredentials{
						ID:  2,
						URL: "https://gitlab1.example.com/",
					},
				},
			},
			runnerURL:     "https://gitlab2.example.com/",
			runnerID:      1,
			expectedIndex: -1,
			expectedError: fmt.Errorf(`could not find a runner with the URL "https://gitlab2.example.com/" and ID 1`),
		},
	}

	for tn, tt := range examples {
		t.Run(tn, func(t *testing.T) {
			config := &Config{
				Runners: tt.runners,
			}

			runner, err := config.RunnerByURLAndID(tt.runnerURL, tt.runnerID)
			if tt.expectedIndex == -1 {
				assert.Nil(t, runner)
			} else {
				assert.Equal(t, tt.runners[tt.expectedIndex], runner)
			}
			assert.Equal(t, tt.expectedError, err)
		})
	}
}

// TestRunnerByNameAndToken verifies Config.RunnerByNameAndToken, which requires
// both fields to match the same runner entry.
func TestRunnerByNameAndToken(t *testing.T) {
	examples := map[string]struct {
		runners       []*RunnerConfig
		runnerName    string
		runnerToken   string
		expectedIndex int // index into runners; -1 means "not found" expected
		expectedError error
	}{
		"finds runner by name and token": {
			runners: []*RunnerConfig{
				{
					Name: "runner1",
					RunnerCredentials: RunnerCredentials{
						Token: "token1",
					},
				},
				{
					Name: "runner2",
					RunnerCredentials: RunnerCredentials{
						Token: "token2",
					},
				},
			},
			runnerName:    "runner1",
			runnerToken:   "token1",
			expectedIndex: 0,
		},
		"does not find runner with wrong name": {
			runners: []*RunnerConfig{
				{
					Name: "runner1",
					RunnerCredentials: RunnerCredentials{
						Token: "token1",
					},
				},
				{
					Name:
					"runner2",
					RunnerCredentials: RunnerCredentials{
						Token: "token2",
					},
				},
			},
			runnerName:    "runner3",
			runnerToken:   "token1",
			expectedIndex: -1,
			expectedError: fmt.Errorf(`could not find a runner with the Name 'runner3' and Token 'token1'`),
		},
		"does not find runner with wrong token": {
			runners: []*RunnerConfig{
				{
					Name: "runner1",
					RunnerCredentials: RunnerCredentials{
						Token: "token1",
					},
				},
				{
					Name: "runner2",
					RunnerCredentials: RunnerCredentials{
						Token: "token2",
					},
				},
			},
			runnerName:    "runner1",
			runnerToken:   "token3",
			expectedIndex: -1,
			expectedError: fmt.Errorf(`could not find a runner with the Name 'runner1' and Token 'token3'`),
		},
	}

	for tn, tt := range examples {
		t.Run(tn, func(t *testing.T) {
			config := &Config{
				Runners: tt.runners,
			}

			runner, err := config.RunnerByNameAndToken(tt.runnerName, tt.runnerToken)
			if tt.expectedIndex == -1 {
				assert.Nil(t, runner)
			} else {
				assert.Equal(t, tt.runners[tt.expectedIndex], runner)
			}
			assert.Equal(t, tt.expectedError, err)
		})
	}
}

// TestRunnerSettings_ComputeLabels verifies that ComputeLabels merges global
// default labels with runner-level labels (runner labels win on key conflicts)
// into the private computed `labels` field, leaving the public Labels untouched.
func TestRunnerSettings_ComputeLabels(t *testing.T) {
	tests := map[string]struct {
		runnerWorkerLabels        Labels // runner-level Labels field
		initialRunnerWorkerLabels Labels // pre-existing computed labels, should be replaced
		globalDefaults            Labels // global (config-level) default labels
		expectedResult            Labels // expected computed labels after the call
	}{
		"nil labels and nil computed with empty global defaults": {
			runnerWorkerLabels: nil,
			globalDefaults:     Labels{},
			expectedResult:     Labels{},
		},
		"nil labels and nil computed with global defaults": {
			runnerWorkerLabels: nil,
			globalDefaults:     Labels{"env": "prod", "team": "backend"},
			expectedResult:     Labels{"env": "prod", "team": "backend"},
		},
		"empty labels with global defaults": {
			runnerWorkerLabels: Labels{},
			globalDefaults:     Labels{"env": "prod", "team": "backend"},
			expectedResult:     Labels{"env": "prod", "team": "backend"},
		},
		"runner labels override global defaults": {
			runnerWorkerLabels: Labels{"env": "staging", "region": "us-west"},
			globalDefaults:     Labels{"env": "prod", "team": "backend"},
			expectedResult:     Labels{"env": "staging", "team": "backend", "region": "us-west"},
		},
		"runner labels only, no global defaults": {
			runnerWorkerLabels: Labels{"custom": "value", "runner": "specific"},
			expectedResult:     Labels{"custom": "value", "runner": "specific"},
		},
		"existing computed labels are overwritten": {
			runnerWorkerLabels:        Labels{"env": "staging"},
			initialRunnerWorkerLabels: Labels{"old": "value", "env": "dev"},
			globalDefaults:            Labels{"team": "backend"},
			expectedResult:            Labels{"env": "staging", "team": "backend"},
		},
		"nil global defaults with existing labels": {
			runnerWorkerLabels: Labels{"runner": "test"},
			globalDefaults:     nil,
			expectedResult:     Labels{"runner": "test"},
		},
		"complex scenario with multiple overrides": {
			runnerWorkerLabels: Labels{"env": "staging", "version": "1.2.3", "team": "frontend"},
			globalDefaults:     Labels{"env": "prod", "team": "backend", "region": "us-east", "cost-center": "eng"},
			expectedResult:     Labels{"env": "staging", "version": "1.2.3", "team": "frontend", "region": "us-east", "cost-center": "eng"},
		},
		"empty string values in labels": {
			runnerWorkerLabels: Labels{"empty": "", "normal": "value"},
			globalDefaults:     Labels{"global": "default", "empty": "global-value"},
			expectedResult:     Labels{"global": "default", "empty": "", "normal": "value"},
		},
		"labels with special characters in key": {
			runnerWorkerLabels: Labels{"key-with-dashes": "value1", "key_with_underscores": "value2", "key.with.dots": "value3"},
			globalDefaults:     Labels{"key-with_different.characters": "value4"},
			expectedResult:     Labels{"key-with-dashes": "value1", "key_with_underscores": "value2", "key.with.dots": "value3", "key-with_different.characters": "value4"},
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			r := &RunnerSettings{
				Labels: tt.runnerWorkerLabels,
				labels: tt.initialRunnerWorkerLabels,
			}

			r.ComputeLabels(tt.globalDefaults)

			// The public Labels field must not be mutated by the merge.
			assert.Equal(t, tt.runnerWorkerLabels, r.Labels)
			assert.Equal(t, tt.expectedResult, r.labels, "computed labels should match expected result")
		})
	}
}

// TestRunnerSettings_ComputedLabels verifies that ComputedLabels returns the
// private computed `labels` field as-is.
func TestRunnerSettings_ComputedLabels(t *testing.T) {
	tests := map[string]struct {
		computedLabels Labels
		expected Labels
	}{
		"nil computed labels": {
			computedLabels: nil,
			expected:       nil,
		},
		"empty computed labels": {
			computedLabels: Labels{},
			expected:       Labels{},
		},
		"single label": {
			computedLabels: Labels{"env": "prod"},
			expected:       Labels{"env": "prod"},
		},
		"multiple labels": {
			computedLabels: Labels{"env": "prod", "team": "backend", "region": "us-west"},
			expected:       Labels{"env": "prod", "team": "backend", "region": "us-west"},
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			r := &RunnerSettings{
				labels: tt.computedLabels,
			}
			assert.Equal(t, tt.expected, r.ComputedLabels(), "ComputedLabels should return the labels field")
		})
	}
}

// TestRunnerSettings_CombineLabels_MultipleCalls verifies that each ComputeLabels
// call recomputes from scratch: a second call with different global defaults
// must not retain defaults merged in by the first call.
func TestRunnerSettings_CombineLabels_MultipleCalls(t *testing.T) {
	t.Run("multiple calls to ComputeLabels", func(t *testing.T) {
		r := &RunnerSettings{
			Labels: Labels{"runner": "test"},
		}

		// First call
		r.ComputeLabels(Labels{"env": "prod", "team": "backend"})
		expected1 := Labels{"env": "prod", "team": "backend", "runner": "test"}
		assert.Equal(t, expected1, r.ComputedLabels())

		// Second call with different global defaults
		r.ComputeLabels(Labels{"env": "staging", "region": "us-east"})
		expected2 := Labels{"env": "staging", "region": "us-east", "runner": "test"}
		assert.Equal(t, expected2, r.ComputedLabels())
	})
}

// TestConfig_Validate verifies global/runner label validation: key and value
// syntax checks and the combined label-count limit.
func TestConfig_Validate(t *testing.T) {
	tests := map[string]struct {
		globalLabels Labels
		runnerLabels Labels
		// assertError, when set, checks the error returned by Config.Validate;
		// when nil the config is expected to be valid.
		assertError func(t *testing.T, err error)
	}{
		"all labels are valid": {
			globalLabels: Labels{
				"env": "production",
			},
			runnerLabels: Labels{
				"privileged": "true",
			},
		},
		"invalid global label key": {
			globalLabels: Labels{
				"test/key": "test_value",
			},
			assertError: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, ErrInvalidLabelKey)
				// "lobal labels" matches both "Global labels" and "global labels"
				// regardless of message capitalization.
				assert.Contains(t, err.Error(), "lobal labels")
			},
		},
		"invalid global label value": {
			globalLabels: Labels{
				"test_key": "test/value",
			},
			assertError: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, ErrInvalidLabelValue)
				assert.Contains(t, err.Error(), "lobal labels")
			},
		},
		"invalid runner label key": {
			runnerLabels: Labels{
				"test/key": "test_value",
			},
			assertError: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, ErrInvalidLabelKey)
				assert.Contains(t, err.Error(), "runner-tested")
			},
		},
		"invalid runner label value": {
			runnerLabels: Labels{
				"test_key": "test/value",
			},
			assertError: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, ErrInvalidLabelValue)
				assert.Contains(t, err.Error(), "runner-tested")
			},
		},
		"too many labels": {
			globalLabels: Labels{
				"one":       "1",
				"two":       "2",
				"three":     "3",
				"four":      "4",
				"five":      "5",
				"six":       "6",
				"seven":     "7",
				"eight":     "8",
				"nine":      "9",
				"ten":       "10",
				"eleven":    "11",
				"twelve":    "12",
				"thirteen":  "13",
				"fourteen":  "14",
				"fifteen":   "15",
				"sixteen":   "16",
				"seventeen": "17",
			},
			runnerLabels: Labels{
				"eighteen":     "18",
				"nineteen":     "19",
				"twenty":       "20",
				"twenty-one":   "21",
				"twenty-two":   "22",
				"twenty-three": "23",
				"twenty-four":  "24",
				"twenty-five":  "25",
				"twenty-six":   "26",
				"twenty-seven": "27",
				"twenty-eight": "28",
				"twenty-nine":  "29",
				"thirty":       "30",
				"thirty-one":   "31",
				"thirty-two":   "32",
				"thirty-three": "33",
				"thirty-four":  "34",
			},
			assertError: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, ErrLabelsCountExceeded)
				assert.Contains(t, err.Error(), "runner-tested")
			},
		},
	}

	for tn, tc := range tests {
		t.Run(tn, func(t *testing.T) {
			c := &Config{
				Labels: tc.globalLabels,
				Runners: []*RunnerConfig{
					{
						Name: "runner-always-valid",
						RunnerSettings: RunnerSettings{
							Labels: Labels{
								"runner": "name",
							},
						},
					},
					{
						Name: "runner-tested",
						RunnerSettings: RunnerSettings{
							Labels: tc.runnerLabels,
						},
					},
				},
			}
			for _, r := range c.Runners {
				r.ComputeLabels(c.Labels)
			}

			// The first runner carries only valid labels and must always pass.
			assert.NoError(t, c.Runners[0].Validate())

			err := c.Validate()
			if tc.assertError == nil {
				assert.NoError(t, err)
				return
			}
			tc.assertError(t, err)
		})
	}
}

// TestArtifactConfig_GetUploadTimeout verifies the default (1h) and explicit
// values of the artifact upload timeout.
func TestArtifactConfig_GetUploadTimeout(t *testing.T) {
	tests := []struct {
		name     string
		config   ArtifactConfig
		expected time.Duration
	}{
		{
			name:   "default timeout when nil",
			config: ArtifactConfig{UploadTimeout:
				nil},
			expected: time.Hour,
		},
		{
			name:     "custom timeout when set",
			config:   ArtifactConfig{UploadTimeout: &[]time.Duration{30 * time.Minute}[0]},
			expected: 30 * time.Minute,
		},
		{
			name:     "zero timeout when set to zero",
			config:   ArtifactConfig{UploadTimeout: &[]time.Duration{0}[0]},
			expected: 0,
		},
		{
			name:     "very large timeout",
			config:   ArtifactConfig{UploadTimeout: &[]time.Duration{24 * time.Hour}[0]},
			expected: 24 * time.Hour,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := tt.config.GetUploadTimeout()
			assert.Equal(t, tt.expected, result)
		})
	}
}

// TestArtifactConfig_GetResponseHeaderTimeout verifies the default (10m) and
// explicit values of the artifact response-header timeout.
func TestArtifactConfig_GetResponseHeaderTimeout(t *testing.T) {
	tests := []struct {
		name     string
		config   ArtifactConfig
		expected time.Duration
	}{
		{
			name:     "default timeout when nil",
			config:   ArtifactConfig{ResponseHeaderTimeout: nil},
			expected: 10 * time.Minute,
		},
		{
			name:     "custom timeout when set",
			config:   ArtifactConfig{ResponseHeaderTimeout: &[]time.Duration{5 * time.Minute}[0]},
			expected: 5 * time.Minute,
		},
		{
			name:     "zero timeout when set to zero",
			config:   ArtifactConfig{ResponseHeaderTimeout: &[]time.Duration{0}[0]},
			expected: 0,
		},
		{
			name:     "very large timeout",
			config:   ArtifactConfig{ResponseHeaderTimeout: &[]time.Duration{time.Hour}[0]},
			expected: time.Hour,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := tt.config.GetResponseHeaderTimeout()
			assert.Equal(t, tt.expected, result)
		})
	}
}

// TestRunnerSettings_ArtifactConfig_Integration verifies that the
// [runners.artifact] timeouts round-trip correctly from TOML configuration.
func TestRunnerSettings_ArtifactConfig_Integration(t *testing.T) {
	tests := []struct {
		name           string
		tomlConfig     string
		expectedUpload time.Duration
		expectedHeader time.Duration
	}{
		{
			name: "default values when not specified",
			tomlConfig: `
[[runners]]
name = "test"
url = "https://gitlab.example.com"
token = "test-token"
`,
			expectedUpload: time.Hour,
			expectedHeader: 10 * time.Minute,
		},
		{
			name: "custom values when specified",
			tomlConfig: `
[[runners]]
name = "test"
url = "https://gitlab.example.com"
token = "test-token"
[runners.artifact]
upload_timeout = "30m"
response_header_timeout = "5m"
`,
			expectedUpload: 30 * time.Minute,
			expectedHeader: 5 * time.Minute,
		},
		{
			name: "zero values",
			tomlConfig: `
[[runners]]
name = "test"
url = "https://gitlab.example.com"
token = "test-token"
[runners.artifact]
upload_timeout = "0s"
response_header_timeout = "0s"
`,
			expectedUpload: 0,
			expectedHeader: 0,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var config Config
			err := toml.Unmarshal([]byte(tt.tomlConfig), &config)
			require.NoError(t, err)
			require.Len(t, config.Runners, 1)

			runner := config.Runners[0]
			assert.Equal(t, tt.expectedUpload, runner.Artifact.GetUploadTimeout())
			assert.Equal(t, tt.expectedHeader, runner.Artifact.GetResponseHeaderTimeout())
		})
	}
}

// TestRunnerConfig_ValidateMachineOptionsWithName verifies that every
// MachineOptionsWithName entry must contain a %s placeholder for the machine name.
func TestRunnerConfig_ValidateMachineOptionsWithName(t *testing.T) {
	tests := map[string]struct {
		options      []string
		expectError  bool
		errorMessage string
	}{
		"valid options with %s": {
			options:     []string{"--option=%s", "--another=%s-suffix"},
			expectError: false,
		},
		"empty options": {
			options:     []string{},
			expectError: false,
		},
		"nil options": {
			options:     nil,
			expectError: false,
		},
		"nil machine config": {
			options:     nil,
			expectError: false,
		},
		"invalid option without %s": {
			options:      []string{"--option=value"},
			expectError:  true,
			errorMessage: `machine option with name "--option=value" must contain %s placeholder`,
		},
		"mixed valid and invalid": {
			options:      []string{"--valid=%s", "--invalid=value"},
			expectError:  true,
			errorMessage: `machine option with name "--invalid=value" must contain %s placeholder`,
		},
	}

	for name, tc := range tests {
		t.Run(name, func(t *testing.T) {
			config := &RunnerConfig{
				RunnerSettings: RunnerSettings{
					Machine: &DockerMachine{
						MachineOptionsWithName: tc.options,
					},
				},
			}

			err := config.Validate()
			if tc.expectError {
				require.Error(t, err)
				assert.Contains(t, err.Error(), tc.errorMessage)
			} else {
				assert.NoError(t, err)
			}
		})
	}
}

// TestParseVariable verifies that only the first '=' separates key from value.
func TestParseVariable(t *testing.T) {
	v, err := parseVariable("key=value=value2")
	assert.NoError(t, err)
	assert.Equal(t, spec.Variable{Key: "key",
		Value: "value=value2"}, v)
}

// TestInvalidParseVariable verifies that input without '=' is rejected.
func TestInvalidParseVariable(t *testing.T) {
	_, err := parseVariable("some_other_key")
	assert.Error(t, err)
}

// TestRunnerCredentials_SameAs verifies credential equality: tokens must match
// exactly, while an empty or "*" URL acts as a wildcard that matches any URL.
func TestRunnerCredentials_SameAs(t *testing.T) {
	tests := map[string]struct {
		c      *RunnerCredentials
		other  *RunnerCredentials
		result bool
	}{
		"same token and same URL": {
			c: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token123",
			},
			result: true,
		},
		"same token but different URL": {
			c: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "https://gitlab.example.com",
				Token: "token123",
			},
			result: false,
		},
		"different token but same URL": {
			c: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token456",
			},
			result: false,
		},
		"different token and different URL": {
			c: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "https://gitlab.example.com",
				Token: "token456",
			},
			result: false,
		},
		"same token, first URL is wildcard *": {
			c: &RunnerCredentials{
				URL:   "*",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token123",
			},
			result: true,
		},
		"same token, second URL is wildcard *": {
			c: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "*",
				Token: "token123",
			},
			result: true,
		},
		"same token, both URLs are wildcard *": {
			c: &RunnerCredentials{
				URL:   "*",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "*",
				Token: "token123",
			},
			result: true,
		},
		"same token, first URL is empty": {
			c: &RunnerCredentials{
				URL:   "",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token123",
			},
			result: true,
		},
		"same token, second URL is empty": {
			c: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "",
				Token: "token123",
			},
			result: true,
		},
		"same token, both URLs are empty": {
			c: &RunnerCredentials{
				URL:   "",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "",
				Token: "token123",
			},
			result: true,
		},
		"same token, empty and wildcard *": {
			c: &RunnerCredentials{
				URL:   "",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "*",
				Token: "token123",
			},
			result: true,
		},
		"different token, first URL is wildcard *": {
			c: &RunnerCredentials{
				URL:   "*",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token456",
			},
			result: false,
		},
		"different token, second URL is wildcard *": {
			c: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "*",
				Token: "token456",
			},
			result: false,
		},
		"same token, URLs differ only by trailing slash": {
			c: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "https://gitlab.com/",
				Token: "token123",
			},
			result: false,
		},
		"same token, URLs differ by protocol": {
			c: &RunnerCredentials{
				URL:   "http://gitlab.com",
				Token: "token123",
			},
			other: &RunnerCredentials{
				URL:   "https://gitlab.com",
				Token: "token123",
			},
			result: false,
		},
	}

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			result := tt.c.SameAs(tt.other)
			assert.Equal(t, tt.result, result, "SameAs should return %v for this case", tt.result)
		})
	}
}

================================================ FILE: common/consts.go ================================================
package common

import (
	"time"

	"github.com/go-http-utils/headers"
)

// NOTE(review): the plain-int constants below carry implicit units
// (presumably seconds) — confirm against their callers before relying on them.
const DefaultTimeout = 7200
const DefaultExecTimeout = 1800
const DefaultCICDConfigFile = ".gitlab-ci.yml"
const CheckInterval = 3 * time.Second
const NotHealthyCheckInterval = 300
const ReloadConfigInterval = 3 * time.Second
const DefaultUnhealthyRequestsLimit = 3
const DefaultUnhealthyInterval = 60 * time.Minute

// DefaultfinalUpdateBackoffMax keeps its historical lowercase-f spelling;
// renaming would break external references to this exported constant.
const DefaultfinalUpdateBackoffMax = 60 * time.Minute
const DefaultFinalUpdateRetryLimit = 10
const DefaultWaitForServicesTimeout = 30
const DefaultShutdownTimeout = 30 * time.Second
const PreparationRetries = 3
const DefaultGetSourcesAttempts = 1
const DefaultArtifactDownloadAttempts = 1
const DefaultRestoreCacheAttempts = 1
const DefaultExecutorStageAttempts = 1
const DefaultAfterScriptIgnoreErrors = true
const KubernetesPollInterval = 3
const KubernetesPollTimeout = 180
const KubernetesCleanupResourcesTimeout = 5 * time.Minute
const KubernetesResourceAvailabilityCheckMaxAttempts = 5
const AfterScriptTimeout = 5 * time.Minute
const DefaultMetricsServerPort = 9252
const DefaultCacheRequestTimeout = 10
const DefaultNetworkClientTimeout = 60 * time.Minute
const DefaultArtifactUploadTimeout = time.Hour
const DefaultArtifactResponseHeaderTimeout = 10 * time.Minute
const DefaultSessionTimeout = 30 * time.Minute
const WaitForBuildFinishTimeout = 5 * time.Minute
const SecretVariableDefaultsToFile = true
const TokenResetIntervalFactor = 0.75
const DefaultRequestRetryLimit = 5
const RequestRetryBackoffMin = 500 * time.Millisecond
const DefaultRequestRetryBackoffMax = 2000 * time.Millisecond

// Job trace (log) buffering and update-interval tuning.
const (
	DefaultTraceOutputLimit = 4 * 1024 * 1024 // in bytes
	DefaultTracePatchLimit  = 1024 * 1024     // in bytes

	DefaultUpdateInterval = 3 * time.Second
	MaxUpdateInterval     = 15 * time.Minute

	MinTraceForceSendInterval              = 30 * time.Second
	MaxTraceForceSendInterval              = 30 * time.Minute
	TraceForceSendUpdateIntervalMultiplier = 4

	// DefaultReaderBufferSize is the size of the line buffer.
	// Docker/Kubernetes use the same size to split lines
	DefaultReaderBufferSize = 16 * 1024
)

const (
	ExecutorKubernetes                        = "kubernetes"
	DefaultKubernetesIntegrationTestNamespace = "k8s-runner-integration-tests"
)

// PreparationRetryInterval is a var (not const) — presumably so tests can
// shorten it; confirm against callers.
var PreparationRetryInterval = 3 * time.Second

// Container images used by integration tests.
const (
	TestAlpineImage                 = "alpine:3.14.2"
	TestWindowsImage                = "mcr.microsoft.com/windows/servercore:%s"
	TestPwshImage                   = "mcr.microsoft.com/powershell:7.1.1-alpine-3.12-20210125"
	TestAlpineNoRootImage           = "registry.gitlab.com/gitlab-org/gitlab-runner/alpine-no-root:latest"
	TestAlpineEntrypointImage       = "registry.gitlab.com/gitlab-org/gitlab-runner/alpine-entrypoint:latest"
	TestAlpineEntrypointStderrImage = "registry.gitlab.com/gitlab-org/gitlab-runner/alpine-entrypoint-stderr:latest"
	TestHelperEntrypointImage       = "registry.gitlab.com/gitlab-org/gitlab-runner/helper-entrypoint:latest"
	TestAlpineIDOverflowImage       = "registry.gitlab.com/gitlab-org/gitlab-runner/alpine-id-overflow:latest"
	TestDockerDindImage             = "docker:23-dind"
	TestDockerGitImage              = "docker:23-git"
	TestLivenessImage               = "registry.gitlab.com/gitlab-org/ci-cd/tests/liveness:0.1.0"
)

// HTTP related constants
const (
	Accept             = headers.Accept
	AcceptCharset      = headers.AcceptCharset
	AcceptEncoding     = headers.AcceptEncoding
	AcceptLanguage     = headers.AcceptLanguage
	Authorization      = headers.Authorization
	CacheControl       = headers.CacheControl
	ContentLength      = headers.ContentLength
	ContentMD5         = headers.ContentMD5
	ContentType        = headers.ContentType
	DoNotTrack         = headers.DoNotTrack
	IfMatch            = headers.IfMatch
	IfModifiedSince    = headers.IfModifiedSince
	IfNoneMatch        = headers.IfNoneMatch
	IfRange            = headers.IfRange
	IfUnmodifiedSince  = headers.IfUnmodifiedSince
	MaxForwards        = headers.MaxForwards
	ProxyAuthorization = headers.ProxyAuthorization
	Pragma             = headers.Pragma
	Range              = headers.Range
	Referer            = headers.Referer
	UserAgent          = headers.UserAgent
	TE                 = headers.TE
	Via                = headers.Via
	Warning            = headers.Warning
	Cookie             = headers.Cookie
	Origin             = headers.Origin
	AcceptDatetime     = headers.AcceptDatetime
	XRequestedWith                = headers.XRequestedWith
	AccessControlAllowOrigin      = headers.AccessControlAllowOrigin
	AccessControlAllowMethods     = headers.AccessControlAllowMethods
	AccessControlAllowHeaders     = headers.AccessControlAllowHeaders
	AccessControlAllowCredentials = headers.AccessControlAllowCredentials
	AccessControlExposeHeaders    = headers.AccessControlExposeHeaders
	AccessControlMaxAge           = headers.AccessControlMaxAge
	AccessControlRequestMethod    = headers.AccessControlRequestMethod
	AccessControlRequestHeaders   = headers.AccessControlRequestHeaders
	AcceptPatch                   = headers.AcceptPatch
	AcceptRanges                  = headers.AcceptRanges
	Allow                         = headers.Allow
	ContentEncoding               = headers.ContentEncoding
	ContentLanguage               = headers.ContentLanguage
	ContentLocation               = headers.ContentLocation
	ContentDisposition            = headers.ContentDisposition
	ContentRange                  = headers.ContentRange
	ETag                          = headers.ETag
	Expires                       = headers.Expires
	LastModified                  = headers.LastModified
	Link                          = headers.Link
	Location                      = headers.Location
	P3P                           = headers.P3P
	ProxyAuthenticate             = headers.ProxyAuthenticate
	Refresh                       = headers.Refresh
	RetryAfter                    = headers.RetryAfter
	Server                        = headers.Server
	SetCookie                     = headers.SetCookie
	StrictTransportSecurity       = headers.StrictTransportSecurity
	TransferEncoding              = headers.TransferEncoding
	Upgrade                       = headers.Upgrade
	Vary                          = headers.Vary
	WWWAuthenticate               = headers.WWWAuthenticate

	// Non-Standard
	XFrameOptions          = headers.XFrameOptions
	XXSSProtection         = headers.XXSSProtection
	ContentSecurityPolicy  = headers.ContentSecurityPolicy
	XContentSecurityPolicy = headers.XContentSecurityPolicy
	XWebKitCSP             = headers.XWebKitCSP
	XContentTypeOptions    = headers.XContentTypeOptions
	XPoweredBy             = headers.XPoweredBy
	XUACompatible          = headers.XUACompatible
	XForwardedProto        = headers.XForwardedProto
	XHTTPMethodOverride    = headers.XHTTPMethodOverride
	XForwardedFor          = headers.XForwardedFor
	XRealIP                = headers.XRealIP
	XCSRFToken             = headers.XCSRFToken
	XRatelimitLimit        = headers.XRatelimitLimit
	XRatelimitRemaining    = headers.XRatelimitRemaining
	XRatelimitReset        = headers.XRatelimitReset

	// GitLab-specific authentication headers.
	PrivateToken = "PRIVATE-TOKEN"
	JobToken     = "JOB-TOKEN"
	RunnerToken  = "RUNNER-TOKEN"
)

================================================ FILE: common/executor.go ================================================
package common

import (
	"context"
	"errors"
	"fmt"

	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger"
	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
)

// ExecutorData is an empty interface representing free-form data
// executor will use. Meant to be casted, e.g. virtual machine details.
type ExecutorData interface{}

// ExecutorDataLogger is an optional interface that ExecutorData implementations
// can implement to provide executor-specific fields for structured logging.
type ExecutorDataLogger interface {
	LogFields() map[string]string
}

// GetExecutorLogFields extracts log fields from ExecutorData if it implements
// ExecutorDataLogger, otherwise returns nil.
func GetExecutorLogFields(data ExecutorData) map[string]string {
	if l, ok := data.(ExecutorDataLogger); ok {
		return l.LogFields()
	}
	return nil
}

// ExecutorCommand stores the script executor will run on a given stage.
// If Predefined it will try to use already allocated resources.
type ExecutorCommand struct {
	Script     string
	Stage      BuildStage
	Predefined bool
	Context    context.Context
}

// ExecutorStage represents a stage of build execution in the executor scope.
type ExecutorStage string

const (
	// ExecutorStageCreated means the executor is being initialized, i.e. created.
	ExecutorStageCreated ExecutorStage = "created"
	// ExecutorStagePrepare means the executor is preparing its environment, initializing dependencies.
	ExecutorStagePrepare ExecutorStage = "prepare"
	// ExecutorStageFinish means the executor has finished build execution.
	ExecutorStageFinish ExecutorStage = "finish"
	// ExecutorStageCleanup means the executor is cleaning up resources.
ExecutorStageCleanup ExecutorStage = "cleanup" ) // ExecutorPrepareOptions stores any data necessary for the executor to prepare // the environment for running a build. This includes runner configuration, build data, etc. type ExecutorPrepareOptions struct { Config *RunnerConfig Build *Build BuildLogger buildlogger.Logger User string Context context.Context } type NoFreeExecutorError struct { Message string } func (e *NoFreeExecutorError) Error() string { return e.Message } // Executor represents entities responsible for build execution. // It prepares the environment, runs the build and cleans up resources. // See more in https://docs.gitlab.com/runner/executors/ type Executor interface { // Shell returns data about the shell and scripts this executor is bound to. Shell() *ShellScriptInfo // Prepare prepares the environment for build execution. e.g. connects to SSH, creates containers. Prepare(options ExecutorPrepareOptions) error // Run executes a command on the prepared environment. Run(cmd ExecutorCommand) error // Finish marks the build execution as finished. Finish(err error) // Cleanup cleans any resources left by build execution. Cleanup() // GetCurrentStage returns current stage of build execution. GetCurrentStage() ExecutorStage // SetCurrentStage sets the current stage of build execution. SetCurrentStage(stage ExecutorStage) } var ExecutorStepRunnerConnectNotSupported = fmt.Errorf("executor does not support step-runner connect") type ManagedExecutorProvider interface { // Init initializes the executor provider. // // Some providers may require that a non-trivial setup will be done for them to work properly. They may also // run a goroutines handling provider's state and management layer. // // Init method is a hook allowing to add such behavior. // // Init MUST BE NON-BLOCKING! Init() // Shutdown terminates the executor provider. // // As noted above, some executor providers may require to maintain a long-running state and management // layer. 
	//
	// Shutdown method is a hook that allows to inform the executor provider that it should terminate
	// itself.
	//
	// Shutdown MUST BE BLOCKING until termination is done or provided context is canceled.
	//
	// First argument receives a context.Context object that will be canceled when shutting down will exceed
	// configured timeout.
	// Second argument receives the global configuration, which may be nil.
	Shutdown(ctx context.Context, config *Config)
}

// ExecutorProvider is responsible for managing the lifetime of executors, acquiring resources,
// retrieving executor metadata, etc.
type ExecutorProvider interface {
	// CanCreate returns whether the executor provider has the necessary data to create an executor.
	CanCreate() bool

	// Create creates a new executor. No resource allocation happens.
	Create() Executor

	// Acquire acquires the necessary resources for the executor to run, e.g. finds a virtual machine.
	Acquire(config *RunnerConfig) (ExecutorData, error)

	// Release releases any resources locked by Acquire.
	Release(config *RunnerConfig, data ExecutorData)

	// GetFeatures returns metadata about the features the executor supports, e.g. variables, services, shell.
	GetFeatures(features *FeaturesInfo) error

	// GetConfigInfo extracts metadata about the config the executor is using, e.g. GPUs.
	GetConfigInfo(input *RunnerConfig, output *ConfigInfo)

	// GetDefaultShell returns the name of the default shell for the executor.
	GetDefaultShell() string
}

// BuildError represents an error during build execution, not related to
// the job script, e.g. failed to create container, establish ssh connection.
type BuildError struct {
	Inner         error
	FailureReason spec.JobFailureReason
	ExitCode      int
}

// Error implements the error interface.
func (b *BuildError) Error() string {
	// Fall back to a generic message when no inner error was attached.
	if b.Inner == nil {
		return "error"
	}

	return b.Inner.Error()
}

// Is reports whether err is a *BuildError with the same FailureReason,
// making errors.Is comparisons reason-based rather than identity-based.
func (b *BuildError) Is(err error) bool {
	buildErr, ok := err.(*BuildError)
	if !ok {
		return false
	}

	return buildErr.FailureReason == b.FailureReason
}

// Unwrap returns the wrapped inner error for use with errors.Is/errors.As.
func (b *BuildError) Unwrap() error {
	return b.Inner
}

// MakeBuildError returns a new instance of BuildError.
func MakeBuildError(format string, args ...interface{}) error {
	return &BuildError{
		Inner: fmt.Errorf(format, args...),
	}
}

// ValidateExecutorProvider checks that a provider implements the minimal
// contract: a default shell, creatability, and queryable features.
func ValidateExecutorProvider(provider ExecutorProvider) error {
	if provider.GetDefaultShell() == "" {
		return errors.New("default shell not implemented")
	}

	if !provider.CanCreate() {
		return errors.New("cannot create executor")
	}

	if err := provider.GetFeatures(&FeaturesInfo{}); err != nil {
		return fmt.Errorf("cannot get features: %w", err)
	}

	return nil
}

================================================ FILE: common/executor_test.go ================================================
//go:build !integration

package common

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/assert"
)

// TestBuildErrorIs verifies BuildError.Is: two BuildErrors match iff their
// FailureReasons are equal, and non-BuildError targets never match.
func TestBuildErrorIs(t *testing.T) {
	tests := map[string]struct {
		err    error
		target error
		is     bool
	}{
		"two build errors with the same failure reason": {
			err:    &BuildError{FailureReason: ScriptFailure},
			target: &BuildError{FailureReason: ScriptFailure},
			is:     true,
		},
		"different failure reasons": {
			err:    &BuildError{FailureReason: ScriptFailure},
			target: &BuildError{FailureReason: RunnerSystemFailure},
			is:     false,
		},
		"not matching errors": {
			err:    &BuildError{},
			target: errors.New("mysterious error"),
			is:     false,
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			if tt.is {
				assert.ErrorIs(t, tt.err, tt.target)
				return
			}

			assert.NotErrorIs(t, tt.err, tt.target)
		})
	}
}

// TestUnwrapBuildError verifies BuildError.Unwrap behavior with and without
// an inner error.
func TestUnwrapBuildError(t *testing.T) {
	err := &BuildError{Inner: assert.AnError}

	// Unwraps inner error
	assert.ErrorIs(t, err, assert.AnError)
	// Stop unwrapping until BuildError is found.
	assert.ErrorIs(t, err, &BuildError{})
	var buildErr *BuildError
	assert.ErrorAs(t, err, &buildErr)

	// A BuildError without an inner error should not match arbitrary errors,
	// but must still match another (empty) BuildError.
	err = &BuildError{}

	// Unwraps inner error
	assert.NotErrorIs(t, err, assert.AnError)
	// Stop unwrapping until BuildError is found.
	assert.ErrorIs(t, err, &BuildError{})
	assert.ErrorAs(t, err, &buildErr)
}

================================================ FILE: common/exit_code.go ================================================
package common

// NormalizeExitCode reinterprets an exit code that may have been stored as a
// Windows DWORD (uint32) as a signed int32 value.
//
// On Windows, exit codes are 32-bit unsigned integers. For example, exit -1
// produces 0xFFFFFFFF (4294967295) which must be reinterpreted as -1. For
// standard Unix exit codes in the range 0–255, this function is an identity
// operation, so it is safe to apply unconditionally regardless of the host or
// container OS.
//
// Values above math.MaxUint32 (0xFFFFFFFF) have their upper bits silently
// truncated to their lower 32 bits before sign-reinterpretation.
func NormalizeExitCode(code int) int { return int(int32(code)) } ================================================ FILE: common/exit_code_test.go ================================================ //go:build !integration package common import ( "testing" "github.com/stretchr/testify/assert" ) func TestNormalizeExitCode(t *testing.T) { tests := map[string]struct { input int expected int }{ "zero": {input: 0, expected: 0}, "positive unix exit code": {input: 1, expected: 1}, "max unix exit code": {input: 255, expected: 255}, "windows DWORD -1": {input: 4294967295, expected: -1}, "windows access violation": {input: 3221225477, expected: -1073741819}, "windows DLL not found": {input: 3221225781, expected: -1073741515}, "max positive int32": {input: 2147483647, expected: 2147483647}, "int32 min (0x80000000)": {input: 2147483648, expected: -2147483648}, "negative one directly": {input: -1, expected: -1}, "value above MaxUint32 truncates to zero": {input: 4294967296, expected: 0}, // 0x1_00000000 → 0 "value above MaxUint32 truncates to negative one": {input: 8589934591, expected: -1}, // 0x1_FFFFFFFF → -1 } for name, tt := range tests { t.Run(name, func(t *testing.T) { assert.Equal(t, tt.expected, NormalizeExitCode(tt.input)) }) } } ================================================ FILE: common/failure_reason_mapper.go ================================================ package common import ( "errors" "gitlab.com/gitlab-org/gitlab-runner/common/spec" ) const ( maxMappingDepth = 10 ) var ( errMaxMappingDepthExceeded = errors.New("exceeded max mapping depth") ) type failureReasonMapper struct { supportedByGitLab []spec.JobFailureReason compatibilityMap map[spec.JobFailureReason]spec.JobFailureReason maxMappingDepth int // err is used only for tests. It allows us to check if `Map()` behavior is correct // and to validate whether the hardcoded failure reasons map creates problems like // mapping loop or too big mapping depth. 
err error } func newFailureReasonMapper(supported []spec.JobFailureReason) *failureReasonMapper { return &failureReasonMapper{ supportedByGitLab: append(supported, alwaysSupportedFailureReasons...), compatibilityMap: failureReasonsCompatibilityMap, maxMappingDepth: maxMappingDepth, } } func (f *failureReasonMapper) Map(reason spec.JobFailureReason) spec.JobFailureReason { f.err = nil // No specific reason means it's a script failure // (or Runner doesn't yet detect that it's something else) if reason == "" { return ScriptFailure } // If the reason is supported by GitLab - we send it as is r, found := f.findSupported(reason) if found { return r } // If the reason is not supported by GitLab - it may be a new // reason extracted from previously existing one (for example // image pulling failure was previously reported as a more general // runner system failure) r, found = f.findBackwardCompatible(reason) if found { return r } // If we can't map the reason to one supported by GitLab - // let's call it "unknown". 
return UnknownFailure
}

// findSupported reports whether reason is in the supportedByGitLab list.
// On a match it returns the reason unchanged and true; otherwise it returns
// UnknownFailure and false.
func (f *failureReasonMapper) findSupported(reason spec.JobFailureReason) (spec.JobFailureReason, bool) {
	for _, supported := range f.supportedByGitLab {
		if reason == supported {
			return reason, true
		}
	}

	return UnknownFailure, false
}

// findBackwardCompatible follows compatibilityMap links, at most
// maxMappingDepth hops, until it reaches a reason supported by GitLab.
// If the depth limit is hit (e.g. the map contains a loop) it records
// errMaxMappingDepthExceeded and returns UnknownFailure with found=true,
// which makes the caller stop searching and report "unknown".
func (f *failureReasonMapper) findBackwardCompatible(reason spec.JobFailureReason) (spec.JobFailureReason, bool) {
	for i := 0; i < f.maxMappingDepth; i++ {
		mappedReason, ok := f.compatibilityMap[reason]
		if !ok {
			return UnknownFailure, false
		}

		r, ok := f.findSupported(mappedReason)
		if ok {
			return r, true
		}

		reason = mappedReason
	}

	f.err = errMaxMappingDepthExceeded

	return UnknownFailure, true
}

================================================
FILE: common/failure_reason_mapper_test.go
================================================
//go:build !integration

package common

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
)

// TestFailureReasonMapper_Map exercises the mapper against a synthetic
// supported list and compatibility map, including direct/indirect mappings
// and loop detection via the depth limit.
func TestFailureReasonMapper_Map(t *testing.T) {
	const (
		frOne   spec.JobFailureReason = "fr_one"
		frTwo   spec.JobFailureReason = "fr_two"
		frThree spec.JobFailureReason = "fr_three"
		frFour  spec.JobFailureReason = "fr_four"
		frFive  spec.JobFailureReason = "fr_five"
		frSix   spec.JobFailureReason = "fr_six"
		frSeven spec.JobFailureReason = "fr_seven"
		frEight spec.JobFailureReason = "fr_eight"

		frLoopOne   spec.JobFailureReason = "fr_loop_one"
		frLoopTwo   spec.JobFailureReason = "fr_loop_two"
		frLoopThree spec.JobFailureReason = "fr_loop_three"
		frLoopFour  spec.JobFailureReason = "fr_loop_four"

		frTotallyUnknown spec.JobFailureReason = "fr_totally_unknown"

		maxDepth = 3
	)

	supported := []spec.JobFailureReason{frOne, frTwo}

	compatibilityMap := map[spec.JobFailureReason]spec.JobFailureReason{
		frThree: frOne,
		frFive:  frFour,
		frFour:  frTwo,
		frSeven: frSix,
		frEight: frSeven,

		// frLoopOne maps directly to itself; the frLoopTwo/Three/Four
		// entries form an indirect cycle.
		frLoopOne:   frLoopOne,
		frLoopFour:  frLoopThree,
		frLoopThree: frLoopTwo,
		frLoopTwo:   frLoopThree,
	}

	tests := map[string]struct {
		run func(t *testing.T, f *failureReasonMapper)
	}{
		"default failure": {
			run: func(t *testing.T, f *failureReasonMapper) {
				assert.Equal(t, ScriptFailure, f.Map(""))
				assert.NoError(t, f.err)
			},
		},
		"always supported by GitLab": {
			run: func(t *testing.T, f *failureReasonMapper) {
				assert.Equal(t, ScriptFailure, f.Map(ScriptFailure))
				assert.Equal(t, RunnerSystemFailure, f.Map(RunnerSystemFailure))
				assert.Equal(t, JobExecutionTimeout, f.Map(JobExecutionTimeout))
				assert.NoError(t, f.err)
			},
		},
		"optionally supported by GitLab": {
			run: func(t *testing.T, f *failureReasonMapper) {
				assert.Equal(t, frOne, f.Map(frOne))
				assert.Equal(t, frTwo, f.Map(frTwo))
				assert.NoError(t, f.err)
			},
		},
		"unsupported by GitLab": {
			run: func(t *testing.T, f *failureReasonMapper) {
				assert.Equal(t, UnknownFailure, f.Map(frSix))
				assert.NoError(t, f.err)
			},
		},
		"new directly mapped to older supported by GitLab": {
			run: func(t *testing.T, f *failureReasonMapper) {
				assert.Equal(t, frOne, f.Map(frThree))
				assert.Equal(t, frTwo, f.Map(frFour))
				assert.NoError(t, f.err)
			},
		},
		"new indirectly mapped to older supported by GitLab": {
			run: func(t *testing.T, f *failureReasonMapper) {
				assert.Equal(t, frTwo, f.Map(frFive))
				assert.NoError(t, f.err)
			},
		},
		"directly mapped to unsupported by GitLab": {
			run: func(t *testing.T, f *failureReasonMapper) {
				assert.Equal(t, UnknownFailure, f.Map(frSeven))
				assert.NoError(t, f.err)
			},
		},
		"indirectly mapped to unsupported by GitLab": {
			run: func(t *testing.T, f *failureReasonMapper) {
				assert.Equal(t, UnknownFailure, f.Map(frEight))
				assert.NoError(t, f.err)
			},
		},
		"totally unknown reason": {
			run: func(t *testing.T, f *failureReasonMapper) {
				assert.Equal(t, UnknownFailure, f.Map(frTotallyUnknown))
				assert.NoError(t, f.err)
			},
		},
		"endless direct loop": {
			run: func(t *testing.T, f *failureReasonMapper) {
				assert.Equal(t, UnknownFailure, f.Map(frLoopOne))
				assert.ErrorIs(t, f.err, errMaxMappingDepthExceeded)
			},
		},
		"endless indirect loop": {
			run: func(t *testing.T, f *failureReasonMapper) {
				assert.Equal(t,
UnknownFailure, f.Map(frLoopFour)) assert.ErrorIs(t, f.err, errMaxMappingDepthExceeded) }, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { f := newFailureReasonMapper(supported) f.compatibilityMap = compatibilityMap f.maxMappingDepth = maxDepth tt.run(t, f) }) } } // This tests checks if the hardcoded compatibility map introduces // mapping loops or exceeds mapping depth. In case of failures, mapping // should be fixed before introducing the change to the main branch // and releasing. func TestFailureReasonsCompatibilityMap(t *testing.T) { f := newFailureReasonMapper(nil) require.Equal(t, failureReasonsCompatibilityMap, f.compatibilityMap) for _, r := range allFailureReasons { t.Run(string(r), func(t *testing.T) { f.Map(r) assert.NoError(t, f.err) }) } } ================================================ FILE: common/labels.go ================================================ package common import ( "fmt" "regexp" ) // Rules of labels validation should be kept in sync with GitLab Rails side. 
// Today (September 2025) they are defined at // https://gitlab.com/gitlab-org/gitlab/-/blob/master/app/validators/json_schemas/ci_runner_labels.json const ( maxAllowedNumberOfLabels = 32 labelKeyAllowedPattern = `^[a-zA-Z0-9_][a-zA-Z0-9._-]{2,64}$` labelValueAllowedPattern = `^[a-zA-Z0-9._-]{1,256}$` ) var ( labelKeyAllowedRx = regexp.MustCompile(labelKeyAllowedPattern) labelValueAllowedRx = regexp.MustCompile(labelValueAllowedPattern) ErrInvalidLabelKey = fmt.Errorf("invalid label key, doesn't match %q", labelKeyAllowedPattern) ErrInvalidLabelValue = fmt.Errorf("invalid label value, doesn't match %q", labelValueAllowedPattern) ErrLabelsCountExceeded = fmt.Errorf("exceeded maximum computed labels number of %d", maxAllowedNumberOfLabels) ) type Labels map[string]string func (l Labels) validatePatterns() error { for key, value := range l { if !labelKeyAllowedRx.MatchString(key) { return fmt.Errorf("%w: %s", ErrInvalidLabelKey, key) } if !labelValueAllowedRx.MatchString(value) { return fmt.Errorf("%w: %s", ErrInvalidLabelValue, value) } } return nil } func (l Labels) validateCount() error { if len(l) > maxAllowedNumberOfLabels { return ErrLabelsCountExceeded } return nil } ================================================ FILE: common/mocks.go ================================================ // Code generated by mockery; DO NOT EDIT. // github.com/vektra/mockery // template: testify package common import ( "context" "io" "net/url" "time" mock "github.com/stretchr/testify/mock" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/common/spec" ) // NewMockWithContext creates a new instance of MockWithContext. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func NewMockWithContext(t interface { mock.TestingT Cleanup(func()) }) *MockWithContext { mock := &MockWithContext{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockWithContext is an autogenerated mock type for the WithContext type type MockWithContext struct { mock.Mock } type MockWithContext_Expecter struct { mock *mock.Mock } func (_m *MockWithContext) EXPECT() *MockWithContext_Expecter { return &MockWithContext_Expecter{mock: &_m.Mock} } // WithContext provides a mock function for the type MockWithContext func (_mock *MockWithContext) WithContext(context1 context.Context) (context.Context, context.CancelFunc) { ret := _mock.Called(context1) if len(ret) == 0 { panic("no return value specified for WithContext") } var r0 context.Context var r1 context.CancelFunc if returnFunc, ok := ret.Get(0).(func(context.Context) (context.Context, context.CancelFunc)); ok { return returnFunc(context1) } if returnFunc, ok := ret.Get(0).(func(context.Context) context.Context); ok { r0 = returnFunc(context1) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(context.Context) } } if returnFunc, ok := ret.Get(1).(func(context.Context) context.CancelFunc); ok { r1 = returnFunc(context1) } else { if ret.Get(1) != nil { r1 = ret.Get(1).(context.CancelFunc) } } return r0, r1 } // MockWithContext_WithContext_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WithContext' type MockWithContext_WithContext_Call struct { *mock.Call } // WithContext is a helper method to define mock.On call // - context1 context.Context func (_e *MockWithContext_Expecter) WithContext(context1 interface{}) *MockWithContext_WithContext_Call { return &MockWithContext_WithContext_Call{Call: _e.mock.On("WithContext", context1)} } func (_c *MockWithContext_WithContext_Call) Run(run func(context1 context.Context)) *MockWithContext_WithContext_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 
= args[0].(context.Context) } run( arg0, ) }) return _c } func (_c *MockWithContext_WithContext_Call) Return(context11 context.Context, cancelFunc context.CancelFunc) *MockWithContext_WithContext_Call { _c.Call.Return(context11, cancelFunc) return _c } func (_c *MockWithContext_WithContext_Call) RunAndReturn(run func(context1 context.Context) (context.Context, context.CancelFunc)) *MockWithContext_WithContext_Call { _c.Call.Return(run) return _c } // newMockUrlHelper creates a new instance of mockUrlHelper. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func newMockUrlHelper(t interface { mock.TestingT Cleanup(func()) }) *mockUrlHelper { mock := &mockUrlHelper{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // mockUrlHelper is an autogenerated mock type for the urlHelper type type mockUrlHelper struct { mock.Mock } type mockUrlHelper_Expecter struct { mock *mock.Mock } func (_m *mockUrlHelper) EXPECT() *mockUrlHelper_Expecter { return &mockUrlHelper_Expecter{mock: &_m.Mock} } // GetInsteadOfs provides a mock function for the type mockUrlHelper func (_mock *mockUrlHelper) GetInsteadOfs() ([][2]string, error) { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for GetInsteadOfs") } var r0 [][2]string var r1 error if returnFunc, ok := ret.Get(0).(func() ([][2]string, error)); ok { return returnFunc() } if returnFunc, ok := ret.Get(0).(func() [][2]string); ok { r0 = returnFunc() } else { if ret.Get(0) != nil { r0 = ret.Get(0).([][2]string) } } if returnFunc, ok := ret.Get(1).(func() error); ok { r1 = returnFunc() } else { r1 = ret.Error(1) } return r0, r1 } // mockUrlHelper_GetInsteadOfs_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetInsteadOfs' type mockUrlHelper_GetInsteadOfs_Call struct { *mock.Call } // GetInsteadOfs is a helper method to define 
mock.On call func (_e *mockUrlHelper_Expecter) GetInsteadOfs() *mockUrlHelper_GetInsteadOfs_Call { return &mockUrlHelper_GetInsteadOfs_Call{Call: _e.mock.On("GetInsteadOfs")} } func (_c *mockUrlHelper_GetInsteadOfs_Call) Run(run func()) *mockUrlHelper_GetInsteadOfs_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *mockUrlHelper_GetInsteadOfs_Call) Return(stringss [][2]string, err error) *mockUrlHelper_GetInsteadOfs_Call { _c.Call.Return(stringss, err) return _c } func (_c *mockUrlHelper_GetInsteadOfs_Call) RunAndReturn(run func() ([][2]string, error)) *mockUrlHelper_GetInsteadOfs_Call { _c.Call.Return(run) return _c } // GetRemoteURL provides a mock function for the type mockUrlHelper func (_mock *mockUrlHelper) GetRemoteURL() (*url.URL, error) { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for GetRemoteURL") } var r0 *url.URL var r1 error if returnFunc, ok := ret.Get(0).(func() (*url.URL, error)); ok { return returnFunc() } if returnFunc, ok := ret.Get(0).(func() *url.URL); ok { r0 = returnFunc() } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*url.URL) } } if returnFunc, ok := ret.Get(1).(func() error); ok { r1 = returnFunc() } else { r1 = ret.Error(1) } return r0, r1 } // mockUrlHelper_GetRemoteURL_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetRemoteURL' type mockUrlHelper_GetRemoteURL_Call struct { *mock.Call } // GetRemoteURL is a helper method to define mock.On call func (_e *mockUrlHelper_Expecter) GetRemoteURL() *mockUrlHelper_GetRemoteURL_Call { return &mockUrlHelper_GetRemoteURL_Call{Call: _e.mock.On("GetRemoteURL")} } func (_c *mockUrlHelper_GetRemoteURL_Call) Run(run func()) *mockUrlHelper_GetRemoteURL_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *mockUrlHelper_GetRemoteURL_Call) Return(uRL *url.URL, err error) *mockUrlHelper_GetRemoteURL_Call { _c.Call.Return(uRL, err) return _c } func (_c 
*mockUrlHelper_GetRemoteURL_Call) RunAndReturn(run func() (*url.URL, error)) *mockUrlHelper_GetRemoteURL_Call { _c.Call.Return(run) return _c } // NewMockCommander creates a new instance of MockCommander. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockCommander(t interface { mock.TestingT Cleanup(func()) }) *MockCommander { mock := &MockCommander{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockCommander is an autogenerated mock type for the Commander type type MockCommander struct { mock.Mock } type MockCommander_Expecter struct { mock *mock.Mock } func (_m *MockCommander) EXPECT() *MockCommander_Expecter { return &MockCommander_Expecter{mock: &_m.Mock} } // Execute provides a mock function for the type MockCommander func (_mock *MockCommander) Execute(c *cli.Context) { _mock.Called(c) return } // MockCommander_Execute_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Execute' type MockCommander_Execute_Call struct { *mock.Call } // Execute is a helper method to define mock.On call // - c *cli.Context func (_e *MockCommander_Expecter) Execute(c interface{}) *MockCommander_Execute_Call { return &MockCommander_Execute_Call{Call: _e.mock.On("Execute", c)} } func (_c *MockCommander_Execute_Call) Run(run func(c *cli.Context)) *MockCommander_Execute_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 *cli.Context if args[0] != nil { arg0 = args[0].(*cli.Context) } run( arg0, ) }) return _c } func (_c *MockCommander_Execute_Call) Return() *MockCommander_Execute_Call { _c.Call.Return() return _c } func (_c *MockCommander_Execute_Call) RunAndReturn(run func(c *cli.Context)) *MockCommander_Execute_Call { _c.Run(run) return _c } // NewMockConfigSaver creates a new instance of MockConfigSaver. 
It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockConfigSaver(t interface { mock.TestingT Cleanup(func()) }) *MockConfigSaver { mock := &MockConfigSaver{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockConfigSaver is an autogenerated mock type for the ConfigSaver type type MockConfigSaver struct { mock.Mock } type MockConfigSaver_Expecter struct { mock *mock.Mock } func (_m *MockConfigSaver) EXPECT() *MockConfigSaver_Expecter { return &MockConfigSaver_Expecter{mock: &_m.Mock} } // Save provides a mock function for the type MockConfigSaver func (_mock *MockConfigSaver) Save(filePath string, data []byte) error { ret := _mock.Called(filePath, data) if len(ret) == 0 { panic("no return value specified for Save") } var r0 error if returnFunc, ok := ret.Get(0).(func(string, []byte) error); ok { r0 = returnFunc(filePath, data) } else { r0 = ret.Error(0) } return r0 } // MockConfigSaver_Save_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Save' type MockConfigSaver_Save_Call struct { *mock.Call } // Save is a helper method to define mock.On call // - filePath string // - data []byte func (_e *MockConfigSaver_Expecter) Save(filePath interface{}, data interface{}) *MockConfigSaver_Save_Call { return &MockConfigSaver_Save_Call{Call: _e.mock.On("Save", filePath, data)} } func (_c *MockConfigSaver_Save_Call) Run(run func(filePath string, data []byte)) *MockConfigSaver_Save_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 string if args[0] != nil { arg0 = args[0].(string) } var arg1 []byte if args[1] != nil { arg1 = args[1].([]byte) } run( arg0, arg1, ) }) return _c } func (_c *MockConfigSaver_Save_Call) Return(err error) *MockConfigSaver_Save_Call { _c.Call.Return(err) return _c } func (_c *MockConfigSaver_Save_Call) RunAndReturn(run func(filePath string, data 
[]byte) error) *MockConfigSaver_Save_Call { _c.Call.Return(run) return _c } // NewMockExecutorData creates a new instance of MockExecutorData. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockExecutorData(t interface { mock.TestingT Cleanup(func()) }) *MockExecutorData { mock := &MockExecutorData{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockExecutorData is an autogenerated mock type for the ExecutorData type type MockExecutorData struct { mock.Mock } type MockExecutorData_Expecter struct { mock *mock.Mock } func (_m *MockExecutorData) EXPECT() *MockExecutorData_Expecter { return &MockExecutorData_Expecter{mock: &_m.Mock} } // NewMockExecutorDataLogger creates a new instance of MockExecutorDataLogger. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func NewMockExecutorDataLogger(t interface { mock.TestingT Cleanup(func()) }) *MockExecutorDataLogger { mock := &MockExecutorDataLogger{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockExecutorDataLogger is an autogenerated mock type for the ExecutorDataLogger type type MockExecutorDataLogger struct { mock.Mock } type MockExecutorDataLogger_Expecter struct { mock *mock.Mock } func (_m *MockExecutorDataLogger) EXPECT() *MockExecutorDataLogger_Expecter { return &MockExecutorDataLogger_Expecter{mock: &_m.Mock} } // LogFields provides a mock function for the type MockExecutorDataLogger func (_mock *MockExecutorDataLogger) LogFields() map[string]string { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for LogFields") } var r0 map[string]string if returnFunc, ok := ret.Get(0).(func() map[string]string); ok { r0 = returnFunc() } else { if ret.Get(0) != nil { r0 = ret.Get(0).(map[string]string) } } return r0 } // MockExecutorDataLogger_LogFields_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'LogFields' type MockExecutorDataLogger_LogFields_Call struct { *mock.Call } // LogFields is a helper method to define mock.On call func (_e *MockExecutorDataLogger_Expecter) LogFields() *MockExecutorDataLogger_LogFields_Call { return &MockExecutorDataLogger_LogFields_Call{Call: _e.mock.On("LogFields")} } func (_c *MockExecutorDataLogger_LogFields_Call) Run(run func()) *MockExecutorDataLogger_LogFields_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockExecutorDataLogger_LogFields_Call) Return(stringToString map[string]string) *MockExecutorDataLogger_LogFields_Call { _c.Call.Return(stringToString) return _c } func (_c *MockExecutorDataLogger_LogFields_Call) RunAndReturn(run func() map[string]string) *MockExecutorDataLogger_LogFields_Call { _c.Call.Return(run) return _c } // NewMockExecutor creates a new instance of MockExecutor. 
It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockExecutor(t interface { mock.TestingT Cleanup(func()) }) *MockExecutor { mock := &MockExecutor{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockExecutor is an autogenerated mock type for the Executor type type MockExecutor struct { mock.Mock } type MockExecutor_Expecter struct { mock *mock.Mock } func (_m *MockExecutor) EXPECT() *MockExecutor_Expecter { return &MockExecutor_Expecter{mock: &_m.Mock} } // Cleanup provides a mock function for the type MockExecutor func (_mock *MockExecutor) Cleanup() { _mock.Called() return } // MockExecutor_Cleanup_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Cleanup' type MockExecutor_Cleanup_Call struct { *mock.Call } // Cleanup is a helper method to define mock.On call func (_e *MockExecutor_Expecter) Cleanup() *MockExecutor_Cleanup_Call { return &MockExecutor_Cleanup_Call{Call: _e.mock.On("Cleanup")} } func (_c *MockExecutor_Cleanup_Call) Run(run func()) *MockExecutor_Cleanup_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockExecutor_Cleanup_Call) Return() *MockExecutor_Cleanup_Call { _c.Call.Return() return _c } func (_c *MockExecutor_Cleanup_Call) RunAndReturn(run func()) *MockExecutor_Cleanup_Call { _c.Run(run) return _c } // Finish provides a mock function for the type MockExecutor func (_mock *MockExecutor) Finish(err error) { _mock.Called(err) return } // MockExecutor_Finish_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Finish' type MockExecutor_Finish_Call struct { *mock.Call } // Finish is a helper method to define mock.On call // - err error func (_e *MockExecutor_Expecter) Finish(err interface{}) *MockExecutor_Finish_Call { return &MockExecutor_Finish_Call{Call: _e.mock.On("Finish", 
err)} } func (_c *MockExecutor_Finish_Call) Run(run func(err error)) *MockExecutor_Finish_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 error if args[0] != nil { arg0 = args[0].(error) } run( arg0, ) }) return _c } func (_c *MockExecutor_Finish_Call) Return() *MockExecutor_Finish_Call { _c.Call.Return() return _c } func (_c *MockExecutor_Finish_Call) RunAndReturn(run func(err error)) *MockExecutor_Finish_Call { _c.Run(run) return _c } // GetCurrentStage provides a mock function for the type MockExecutor func (_mock *MockExecutor) GetCurrentStage() ExecutorStage { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for GetCurrentStage") } var r0 ExecutorStage if returnFunc, ok := ret.Get(0).(func() ExecutorStage); ok { r0 = returnFunc() } else { r0 = ret.Get(0).(ExecutorStage) } return r0 } // MockExecutor_GetCurrentStage_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetCurrentStage' type MockExecutor_GetCurrentStage_Call struct { *mock.Call } // GetCurrentStage is a helper method to define mock.On call func (_e *MockExecutor_Expecter) GetCurrentStage() *MockExecutor_GetCurrentStage_Call { return &MockExecutor_GetCurrentStage_Call{Call: _e.mock.On("GetCurrentStage")} } func (_c *MockExecutor_GetCurrentStage_Call) Run(run func()) *MockExecutor_GetCurrentStage_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockExecutor_GetCurrentStage_Call) Return(executorStage ExecutorStage) *MockExecutor_GetCurrentStage_Call { _c.Call.Return(executorStage) return _c } func (_c *MockExecutor_GetCurrentStage_Call) RunAndReturn(run func() ExecutorStage) *MockExecutor_GetCurrentStage_Call { _c.Call.Return(run) return _c } // Prepare provides a mock function for the type MockExecutor func (_mock *MockExecutor) Prepare(options ExecutorPrepareOptions) error { ret := _mock.Called(options) if len(ret) == 0 { panic("no return value specified for Prepare") } var r0 error if returnFunc, 
ok := ret.Get(0).(func(ExecutorPrepareOptions) error); ok { r0 = returnFunc(options) } else { r0 = ret.Error(0) } return r0 } // MockExecutor_Prepare_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Prepare' type MockExecutor_Prepare_Call struct { *mock.Call } // Prepare is a helper method to define mock.On call // - options ExecutorPrepareOptions func (_e *MockExecutor_Expecter) Prepare(options interface{}) *MockExecutor_Prepare_Call { return &MockExecutor_Prepare_Call{Call: _e.mock.On("Prepare", options)} } func (_c *MockExecutor_Prepare_Call) Run(run func(options ExecutorPrepareOptions)) *MockExecutor_Prepare_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 ExecutorPrepareOptions if args[0] != nil { arg0 = args[0].(ExecutorPrepareOptions) } run( arg0, ) }) return _c } func (_c *MockExecutor_Prepare_Call) Return(err error) *MockExecutor_Prepare_Call { _c.Call.Return(err) return _c } func (_c *MockExecutor_Prepare_Call) RunAndReturn(run func(options ExecutorPrepareOptions) error) *MockExecutor_Prepare_Call { _c.Call.Return(run) return _c } // Run provides a mock function for the type MockExecutor func (_mock *MockExecutor) Run(cmd ExecutorCommand) error { ret := _mock.Called(cmd) if len(ret) == 0 { panic("no return value specified for Run") } var r0 error if returnFunc, ok := ret.Get(0).(func(ExecutorCommand) error); ok { r0 = returnFunc(cmd) } else { r0 = ret.Error(0) } return r0 } // MockExecutor_Run_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Run' type MockExecutor_Run_Call struct { *mock.Call } // Run is a helper method to define mock.On call // - cmd ExecutorCommand func (_e *MockExecutor_Expecter) Run(cmd interface{}) *MockExecutor_Run_Call { return &MockExecutor_Run_Call{Call: _e.mock.On("Run", cmd)} } func (_c *MockExecutor_Run_Call) Run(run func(cmd ExecutorCommand)) *MockExecutor_Run_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 ExecutorCommand if 
args[0] != nil { arg0 = args[0].(ExecutorCommand) } run( arg0, ) }) return _c } func (_c *MockExecutor_Run_Call) Return(err error) *MockExecutor_Run_Call { _c.Call.Return(err) return _c } func (_c *MockExecutor_Run_Call) RunAndReturn(run func(cmd ExecutorCommand) error) *MockExecutor_Run_Call { _c.Call.Return(run) return _c } 
// NOTE(review): everything below is autogenerated testify mock code (appears to be produced by
// mockery — see the "autogenerated mock type" markers). Do not hand-edit; regenerate from the
// mocked interfaces instead. Each mock follows the same template: the method records the call via
// _mock.Called(...), panics when no return value was configured, and the *_Call helpers
// (Run/Return/RunAndReturn) wrap *mock.Call with type-explicit signatures.
// SetCurrentStage provides a mock function for the type MockExecutor func (_mock *MockExecutor) SetCurrentStage(stage ExecutorStage) { _mock.Called(stage) return } // MockExecutor_SetCurrentStage_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetCurrentStage' type MockExecutor_SetCurrentStage_Call struct { *mock.Call } // SetCurrentStage is a helper method to define mock.On call // - stage ExecutorStage func (_e *MockExecutor_Expecter) SetCurrentStage(stage interface{}) *MockExecutor_SetCurrentStage_Call { return &MockExecutor_SetCurrentStage_Call{Call: _e.mock.On("SetCurrentStage", stage)} } func (_c *MockExecutor_SetCurrentStage_Call) Run(run func(stage ExecutorStage)) *MockExecutor_SetCurrentStage_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 ExecutorStage if args[0] != nil { arg0 = args[0].(ExecutorStage) } run( arg0, ) }) return _c } func (_c *MockExecutor_SetCurrentStage_Call) Return() *MockExecutor_SetCurrentStage_Call { _c.Call.Return() return _c } func (_c *MockExecutor_SetCurrentStage_Call) RunAndReturn(run func(stage ExecutorStage)) *MockExecutor_SetCurrentStage_Call { _c.Run(run) return _c } // Shell provides a mock function for the type MockExecutor func (_mock *MockExecutor) Shell() *ShellScriptInfo { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for Shell") } var r0 *ShellScriptInfo if returnFunc, ok := ret.Get(0).(func() *ShellScriptInfo); ok { r0 = returnFunc() } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*ShellScriptInfo) } } return r0 } // MockExecutor_Shell_Call is a *mock.Call that shadows Run/Return methods with type explicit version 
for method 'Shell' type MockExecutor_Shell_Call struct { *mock.Call } // Shell is a helper method to define mock.On call func (_e *MockExecutor_Expecter) Shell() *MockExecutor_Shell_Call { return &MockExecutor_Shell_Call{Call: _e.mock.On("Shell")} } func (_c *MockExecutor_Shell_Call) Run(run func()) *MockExecutor_Shell_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockExecutor_Shell_Call) Return(shellScriptInfo *ShellScriptInfo) *MockExecutor_Shell_Call { _c.Call.Return(shellScriptInfo) return _c } func (_c *MockExecutor_Shell_Call) RunAndReturn(run func() *ShellScriptInfo) *MockExecutor_Shell_Call { _c.Call.Return(run) return _c } // NewMockManagedExecutorProvider creates a new instance of MockManagedExecutorProvider. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockManagedExecutorProvider(t interface { mock.TestingT Cleanup(func()) }) *MockManagedExecutorProvider { mock := &MockManagedExecutorProvider{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockManagedExecutorProvider is an autogenerated mock type for the ManagedExecutorProvider type type MockManagedExecutorProvider struct { mock.Mock } type MockManagedExecutorProvider_Expecter struct { mock *mock.Mock } func (_m *MockManagedExecutorProvider) EXPECT() *MockManagedExecutorProvider_Expecter { return &MockManagedExecutorProvider_Expecter{mock: &_m.Mock} } // Init provides a mock function for the type MockManagedExecutorProvider func (_mock *MockManagedExecutorProvider) Init() { _mock.Called() return } // MockManagedExecutorProvider_Init_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Init' type MockManagedExecutorProvider_Init_Call struct { *mock.Call } // Init is a helper method to define mock.On call func (_e *MockManagedExecutorProvider_Expecter) Init() 
*MockManagedExecutorProvider_Init_Call { return &MockManagedExecutorProvider_Init_Call{Call: _e.mock.On("Init")} } func (_c *MockManagedExecutorProvider_Init_Call) Run(run func()) *MockManagedExecutorProvider_Init_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockManagedExecutorProvider_Init_Call) Return() *MockManagedExecutorProvider_Init_Call { _c.Call.Return() return _c } func (_c *MockManagedExecutorProvider_Init_Call) RunAndReturn(run func()) *MockManagedExecutorProvider_Init_Call { _c.Run(run) return _c } // Shutdown provides a mock function for the type MockManagedExecutorProvider func (_mock *MockManagedExecutorProvider) Shutdown(ctx context.Context, config *Config) { _mock.Called(ctx, config) return } // MockManagedExecutorProvider_Shutdown_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Shutdown' type MockManagedExecutorProvider_Shutdown_Call struct { *mock.Call } // Shutdown is a helper method to define mock.On call // - ctx context.Context // - config *Config func (_e *MockManagedExecutorProvider_Expecter) Shutdown(ctx interface{}, config interface{}) *MockManagedExecutorProvider_Shutdown_Call { return &MockManagedExecutorProvider_Shutdown_Call{Call: _e.mock.On("Shutdown", ctx, config)} } func (_c *MockManagedExecutorProvider_Shutdown_Call) Run(run func(ctx context.Context, config *Config)) *MockManagedExecutorProvider_Shutdown_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } var arg1 *Config if args[1] != nil { arg1 = args[1].(*Config) } run( arg0, arg1, ) }) return _c } func (_c *MockManagedExecutorProvider_Shutdown_Call) Return() *MockManagedExecutorProvider_Shutdown_Call { _c.Call.Return() return _c } func (_c *MockManagedExecutorProvider_Shutdown_Call) RunAndReturn(run func(ctx context.Context, config *Config)) *MockManagedExecutorProvider_Shutdown_Call { _c.Run(run) return _c } // 
NewMockExecutorProvider creates a new instance of MockExecutorProvider. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockExecutorProvider(t interface { mock.TestingT Cleanup(func()) }) *MockExecutorProvider { mock := &MockExecutorProvider{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockExecutorProvider is an autogenerated mock type for the ExecutorProvider type type MockExecutorProvider struct { mock.Mock } type MockExecutorProvider_Expecter struct { mock *mock.Mock } func (_m *MockExecutorProvider) EXPECT() *MockExecutorProvider_Expecter { return &MockExecutorProvider_Expecter{mock: &_m.Mock} } // Acquire provides a mock function for the type MockExecutorProvider func (_mock *MockExecutorProvider) Acquire(config *RunnerConfig) (ExecutorData, error) { ret := _mock.Called(config) if len(ret) == 0 { panic("no return value specified for Acquire") } var r0 ExecutorData var r1 error if returnFunc, ok := ret.Get(0).(func(*RunnerConfig) (ExecutorData, error)); ok { return returnFunc(config) } if returnFunc, ok := ret.Get(0).(func(*RunnerConfig) ExecutorData); ok { r0 = returnFunc(config) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(ExecutorData) } } if returnFunc, ok := ret.Get(1).(func(*RunnerConfig) error); ok { r1 = returnFunc(config) } else { r1 = ret.Error(1) } return r0, r1 } // MockExecutorProvider_Acquire_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Acquire' type MockExecutorProvider_Acquire_Call struct { *mock.Call } // Acquire is a helper method to define mock.On call // - config *RunnerConfig func (_e *MockExecutorProvider_Expecter) Acquire(config interface{}) *MockExecutorProvider_Acquire_Call { return &MockExecutorProvider_Acquire_Call{Call: _e.mock.On("Acquire", config)} } func (_c *MockExecutorProvider_Acquire_Call) Run(run func(config 
*RunnerConfig)) *MockExecutorProvider_Acquire_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 *RunnerConfig if args[0] != nil { arg0 = args[0].(*RunnerConfig) } run( arg0, ) }) return _c } func (_c *MockExecutorProvider_Acquire_Call) Return(executorData ExecutorData, err error) *MockExecutorProvider_Acquire_Call { _c.Call.Return(executorData, err) return _c } func (_c *MockExecutorProvider_Acquire_Call) RunAndReturn(run func(config *RunnerConfig) (ExecutorData, error)) *MockExecutorProvider_Acquire_Call { _c.Call.Return(run) return _c } // CanCreate provides a mock function for the type MockExecutorProvider func (_mock *MockExecutorProvider) CanCreate() bool { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for CanCreate") } var r0 bool if returnFunc, ok := ret.Get(0).(func() bool); ok { r0 = returnFunc() } else { r0 = ret.Get(0).(bool) } return r0 } // MockExecutorProvider_CanCreate_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CanCreate' type MockExecutorProvider_CanCreate_Call struct { *mock.Call } // CanCreate is a helper method to define mock.On call func (_e *MockExecutorProvider_Expecter) CanCreate() *MockExecutorProvider_CanCreate_Call { return &MockExecutorProvider_CanCreate_Call{Call: _e.mock.On("CanCreate")} } func (_c *MockExecutorProvider_CanCreate_Call) Run(run func()) *MockExecutorProvider_CanCreate_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockExecutorProvider_CanCreate_Call) Return(b bool) *MockExecutorProvider_CanCreate_Call { _c.Call.Return(b) return _c } func (_c *MockExecutorProvider_CanCreate_Call) RunAndReturn(run func() bool) *MockExecutorProvider_CanCreate_Call { _c.Call.Return(run) return _c } // Create provides a mock function for the type MockExecutorProvider func (_mock *MockExecutorProvider) Create() Executor { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for Create") } var r0 Executor if 
returnFunc, ok := ret.Get(0).(func() Executor); ok { r0 = returnFunc() } else { if ret.Get(0) != nil { r0 = ret.Get(0).(Executor) } } return r0 } // MockExecutorProvider_Create_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Create' type MockExecutorProvider_Create_Call struct { *mock.Call } // Create is a helper method to define mock.On call func (_e *MockExecutorProvider_Expecter) Create() *MockExecutorProvider_Create_Call { return &MockExecutorProvider_Create_Call{Call: _e.mock.On("Create")} } func (_c *MockExecutorProvider_Create_Call) Run(run func()) *MockExecutorProvider_Create_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockExecutorProvider_Create_Call) Return(executor Executor) *MockExecutorProvider_Create_Call { _c.Call.Return(executor) return _c } func (_c *MockExecutorProvider_Create_Call) RunAndReturn(run func() Executor) *MockExecutorProvider_Create_Call { _c.Call.Return(run) return _c } // GetConfigInfo provides a mock function for the type MockExecutorProvider func (_mock *MockExecutorProvider) GetConfigInfo(input *RunnerConfig, output *ConfigInfo) { _mock.Called(input, output) return } // MockExecutorProvider_GetConfigInfo_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetConfigInfo' type MockExecutorProvider_GetConfigInfo_Call struct { *mock.Call } // GetConfigInfo is a helper method to define mock.On call // - input *RunnerConfig // - output *ConfigInfo func (_e *MockExecutorProvider_Expecter) GetConfigInfo(input interface{}, output interface{}) *MockExecutorProvider_GetConfigInfo_Call { return &MockExecutorProvider_GetConfigInfo_Call{Call: _e.mock.On("GetConfigInfo", input, output)} } func (_c *MockExecutorProvider_GetConfigInfo_Call) Run(run func(input *RunnerConfig, output *ConfigInfo)) *MockExecutorProvider_GetConfigInfo_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 *RunnerConfig if args[0] != nil { arg0 = 
args[0].(*RunnerConfig) } var arg1 *ConfigInfo if args[1] != nil { arg1 = args[1].(*ConfigInfo) } run( arg0, arg1, ) }) return _c } func (_c *MockExecutorProvider_GetConfigInfo_Call) Return() *MockExecutorProvider_GetConfigInfo_Call { _c.Call.Return() return _c } func (_c *MockExecutorProvider_GetConfigInfo_Call) RunAndReturn(run func(input *RunnerConfig, output *ConfigInfo)) *MockExecutorProvider_GetConfigInfo_Call { _c.Run(run) return _c } // GetDefaultShell provides a mock function for the type MockExecutorProvider func (_mock *MockExecutorProvider) GetDefaultShell() string { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for GetDefaultShell") } var r0 string if returnFunc, ok := ret.Get(0).(func() string); ok { r0 = returnFunc() } else { r0 = ret.Get(0).(string) } return r0 } // MockExecutorProvider_GetDefaultShell_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetDefaultShell' type MockExecutorProvider_GetDefaultShell_Call struct { *mock.Call } // GetDefaultShell is a helper method to define mock.On call func (_e *MockExecutorProvider_Expecter) GetDefaultShell() *MockExecutorProvider_GetDefaultShell_Call { return &MockExecutorProvider_GetDefaultShell_Call{Call: _e.mock.On("GetDefaultShell")} } func (_c *MockExecutorProvider_GetDefaultShell_Call) Run(run func()) *MockExecutorProvider_GetDefaultShell_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockExecutorProvider_GetDefaultShell_Call) Return(s string) *MockExecutorProvider_GetDefaultShell_Call { _c.Call.Return(s) return _c } func (_c *MockExecutorProvider_GetDefaultShell_Call) RunAndReturn(run func() string) *MockExecutorProvider_GetDefaultShell_Call { _c.Call.Return(run) return _c } // GetFeatures provides a mock function for the type MockExecutorProvider func (_mock *MockExecutorProvider) GetFeatures(features *FeaturesInfo) error { ret := _mock.Called(features) if len(ret) == 0 { panic("no return value 
specified for GetFeatures") } var r0 error if returnFunc, ok := ret.Get(0).(func(*FeaturesInfo) error); ok { r0 = returnFunc(features) } else { r0 = ret.Error(0) } return r0 } // MockExecutorProvider_GetFeatures_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetFeatures' type MockExecutorProvider_GetFeatures_Call struct { *mock.Call } // GetFeatures is a helper method to define mock.On call // - features *FeaturesInfo func (_e *MockExecutorProvider_Expecter) GetFeatures(features interface{}) *MockExecutorProvider_GetFeatures_Call { return &MockExecutorProvider_GetFeatures_Call{Call: _e.mock.On("GetFeatures", features)} } func (_c *MockExecutorProvider_GetFeatures_Call) Run(run func(features *FeaturesInfo)) *MockExecutorProvider_GetFeatures_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 *FeaturesInfo if args[0] != nil { arg0 = args[0].(*FeaturesInfo) } run( arg0, ) }) return _c } func (_c *MockExecutorProvider_GetFeatures_Call) Return(err error) *MockExecutorProvider_GetFeatures_Call { _c.Call.Return(err) return _c } func (_c *MockExecutorProvider_GetFeatures_Call) RunAndReturn(run func(features *FeaturesInfo) error) *MockExecutorProvider_GetFeatures_Call { _c.Call.Return(run) return _c } // Release provides a mock function for the type MockExecutorProvider func (_mock *MockExecutorProvider) Release(config *RunnerConfig, data ExecutorData) { _mock.Called(config, data) return } // MockExecutorProvider_Release_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Release' type MockExecutorProvider_Release_Call struct { *mock.Call } // Release is a helper method to define mock.On call // - config *RunnerConfig // - data ExecutorData func (_e *MockExecutorProvider_Expecter) Release(config interface{}, data interface{}) *MockExecutorProvider_Release_Call { return &MockExecutorProvider_Release_Call{Call: _e.mock.On("Release", config, data)} } func (_c 
*MockExecutorProvider_Release_Call) Run(run func(config *RunnerConfig, data ExecutorData)) *MockExecutorProvider_Release_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 *RunnerConfig if args[0] != nil { arg0 = args[0].(*RunnerConfig) } var arg1 ExecutorData if args[1] != nil { arg1 = args[1].(ExecutorData) } run( arg0, arg1, ) }) return _c } func (_c *MockExecutorProvider_Release_Call) Return() *MockExecutorProvider_Release_Call { _c.Call.Return() return _c } func (_c *MockExecutorProvider_Release_Call) RunAndReturn(run func(config *RunnerConfig, data ExecutorData)) *MockExecutorProvider_Release_Call { _c.Run(run) return _c } 
// MockContentProvider mocks the ContentProvider interface (GetContentLength, GetReader).
// NewMockContentProvider creates a new instance of MockContentProvider. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockContentProvider(t interface { mock.TestingT Cleanup(func()) }) *MockContentProvider { mock := &MockContentProvider{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockContentProvider is an autogenerated mock type for the ContentProvider type type MockContentProvider struct { mock.Mock } type MockContentProvider_Expecter struct { mock *mock.Mock } func (_m *MockContentProvider) EXPECT() *MockContentProvider_Expecter { return &MockContentProvider_Expecter{mock: &_m.Mock} } // GetContentLength provides a mock function for the type MockContentProvider func (_mock *MockContentProvider) GetContentLength() (int64, bool) { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for GetContentLength") } var r0 int64 var r1 bool if returnFunc, ok := ret.Get(0).(func() (int64, bool)); ok { return returnFunc() } if returnFunc, ok := ret.Get(0).(func() int64); ok { r0 = returnFunc() } else { r0 = ret.Get(0).(int64) } if returnFunc, ok := ret.Get(1).(func() bool); ok { r1 = returnFunc() } else { r1 = ret.Get(1).(bool) } return r0, r1 } // 
MockContentProvider_GetContentLength_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetContentLength' type MockContentProvider_GetContentLength_Call struct { *mock.Call } // GetContentLength is a helper method to define mock.On call func (_e *MockContentProvider_Expecter) GetContentLength() *MockContentProvider_GetContentLength_Call { return &MockContentProvider_GetContentLength_Call{Call: _e.mock.On("GetContentLength")} } func (_c *MockContentProvider_GetContentLength_Call) Run(run func()) *MockContentProvider_GetContentLength_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockContentProvider_GetContentLength_Call) Return(n int64, b bool) *MockContentProvider_GetContentLength_Call { _c.Call.Return(n, b) return _c } func (_c *MockContentProvider_GetContentLength_Call) RunAndReturn(run func() (int64, bool)) *MockContentProvider_GetContentLength_Call { _c.Call.Return(run) return _c } // GetReader provides a mock function for the type MockContentProvider func (_mock *MockContentProvider) GetReader() (io.ReadCloser, error) { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for GetReader") } var r0 io.ReadCloser var r1 error if returnFunc, ok := ret.Get(0).(func() (io.ReadCloser, error)); ok { return returnFunc() } if returnFunc, ok := ret.Get(0).(func() io.ReadCloser); ok { r0 = returnFunc() } else { if ret.Get(0) != nil { r0 = ret.Get(0).(io.ReadCloser) } } if returnFunc, ok := ret.Get(1).(func() error); ok { r1 = returnFunc() } else { r1 = ret.Error(1) } return r0, r1 } // MockContentProvider_GetReader_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetReader' type MockContentProvider_GetReader_Call struct { *mock.Call } // GetReader is a helper method to define mock.On call func (_e *MockContentProvider_Expecter) GetReader() *MockContentProvider_GetReader_Call { return &MockContentProvider_GetReader_Call{Call: 
_e.mock.On("GetReader")} } func (_c *MockContentProvider_GetReader_Call) Run(run func()) *MockContentProvider_GetReader_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockContentProvider_GetReader_Call) Return(readCloser io.ReadCloser, err error) *MockContentProvider_GetReader_Call { _c.Call.Return(readCloser, err) return _c } func (_c *MockContentProvider_GetReader_Call) RunAndReturn(run func() (io.ReadCloser, error)) *MockContentProvider_GetReader_Call { _c.Call.Return(run) return _c } 
// MockFailuresCollector mocks the FailuresCollector interface (single method: RecordFailure).
// NewMockFailuresCollector creates a new instance of MockFailuresCollector. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockFailuresCollector(t interface { mock.TestingT Cleanup(func()) }) *MockFailuresCollector { mock := &MockFailuresCollector{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockFailuresCollector is an autogenerated mock type for the FailuresCollector type type MockFailuresCollector struct { mock.Mock } type MockFailuresCollector_Expecter struct { mock *mock.Mock } func (_m *MockFailuresCollector) EXPECT() *MockFailuresCollector_Expecter { return &MockFailuresCollector_Expecter{mock: &_m.Mock} } // RecordFailure provides a mock function for the type MockFailuresCollector func (_mock *MockFailuresCollector) RecordFailure(reason spec.JobFailureReason, runnerConfig RunnerConfig, mode JobExecutionMode) { _mock.Called(reason, runnerConfig, mode) return } // MockFailuresCollector_RecordFailure_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RecordFailure' type MockFailuresCollector_RecordFailure_Call struct { *mock.Call } // RecordFailure is a helper method to define mock.On call // - reason spec.JobFailureReason // - runnerConfig RunnerConfig // - mode JobExecutionMode func (_e *MockFailuresCollector_Expecter) RecordFailure(reason 
interface{}, runnerConfig interface{}, mode interface{}) *MockFailuresCollector_RecordFailure_Call { return &MockFailuresCollector_RecordFailure_Call{Call: _e.mock.On("RecordFailure", reason, runnerConfig, mode)} } func (_c *MockFailuresCollector_RecordFailure_Call) Run(run func(reason spec.JobFailureReason, runnerConfig RunnerConfig, mode JobExecutionMode)) *MockFailuresCollector_RecordFailure_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 spec.JobFailureReason if args[0] != nil { arg0 = args[0].(spec.JobFailureReason) } var arg1 RunnerConfig if args[1] != nil { arg1 = args[1].(RunnerConfig) } var arg2 JobExecutionMode if args[2] != nil { arg2 = args[2].(JobExecutionMode) } run( arg0, arg1, arg2, ) }) return _c } func (_c *MockFailuresCollector_RecordFailure_Call) Return() *MockFailuresCollector_RecordFailure_Call { _c.Call.Return() return _c } func (_c *MockFailuresCollector_RecordFailure_Call) RunAndReturn(run func(reason spec.JobFailureReason, runnerConfig RunnerConfig, mode JobExecutionMode)) *MockFailuresCollector_RecordFailure_Call { _c.Run(run) return _c } // NewMockSupportedFailureReasonMapper creates a new instance of MockSupportedFailureReasonMapper. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
// NOTE(review): continuation of the autogenerated (mockery-style) testify mocks — do not
// hand-edit; regenerate from the mocked interfaces instead.
func NewMockSupportedFailureReasonMapper(t interface { mock.TestingT Cleanup(func()) }) *MockSupportedFailureReasonMapper { mock := &MockSupportedFailureReasonMapper{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockSupportedFailureReasonMapper is an autogenerated mock type for the SupportedFailureReasonMapper type type MockSupportedFailureReasonMapper struct { mock.Mock } type MockSupportedFailureReasonMapper_Expecter struct { mock *mock.Mock } func (_m *MockSupportedFailureReasonMapper) EXPECT() *MockSupportedFailureReasonMapper_Expecter { return &MockSupportedFailureReasonMapper_Expecter{mock: &_m.Mock} } // Map provides a mock function for the type MockSupportedFailureReasonMapper func (_mock *MockSupportedFailureReasonMapper) Map(fr spec.JobFailureReason) spec.JobFailureReason { ret := _mock.Called(fr) if len(ret) == 0 { panic("no return value specified for Map") } var r0 spec.JobFailureReason if returnFunc, ok := ret.Get(0).(func(spec.JobFailureReason) spec.JobFailureReason); ok { r0 = returnFunc(fr) } else { r0 = ret.Get(0).(spec.JobFailureReason) } return r0 } // MockSupportedFailureReasonMapper_Map_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Map' type MockSupportedFailureReasonMapper_Map_Call struct { *mock.Call } // Map is a helper method to define mock.On call // - fr spec.JobFailureReason func (_e *MockSupportedFailureReasonMapper_Expecter) Map(fr interface{}) *MockSupportedFailureReasonMapper_Map_Call { return &MockSupportedFailureReasonMapper_Map_Call{Call: _e.mock.On("Map", fr)} } func (_c *MockSupportedFailureReasonMapper_Map_Call) Run(run func(fr spec.JobFailureReason)) *MockSupportedFailureReasonMapper_Map_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 spec.JobFailureReason if args[0] != nil { arg0 = args[0].(spec.JobFailureReason) } run( arg0, ) }) return _c } func (_c *MockSupportedFailureReasonMapper_Map_Call) Return(jobFailureReason 
spec.JobFailureReason) *MockSupportedFailureReasonMapper_Map_Call { _c.Call.Return(jobFailureReason) return _c } func (_c *MockSupportedFailureReasonMapper_Map_Call) RunAndReturn(run func(fr spec.JobFailureReason) spec.JobFailureReason) *MockSupportedFailureReasonMapper_Map_Call { _c.Call.Return(run) return _c } 
// MockJobTrace mocks the JobTrace interface: lifecycle results (Success, Fail, Finish),
// cancellation hooks (Abort/Cancel and their Set*Func setters), and wiring for
// failure collection and debug mode.
// NewMockJobTrace creates a new instance of MockJobTrace. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockJobTrace(t interface { mock.TestingT Cleanup(func()) }) *MockJobTrace { mock := &MockJobTrace{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockJobTrace is an autogenerated mock type for the JobTrace type type MockJobTrace struct { mock.Mock } type MockJobTrace_Expecter struct { mock *mock.Mock } func (_m *MockJobTrace) EXPECT() *MockJobTrace_Expecter { return &MockJobTrace_Expecter{mock: &_m.Mock} } // Abort provides a mock function for the type MockJobTrace func (_mock *MockJobTrace) Abort() bool { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for Abort") } var r0 bool if returnFunc, ok := ret.Get(0).(func() bool); ok { r0 = returnFunc() } else { r0 = ret.Get(0).(bool) } return r0 } // MockJobTrace_Abort_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Abort' type MockJobTrace_Abort_Call struct { *mock.Call } // Abort is a helper method to define mock.On call func (_e *MockJobTrace_Expecter) Abort() *MockJobTrace_Abort_Call { return &MockJobTrace_Abort_Call{Call: _e.mock.On("Abort")} } func (_c *MockJobTrace_Abort_Call) Run(run func()) *MockJobTrace_Abort_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockJobTrace_Abort_Call) Return(b bool) *MockJobTrace_Abort_Call { _c.Call.Return(b) return _c } func (_c *MockJobTrace_Abort_Call) RunAndReturn(run func() bool) *MockJobTrace_Abort_Call { 
_c.Call.Return(run) return _c } // Cancel provides a mock function for the type MockJobTrace func (_mock *MockJobTrace) Cancel() bool { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for Cancel") } var r0 bool if returnFunc, ok := ret.Get(0).(func() bool); ok { r0 = returnFunc() } else { r0 = ret.Get(0).(bool) } return r0 } // MockJobTrace_Cancel_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Cancel' type MockJobTrace_Cancel_Call struct { *mock.Call } // Cancel is a helper method to define mock.On call func (_e *MockJobTrace_Expecter) Cancel() *MockJobTrace_Cancel_Call { return &MockJobTrace_Cancel_Call{Call: _e.mock.On("Cancel")} } func (_c *MockJobTrace_Cancel_Call) Run(run func()) *MockJobTrace_Cancel_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockJobTrace_Cancel_Call) Return(b bool) *MockJobTrace_Cancel_Call { _c.Call.Return(b) return _c } func (_c *MockJobTrace_Cancel_Call) RunAndReturn(run func() bool) *MockJobTrace_Cancel_Call { _c.Call.Return(run) return _c } // Fail provides a mock function for the type MockJobTrace func (_mock *MockJobTrace) Fail(err error, failureData JobFailureData) error { ret := _mock.Called(err, failureData) if len(ret) == 0 { panic("no return value specified for Fail") } var r0 error if returnFunc, ok := ret.Get(0).(func(error, JobFailureData) error); ok { r0 = returnFunc(err, failureData) } else { r0 = ret.Error(0) } return r0 } // MockJobTrace_Fail_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Fail' type MockJobTrace_Fail_Call struct { *mock.Call } // Fail is a helper method to define mock.On call // - err error // - failureData JobFailureData func (_e *MockJobTrace_Expecter) Fail(err interface{}, failureData interface{}) *MockJobTrace_Fail_Call { return &MockJobTrace_Fail_Call{Call: _e.mock.On("Fail", err, failureData)} } func (_c *MockJobTrace_Fail_Call) Run(run func(err error, 
failureData JobFailureData)) *MockJobTrace_Fail_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 error if args[0] != nil { arg0 = args[0].(error) } var arg1 JobFailureData if args[1] != nil { arg1 = args[1].(JobFailureData) } run( arg0, arg1, ) }) return _c } func (_c *MockJobTrace_Fail_Call) Return(err1 error) *MockJobTrace_Fail_Call { _c.Call.Return(err1) return _c } func (_c *MockJobTrace_Fail_Call) RunAndReturn(run func(err error, failureData JobFailureData) error) *MockJobTrace_Fail_Call { _c.Call.Return(run) return _c } // Finish provides a mock function for the type MockJobTrace func (_mock *MockJobTrace) Finish() { _mock.Called() return } // MockJobTrace_Finish_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Finish' type MockJobTrace_Finish_Call struct { *mock.Call } // Finish is a helper method to define mock.On call func (_e *MockJobTrace_Expecter) Finish() *MockJobTrace_Finish_Call { return &MockJobTrace_Finish_Call{Call: _e.mock.On("Finish")} } func (_c *MockJobTrace_Finish_Call) Run(run func()) *MockJobTrace_Finish_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockJobTrace_Finish_Call) Return() *MockJobTrace_Finish_Call { _c.Call.Return() return _c } func (_c *MockJobTrace_Finish_Call) RunAndReturn(run func()) *MockJobTrace_Finish_Call { _c.Run(run) return _c } // IsStdout provides a mock function for the type MockJobTrace func (_mock *MockJobTrace) IsStdout() bool { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for IsStdout") } var r0 bool if returnFunc, ok := ret.Get(0).(func() bool); ok { r0 = returnFunc() } else { r0 = ret.Get(0).(bool) } return r0 } // MockJobTrace_IsStdout_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'IsStdout' type MockJobTrace_IsStdout_Call struct { *mock.Call } // IsStdout is a helper method to define mock.On call func (_e *MockJobTrace_Expecter) IsStdout() 
*MockJobTrace_IsStdout_Call { return &MockJobTrace_IsStdout_Call{Call: _e.mock.On("IsStdout")} } func (_c *MockJobTrace_IsStdout_Call) Run(run func()) *MockJobTrace_IsStdout_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockJobTrace_IsStdout_Call) Return(b bool) *MockJobTrace_IsStdout_Call { _c.Call.Return(b) return _c } func (_c *MockJobTrace_IsStdout_Call) RunAndReturn(run func() bool) *MockJobTrace_IsStdout_Call { _c.Call.Return(run) return _c } // SetAbortFunc provides a mock function for the type MockJobTrace func (_mock *MockJobTrace) SetAbortFunc(abortFunc context.CancelFunc) { _mock.Called(abortFunc) return } // MockJobTrace_SetAbortFunc_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetAbortFunc' type MockJobTrace_SetAbortFunc_Call struct { *mock.Call } // SetAbortFunc is a helper method to define mock.On call // - abortFunc context.CancelFunc func (_e *MockJobTrace_Expecter) SetAbortFunc(abortFunc interface{}) *MockJobTrace_SetAbortFunc_Call { return &MockJobTrace_SetAbortFunc_Call{Call: _e.mock.On("SetAbortFunc", abortFunc)} } func (_c *MockJobTrace_SetAbortFunc_Call) Run(run func(abortFunc context.CancelFunc)) *MockJobTrace_SetAbortFunc_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.CancelFunc if args[0] != nil { arg0 = args[0].(context.CancelFunc) } run( arg0, ) }) return _c } func (_c *MockJobTrace_SetAbortFunc_Call) Return() *MockJobTrace_SetAbortFunc_Call { _c.Call.Return() return _c } func (_c *MockJobTrace_SetAbortFunc_Call) RunAndReturn(run func(abortFunc context.CancelFunc)) *MockJobTrace_SetAbortFunc_Call { _c.Run(run) return _c } // SetCancelFunc provides a mock function for the type MockJobTrace func (_mock *MockJobTrace) SetCancelFunc(cancelFunc context.CancelFunc) { _mock.Called(cancelFunc) return } // MockJobTrace_SetCancelFunc_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetCancelFunc' type 
MockJobTrace_SetCancelFunc_Call struct { *mock.Call } // SetCancelFunc is a helper method to define mock.On call // - cancelFunc context.CancelFunc func (_e *MockJobTrace_Expecter) SetCancelFunc(cancelFunc interface{}) *MockJobTrace_SetCancelFunc_Call { return &MockJobTrace_SetCancelFunc_Call{Call: _e.mock.On("SetCancelFunc", cancelFunc)} } func (_c *MockJobTrace_SetCancelFunc_Call) Run(run func(cancelFunc context.CancelFunc)) *MockJobTrace_SetCancelFunc_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.CancelFunc if args[0] != nil { arg0 = args[0].(context.CancelFunc) } run( arg0, ) }) return _c } func (_c *MockJobTrace_SetCancelFunc_Call) Return() *MockJobTrace_SetCancelFunc_Call { _c.Call.Return() return _c } func (_c *MockJobTrace_SetCancelFunc_Call) RunAndReturn(run func(cancelFunc context.CancelFunc)) *MockJobTrace_SetCancelFunc_Call { _c.Run(run) return _c } // SetDebugModeEnabled provides a mock function for the type MockJobTrace func (_mock *MockJobTrace) SetDebugModeEnabled(isEnabled bool) { _mock.Called(isEnabled) return } // MockJobTrace_SetDebugModeEnabled_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetDebugModeEnabled' type MockJobTrace_SetDebugModeEnabled_Call struct { *mock.Call } // SetDebugModeEnabled is a helper method to define mock.On call // - isEnabled bool func (_e *MockJobTrace_Expecter) SetDebugModeEnabled(isEnabled interface{}) *MockJobTrace_SetDebugModeEnabled_Call { return &MockJobTrace_SetDebugModeEnabled_Call{Call: _e.mock.On("SetDebugModeEnabled", isEnabled)} } func (_c *MockJobTrace_SetDebugModeEnabled_Call) Run(run func(isEnabled bool)) *MockJobTrace_SetDebugModeEnabled_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 bool if args[0] != nil { arg0 = args[0].(bool) } run( arg0, ) }) return _c } func (_c *MockJobTrace_SetDebugModeEnabled_Call) Return() *MockJobTrace_SetDebugModeEnabled_Call { _c.Call.Return() return _c } func (_c 
*MockJobTrace_SetDebugModeEnabled_Call) RunAndReturn(run func(isEnabled bool)) *MockJobTrace_SetDebugModeEnabled_Call { _c.Run(run) return _c } // SetFailuresCollector provides a mock function for the type MockJobTrace func (_mock *MockJobTrace) SetFailuresCollector(fc FailuresCollector) { _mock.Called(fc) return } // MockJobTrace_SetFailuresCollector_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetFailuresCollector' type MockJobTrace_SetFailuresCollector_Call struct { *mock.Call } // SetFailuresCollector is a helper method to define mock.On call // - fc FailuresCollector func (_e *MockJobTrace_Expecter) SetFailuresCollector(fc interface{}) *MockJobTrace_SetFailuresCollector_Call { return &MockJobTrace_SetFailuresCollector_Call{Call: _e.mock.On("SetFailuresCollector", fc)} } func (_c *MockJobTrace_SetFailuresCollector_Call) Run(run func(fc FailuresCollector)) *MockJobTrace_SetFailuresCollector_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 FailuresCollector if args[0] != nil { arg0 = args[0].(FailuresCollector) } run( arg0, ) }) return _c } func (_c *MockJobTrace_SetFailuresCollector_Call) Return() *MockJobTrace_SetFailuresCollector_Call { _c.Call.Return() return _c } func (_c *MockJobTrace_SetFailuresCollector_Call) RunAndReturn(run func(fc FailuresCollector)) *MockJobTrace_SetFailuresCollector_Call { _c.Run(run) return _c } // SetSupportedFailureReasonMapper provides a mock function for the type MockJobTrace func (_mock *MockJobTrace) SetSupportedFailureReasonMapper(f SupportedFailureReasonMapper) { _mock.Called(f) return } // MockJobTrace_SetSupportedFailureReasonMapper_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetSupportedFailureReasonMapper' type MockJobTrace_SetSupportedFailureReasonMapper_Call struct { *mock.Call } // SetSupportedFailureReasonMapper is a helper method to define mock.On call // - f SupportedFailureReasonMapper func (_e 
*MockJobTrace_Expecter) SetSupportedFailureReasonMapper(f interface{}) *MockJobTrace_SetSupportedFailureReasonMapper_Call { return &MockJobTrace_SetSupportedFailureReasonMapper_Call{Call: _e.mock.On("SetSupportedFailureReasonMapper", f)} } func (_c *MockJobTrace_SetSupportedFailureReasonMapper_Call) Run(run func(f SupportedFailureReasonMapper)) *MockJobTrace_SetSupportedFailureReasonMapper_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 SupportedFailureReasonMapper if args[0] != nil { arg0 = args[0].(SupportedFailureReasonMapper) } run( arg0, ) }) return _c } func (_c *MockJobTrace_SetSupportedFailureReasonMapper_Call) Return() *MockJobTrace_SetSupportedFailureReasonMapper_Call { _c.Call.Return() return _c } func (_c *MockJobTrace_SetSupportedFailureReasonMapper_Call) RunAndReturn(run func(f SupportedFailureReasonMapper)) *MockJobTrace_SetSupportedFailureReasonMapper_Call { _c.Run(run) return _c } // Success provides a mock function for the type MockJobTrace func (_mock *MockJobTrace) Success() error { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for Success") } var r0 error if returnFunc, ok := ret.Get(0).(func() error); ok { r0 = returnFunc() } else { r0 = ret.Error(0) } return r0 } // MockJobTrace_Success_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Success' type MockJobTrace_Success_Call struct { *mock.Call } // Success is a helper method to define mock.On call func (_e *MockJobTrace_Expecter) Success() *MockJobTrace_Success_Call { return &MockJobTrace_Success_Call{Call: _e.mock.On("Success")} } func (_c *MockJobTrace_Success_Call) Run(run func()) *MockJobTrace_Success_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *MockJobTrace_Success_Call) Return(err error) *MockJobTrace_Success_Call { _c.Call.Return(err) return _c } func (_c *MockJobTrace_Success_Call) RunAndReturn(run func() error) *MockJobTrace_Success_Call { _c.Call.Return(run) return _c } // 
Write provides a mock function for the type MockJobTrace func (_mock *MockJobTrace) Write(p []byte) (int, error) { ret := _mock.Called(p) if len(ret) == 0 { panic("no return value specified for Write") } var r0 int var r1 error if returnFunc, ok := ret.Get(0).(func([]byte) (int, error)); ok { return returnFunc(p) } if returnFunc, ok := ret.Get(0).(func([]byte) int); ok { r0 = returnFunc(p) } else { r0 = ret.Get(0).(int) } if returnFunc, ok := ret.Get(1).(func([]byte) error); ok { r1 = returnFunc(p) } else { r1 = ret.Error(1) } return r0, r1 } // MockJobTrace_Write_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Write' type MockJobTrace_Write_Call struct { *mock.Call } // Write is a helper method to define mock.On call // - p []byte func (_e *MockJobTrace_Expecter) Write(p interface{}) *MockJobTrace_Write_Call { return &MockJobTrace_Write_Call{Call: _e.mock.On("Write", p)} } func (_c *MockJobTrace_Write_Call) Run(run func(p []byte)) *MockJobTrace_Write_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 []byte if args[0] != nil { arg0 = args[0].([]byte) } run( arg0, ) }) return _c } func (_c *MockJobTrace_Write_Call) Return(n int, err error) *MockJobTrace_Write_Call { _c.Call.Return(n, err) return _c } func (_c *MockJobTrace_Write_Call) RunAndReturn(run func(p []byte) (int, error)) *MockJobTrace_Write_Call { _c.Call.Return(run) return _c } // NewMockNetwork creates a new instance of MockNetwork. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func NewMockNetwork(t interface {
	mock.TestingT
	Cleanup(func())
}) *MockNetwork {
	mock := &MockNetwork{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

// MockNetwork is an autogenerated mock type for the Network type
type MockNetwork struct {
	mock.Mock
}

type MockNetwork_Expecter struct {
	mock *mock.Mock
}

func (_m *MockNetwork) EXPECT() *MockNetwork_Expecter {
	return &MockNetwork_Expecter{mock: &_m.Mock}
}

// DownloadArtifacts provides a mock function for the type MockNetwork
func (_mock *MockNetwork) DownloadArtifacts(config JobCredentials, artifactsFile io.WriteCloser, directDownload *bool) DownloadState {
	ret := _mock.Called(config, artifactsFile, directDownload)

	if len(ret) == 0 {
		panic("no return value specified for DownloadArtifacts")
	}

	var r0 DownloadState
	if returnFunc, ok := ret.Get(0).(func(JobCredentials, io.WriteCloser, *bool) DownloadState); ok {
		r0 = returnFunc(config, artifactsFile, directDownload)
	} else {
		r0 = ret.Get(0).(DownloadState)
	}
	return r0
}

// MockNetwork_DownloadArtifacts_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'DownloadArtifacts'
type MockNetwork_DownloadArtifacts_Call struct {
	*mock.Call
}

// DownloadArtifacts is a helper method to define mock.On call
//   - config JobCredentials
//   - artifactsFile io.WriteCloser
//   - directDownload *bool
func (_e *MockNetwork_Expecter) DownloadArtifacts(config interface{}, artifactsFile interface{}, directDownload interface{}) *MockNetwork_DownloadArtifacts_Call {
	return &MockNetwork_DownloadArtifacts_Call{Call: _e.mock.On("DownloadArtifacts", config, artifactsFile, directDownload)}
}

func (_c *MockNetwork_DownloadArtifacts_Call) Run(run func(config JobCredentials, artifactsFile io.WriteCloser, directDownload *bool)) *MockNetwork_DownloadArtifacts_Call {
	_c.Call.Run(func(args mock.Arguments) {
		// Each argument is nil-guarded before its type assertion.
		var arg0 JobCredentials
		if args[0] != nil {
			arg0 = args[0].(JobCredentials)
		}
		var arg1 io.WriteCloser
		if args[1] != nil {
			arg1 = args[1].(io.WriteCloser)
		}
		var arg2 *bool
		if args[2] != nil {
			arg2 = args[2].(*bool)
		}
		run(
			arg0,
			arg1,
			arg2,
		)
	})
	return _c
}

func (_c *MockNetwork_DownloadArtifacts_Call) Return(downloadState DownloadState) *MockNetwork_DownloadArtifacts_Call {
	_c.Call.Return(downloadState)
	return _c
}

func (_c *MockNetwork_DownloadArtifacts_Call) RunAndReturn(run func(config JobCredentials, artifactsFile io.WriteCloser, directDownload *bool) DownloadState) *MockNetwork_DownloadArtifacts_Call {
	_c.Call.Return(run)
	return _c
}

// PatchTrace provides a mock function for the type MockNetwork
func (_mock *MockNetwork) PatchTrace(config RunnerConfig, jobCredentials *JobCredentials, content []byte, startOffset int, debugModeEnabled bool) PatchTraceResult {
	ret := _mock.Called(config, jobCredentials, content, startOffset, debugModeEnabled)

	if len(ret) == 0 {
		panic("no return value specified for PatchTrace")
	}

	var r0 PatchTraceResult
	if returnFunc, ok := ret.Get(0).(func(RunnerConfig, *JobCredentials, []byte, int, bool) PatchTraceResult); ok {
		r0 = returnFunc(config, jobCredentials, content, startOffset, debugModeEnabled)
	} else {
		r0 = ret.Get(0).(PatchTraceResult)
	}
	return r0
}

// MockNetwork_PatchTrace_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'PatchTrace'
type MockNetwork_PatchTrace_Call struct {
	*mock.Call
}

// PatchTrace is a helper method to define mock.On call
//   - config RunnerConfig
//   - jobCredentials *JobCredentials
//   - content []byte
//   - startOffset int
//   - debugModeEnabled bool
func (_e *MockNetwork_Expecter) PatchTrace(config interface{}, jobCredentials interface{}, content interface{}, startOffset interface{}, debugModeEnabled interface{}) *MockNetwork_PatchTrace_Call {
	return &MockNetwork_PatchTrace_Call{Call: _e.mock.On("PatchTrace", config, jobCredentials, content, startOffset, debugModeEnabled)}
}

func (_c *MockNetwork_PatchTrace_Call) Run(run func(config RunnerConfig, jobCredentials *JobCredentials, content []byte, startOffset int, debugModeEnabled bool)) *MockNetwork_PatchTrace_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 RunnerConfig
		if args[0] != nil {
			arg0 = args[0].(RunnerConfig)
		}
		var arg1 *JobCredentials
		if args[1] != nil {
			arg1 = args[1].(*JobCredentials)
		}
		var arg2 []byte
		if args[2] != nil {
			arg2 = args[2].([]byte)
		}
		var arg3 int
		if args[3] != nil {
			arg3 = args[3].(int)
		}
		var arg4 bool
		if args[4] != nil {
			arg4 = args[4].(bool)
		}
		run(
			arg0,
			arg1,
			arg2,
			arg3,
			arg4,
		)
	})
	return _c
}

func (_c *MockNetwork_PatchTrace_Call) Return(patchTraceResult PatchTraceResult) *MockNetwork_PatchTrace_Call {
	_c.Call.Return(patchTraceResult)
	return _c
}

func (_c *MockNetwork_PatchTrace_Call) RunAndReturn(run func(config RunnerConfig, jobCredentials *JobCredentials, content []byte, startOffset int, debugModeEnabled bool) PatchTraceResult) *MockNetwork_PatchTrace_Call {
	_c.Call.Return(run)
	return _c
}

// ProcessJob provides a mock function for the type MockNetwork
func (_mock *MockNetwork) ProcessJob(config RunnerConfig, buildCredentials *JobCredentials) (JobTrace, error) {
	ret := _mock.Called(config, buildCredentials)

	if len(ret) == 0 {
		panic("no return value specified for ProcessJob")
	}

	var r0 JobTrace
	var r1 error
	// Fast path: a single function returning both values was configured.
	if returnFunc, ok := ret.Get(0).(func(RunnerConfig, *JobCredentials) (JobTrace, error)); ok {
		return returnFunc(config, buildCredentials)
	}
	if returnFunc, ok := ret.Get(0).(func(RunnerConfig, *JobCredentials) JobTrace); ok {
		r0 = returnFunc(config, buildCredentials)
	} else {
		// Extra nil check keeps a configured nil from panicking the assertion.
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(JobTrace)
		}
	}
	if returnFunc, ok := ret.Get(1).(func(RunnerConfig, *JobCredentials) error); ok {
		r1 = returnFunc(config, buildCredentials)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}

// MockNetwork_ProcessJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ProcessJob'
type MockNetwork_ProcessJob_Call struct {
	*mock.Call
}

// ProcessJob is a helper method to define mock.On call
//   - config RunnerConfig
//   - buildCredentials *JobCredentials
func (_e *MockNetwork_Expecter) ProcessJob(config interface{}, buildCredentials interface{}) *MockNetwork_ProcessJob_Call {
	return &MockNetwork_ProcessJob_Call{Call: _e.mock.On("ProcessJob", config, buildCredentials)}
}

func (_c *MockNetwork_ProcessJob_Call) Run(run func(config RunnerConfig, buildCredentials *JobCredentials)) *MockNetwork_ProcessJob_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 RunnerConfig
		if args[0] != nil {
			arg0 = args[0].(RunnerConfig)
		}
		var arg1 *JobCredentials
		if args[1] != nil {
			arg1 = args[1].(*JobCredentials)
		}
		run(
			arg0,
			arg1,
		)
	})
	return _c
}

func (_c *MockNetwork_ProcessJob_Call) Return(jobTrace JobTrace, err error) *MockNetwork_ProcessJob_Call {
	_c.Call.Return(jobTrace, err)
	return _c
}

func (_c *MockNetwork_ProcessJob_Call) RunAndReturn(run func(config RunnerConfig, buildCredentials *JobCredentials) (JobTrace, error)) *MockNetwork_ProcessJob_Call {
	_c.Call.Return(run)
	return _c
}

// RegisterRunner provides a mock function for the type MockNetwork
func (_mock *MockNetwork) RegisterRunner(config RunnerConfig, parameters RegisterRunnerParameters) *RegisterRunnerResponse {
	ret := _mock.Called(config, parameters)

	if len(ret) == 0 {
		panic("no return value specified for RegisterRunner")
	}

	var r0 *RegisterRunnerResponse
	if returnFunc, ok := ret.Get(0).(func(RunnerConfig, RegisterRunnerParameters) *RegisterRunnerResponse); ok {
		r0 = returnFunc(config, parameters)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*RegisterRunnerResponse)
		}
	}
	return r0
}

// MockNetwork_RegisterRunner_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RegisterRunner'
type MockNetwork_RegisterRunner_Call struct {
	*mock.Call
}

// RegisterRunner is a helper method to define mock.On call
//   - config RunnerConfig
//   - parameters RegisterRunnerParameters
func (_e *MockNetwork_Expecter) RegisterRunner(config interface{}, parameters interface{}) *MockNetwork_RegisterRunner_Call {
	return &MockNetwork_RegisterRunner_Call{Call: _e.mock.On("RegisterRunner", config, parameters)}
}

func (_c *MockNetwork_RegisterRunner_Call) Run(run func(config RunnerConfig, parameters RegisterRunnerParameters)) *MockNetwork_RegisterRunner_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 RunnerConfig
		if args[0] != nil {
			arg0 = args[0].(RunnerConfig)
		}
		var arg1 RegisterRunnerParameters
		if args[1] != nil {
			arg1 = args[1].(RegisterRunnerParameters)
		}
		run(
			arg0,
			arg1,
		)
	})
	return _c
}

func (_c *MockNetwork_RegisterRunner_Call) Return(registerRunnerResponse *RegisterRunnerResponse) *MockNetwork_RegisterRunner_Call {
	_c.Call.Return(registerRunnerResponse)
	return _c
}

func (_c *MockNetwork_RegisterRunner_Call) RunAndReturn(run func(config RunnerConfig, parameters RegisterRunnerParameters) *RegisterRunnerResponse) *MockNetwork_RegisterRunner_Call {
	_c.Call.Return(run)
	return _c
}

// RequestJob provides a mock function for the type MockNetwork
func (_mock *MockNetwork) RequestJob(ctx context.Context, config RunnerConfig, sessionInfo *SessionInfo) (*spec.Job, bool) {
	ret := _mock.Called(ctx, config, sessionInfo)

	if len(ret) == 0 {
		panic("no return value specified for RequestJob")
	}

	var r0 *spec.Job
	var r1 bool
	if returnFunc, ok := ret.Get(0).(func(context.Context, RunnerConfig, *SessionInfo) (*spec.Job, bool)); ok {
		return returnFunc(ctx, config, sessionInfo)
	}
	if returnFunc, ok := ret.Get(0).(func(context.Context, RunnerConfig, *SessionInfo) *spec.Job); ok {
		r0 = returnFunc(ctx, config, sessionInfo)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*spec.Job)
		}
	}
	if returnFunc, ok := ret.Get(1).(func(context.Context, RunnerConfig, *SessionInfo) bool); ok {
		r1 = returnFunc(ctx, config, sessionInfo)
	} else {
		r1 = ret.Get(1).(bool)
	}
	return r0, r1
}

// MockNetwork_RequestJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RequestJob'
type MockNetwork_RequestJob_Call struct {
	*mock.Call
}

// RequestJob is a helper method to define mock.On call
//   - ctx context.Context
//   - config RunnerConfig
//   - sessionInfo *SessionInfo
func (_e *MockNetwork_Expecter) RequestJob(ctx interface{}, config interface{}, sessionInfo interface{}) *MockNetwork_RequestJob_Call {
	return &MockNetwork_RequestJob_Call{Call: _e.mock.On("RequestJob", ctx, config, sessionInfo)}
}

func (_c *MockNetwork_RequestJob_Call) Run(run func(ctx context.Context, config RunnerConfig, sessionInfo *SessionInfo)) *MockNetwork_RequestJob_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 context.Context
		if args[0] != nil {
			arg0 = args[0].(context.Context)
		}
		var arg1 RunnerConfig
		if args[1] != nil {
			arg1 = args[1].(RunnerConfig)
		}
		var arg2 *SessionInfo
		if args[2] != nil {
			arg2 = args[2].(*SessionInfo)
		}
		run(
			arg0,
			arg1,
			arg2,
		)
	})
	return _c
}

func (_c *MockNetwork_RequestJob_Call) Return(job *spec.Job, b bool) *MockNetwork_RequestJob_Call {
	_c.Call.Return(job, b)
	return _c
}

func (_c *MockNetwork_RequestJob_Call) RunAndReturn(run func(ctx context.Context, config RunnerConfig, sessionInfo *SessionInfo) (*spec.Job, bool)) *MockNetwork_RequestJob_Call {
	_c.Call.Return(run)
	return _c
}

// ResetToken provides a mock function for the type MockNetwork
func (_mock *MockNetwork) ResetToken(runner RunnerConfig, systemID string) *ResetTokenResponse {
	ret := _mock.Called(runner, systemID)

	if len(ret) == 0 {
		panic("no return value specified for ResetToken")
	}

	var r0 *ResetTokenResponse
	if returnFunc, ok := ret.Get(0).(func(RunnerConfig, string) *ResetTokenResponse); ok {
		r0 = returnFunc(runner, systemID)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*ResetTokenResponse)
		}
	}
	return r0
}

// MockNetwork_ResetToken_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ResetToken'
type MockNetwork_ResetToken_Call struct {
	*mock.Call
}

// ResetToken is a helper method to define mock.On call
//   - runner RunnerConfig
//   - systemID string
func (_e *MockNetwork_Expecter) ResetToken(runner interface{}, systemID interface{}) *MockNetwork_ResetToken_Call {
	return &MockNetwork_ResetToken_Call{Call: _e.mock.On("ResetToken", runner, systemID)}
}

func (_c *MockNetwork_ResetToken_Call) Run(run func(runner RunnerConfig, systemID string)) *MockNetwork_ResetToken_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 RunnerConfig
		if args[0] != nil {
			arg0 = args[0].(RunnerConfig)
		}
		var arg1 string
		if args[1] != nil {
			arg1 = args[1].(string)
		}
		run(
			arg0,
			arg1,
		)
	})
	return _c
}

func (_c *MockNetwork_ResetToken_Call) Return(resetTokenResponse *ResetTokenResponse) *MockNetwork_ResetToken_Call {
	_c.Call.Return(resetTokenResponse)
	return _c
}

func (_c *MockNetwork_ResetToken_Call) RunAndReturn(run func(runner RunnerConfig, systemID string) *ResetTokenResponse) *MockNetwork_ResetToken_Call {
	_c.Call.Return(run)
	return _c
}

// ResetTokenWithPAT provides a mock function for the type MockNetwork
func (_mock *MockNetwork) ResetTokenWithPAT(runner RunnerConfig, systemID string, pat string) *ResetTokenResponse {
	ret := _mock.Called(runner, systemID, pat)

	if len(ret) == 0 {
		panic("no return value specified for ResetTokenWithPAT")
	}

	var r0 *ResetTokenResponse
	if returnFunc, ok := ret.Get(0).(func(RunnerConfig, string, string) *ResetTokenResponse); ok {
		r0 = returnFunc(runner, systemID, pat)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*ResetTokenResponse)
		}
	}
	return r0
}

// MockNetwork_ResetTokenWithPAT_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ResetTokenWithPAT'
type MockNetwork_ResetTokenWithPAT_Call struct {
	*mock.Call
}

// ResetTokenWithPAT is a helper method to define mock.On call
//   - runner RunnerConfig
//   - systemID string
//   - pat string
func (_e *MockNetwork_Expecter) ResetTokenWithPAT(runner interface{}, systemID interface{}, pat interface{}) *MockNetwork_ResetTokenWithPAT_Call {
	return &MockNetwork_ResetTokenWithPAT_Call{Call: _e.mock.On("ResetTokenWithPAT", runner, systemID, pat)}
}

func (_c *MockNetwork_ResetTokenWithPAT_Call) Run(run func(runner RunnerConfig, systemID string, pat string)) *MockNetwork_ResetTokenWithPAT_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 RunnerConfig
		if args[0] != nil {
			arg0 = args[0].(RunnerConfig)
		}
		var arg1 string
		if args[1] != nil {
			arg1 = args[1].(string)
		}
		var arg2 string
		if args[2] != nil {
			arg2 = args[2].(string)
		}
		run(
			arg0,
			arg1,
			arg2,
		)
	})
	return _c
}

func (_c *MockNetwork_ResetTokenWithPAT_Call) Return(resetTokenResponse *ResetTokenResponse) *MockNetwork_ResetTokenWithPAT_Call {
	_c.Call.Return(resetTokenResponse)
	return _c
}

func (_c *MockNetwork_ResetTokenWithPAT_Call) RunAndReturn(run func(runner RunnerConfig, systemID string, pat string) *ResetTokenResponse) *MockNetwork_ResetTokenWithPAT_Call {
	_c.Call.Return(run)
	return _c
}

// SetConnectionMaxAge provides a mock function for the type MockNetwork
func (_mock *MockNetwork) SetConnectionMaxAge(duration time.Duration) {
	_mock.Called(duration)
	return
}

// MockNetwork_SetConnectionMaxAge_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetConnectionMaxAge'
type MockNetwork_SetConnectionMaxAge_Call struct {
	*mock.Call
}

// SetConnectionMaxAge is a helper method to define mock.On call
//   - duration time.Duration
func (_e *MockNetwork_Expecter) SetConnectionMaxAge(duration interface{}) *MockNetwork_SetConnectionMaxAge_Call {
	return &MockNetwork_SetConnectionMaxAge_Call{Call: _e.mock.On("SetConnectionMaxAge", duration)}
}

func (_c *MockNetwork_SetConnectionMaxAge_Call) Run(run func(duration time.Duration)) *MockNetwork_SetConnectionMaxAge_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 time.Duration
		if args[0] != nil {
			arg0 = args[0].(time.Duration)
		}
		run(
			arg0,
		)
	})
	return _c
}

func (_c *MockNetwork_SetConnectionMaxAge_Call) Return() *MockNetwork_SetConnectionMaxAge_Call {
	_c.Call.Return()
	return _c
}

// RunAndReturn delegates to Run because SetConnectionMaxAge has no return values.
func (_c *MockNetwork_SetConnectionMaxAge_Call) RunAndReturn(run func(duration time.Duration)) *MockNetwork_SetConnectionMaxAge_Call {
	_c.Run(run)
	return _c
}

// UnregisterRunner provides a mock function for the type MockNetwork
func (_mock *MockNetwork) UnregisterRunner(config RunnerConfig) bool {
	ret := _mock.Called(config)

	if len(ret) == 0 {
		panic("no return value specified for UnregisterRunner")
	}

	var r0 bool
	if returnFunc, ok := ret.Get(0).(func(RunnerConfig) bool); ok {
		r0 = returnFunc(config)
	} else {
		r0 = ret.Get(0).(bool)
	}
	return r0
}

// MockNetwork_UnregisterRunner_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UnregisterRunner'
type MockNetwork_UnregisterRunner_Call struct {
	*mock.Call
}

// UnregisterRunner is a helper method to define mock.On call
//   - config RunnerConfig
func (_e *MockNetwork_Expecter) UnregisterRunner(config interface{}) *MockNetwork_UnregisterRunner_Call {
	return &MockNetwork_UnregisterRunner_Call{Call: _e.mock.On("UnregisterRunner", config)}
}

func (_c *MockNetwork_UnregisterRunner_Call) Run(run func(config RunnerConfig)) *MockNetwork_UnregisterRunner_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 RunnerConfig
		if args[0] != nil {
			arg0 = args[0].(RunnerConfig)
		}
		run(
			arg0,
		)
	})
	return _c
}

func (_c *MockNetwork_UnregisterRunner_Call) Return(b bool) *MockNetwork_UnregisterRunner_Call {
	_c.Call.Return(b)
	return _c
}

func (_c *MockNetwork_UnregisterRunner_Call) RunAndReturn(run func(config RunnerConfig) bool) *MockNetwork_UnregisterRunner_Call {
	_c.Call.Return(run)
	return _c
}

// UnregisterRunnerManager provides a mock function for the type MockNetwork
func (_mock *MockNetwork) UnregisterRunnerManager(config RunnerConfig, systemID string) bool {
	ret := _mock.Called(config, systemID)

	if len(ret) == 0 {
		panic("no return value specified for UnregisterRunnerManager")
	}

	var r0 bool
	if returnFunc, ok := ret.Get(0).(func(RunnerConfig, string) bool); ok {
		r0 = returnFunc(config, systemID)
	} else {
		r0 = ret.Get(0).(bool)
	}
	return r0
}

// MockNetwork_UnregisterRunnerManager_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UnregisterRunnerManager'
type MockNetwork_UnregisterRunnerManager_Call struct {
	*mock.Call
}

// UnregisterRunnerManager is a helper method to define mock.On call
//   - config RunnerConfig
//   - systemID string
func (_e *MockNetwork_Expecter) UnregisterRunnerManager(config interface{}, systemID interface{}) *MockNetwork_UnregisterRunnerManager_Call {
	return &MockNetwork_UnregisterRunnerManager_Call{Call: _e.mock.On("UnregisterRunnerManager", config, systemID)}
}

func (_c *MockNetwork_UnregisterRunnerManager_Call) Run(run func(config RunnerConfig, systemID string)) *MockNetwork_UnregisterRunnerManager_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 RunnerConfig
		if args[0] != nil {
			arg0 = args[0].(RunnerConfig)
		}
		var arg1 string
		if args[1] != nil {
			arg1 = args[1].(string)
		}
		run(
			arg0,
			arg1,
		)
	})
	return _c
}

func (_c *MockNetwork_UnregisterRunnerManager_Call) Return(b bool) *MockNetwork_UnregisterRunnerManager_Call {
	_c.Call.Return(b)
	return _c
}

func (_c *MockNetwork_UnregisterRunnerManager_Call) RunAndReturn(run func(config RunnerConfig, systemID string) bool) *MockNetwork_UnregisterRunnerManager_Call {
	_c.Call.Return(run)
	return _c
}

// UpdateJob provides a mock function for the type MockNetwork
func (_mock *MockNetwork) UpdateJob(config RunnerConfig, jobCredentials *JobCredentials, jobInfo UpdateJobInfo) UpdateJobResult {
	ret := _mock.Called(config, jobCredentials, jobInfo)

	if len(ret) == 0 {
		panic("no return value specified for UpdateJob")
	}

	var r0 UpdateJobResult
	if returnFunc, ok := ret.Get(0).(func(RunnerConfig, *JobCredentials, UpdateJobInfo) UpdateJobResult); ok {
		r0 = returnFunc(config, jobCredentials, jobInfo)
	} else {
		r0 = ret.Get(0).(UpdateJobResult)
	}
	return r0
}

// MockNetwork_UpdateJob_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UpdateJob'
type MockNetwork_UpdateJob_Call struct {
	*mock.Call
}

// UpdateJob is a helper method to define mock.On call
//   - config RunnerConfig
//   - jobCredentials *JobCredentials
//   - jobInfo UpdateJobInfo
func (_e *MockNetwork_Expecter) UpdateJob(config interface{}, jobCredentials interface{}, jobInfo interface{}) *MockNetwork_UpdateJob_Call {
	return &MockNetwork_UpdateJob_Call{Call: _e.mock.On("UpdateJob", config, jobCredentials, jobInfo)}
}

func (_c *MockNetwork_UpdateJob_Call) Run(run func(config RunnerConfig, jobCredentials *JobCredentials, jobInfo UpdateJobInfo)) *MockNetwork_UpdateJob_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 RunnerConfig
		if args[0] != nil {
			arg0 = args[0].(RunnerConfig)
		}
		var arg1 *JobCredentials
		if args[1] != nil {
			arg1 = args[1].(*JobCredentials)
		}
		var arg2 UpdateJobInfo
		if args[2] != nil {
			arg2 = args[2].(UpdateJobInfo)
		}
		run(
			arg0,
			arg1,
			arg2,
		)
	})
	return _c
}

func (_c *MockNetwork_UpdateJob_Call) Return(updateJobResult UpdateJobResult) *MockNetwork_UpdateJob_Call {
	_c.Call.Return(updateJobResult)
	return _c
}

func (_c *MockNetwork_UpdateJob_Call) RunAndReturn(run func(config RunnerConfig, jobCredentials *JobCredentials, jobInfo UpdateJobInfo) UpdateJobResult) *MockNetwork_UpdateJob_Call {
	_c.Call.Return(run)
	return _c
}

// UploadRawArtifacts provides a mock function for the type MockNetwork
func (_mock *MockNetwork) UploadRawArtifacts(config JobCredentials, bodyProvider ContentProvider, options ArtifactsOptions) (UploadState, string) {
	ret := _mock.Called(config, bodyProvider, options)

	if len(ret) == 0 {
		panic("no return value specified for UploadRawArtifacts")
	}

	var r0 UploadState
	var r1 string
	if returnFunc, ok := ret.Get(0).(func(JobCredentials, ContentProvider, ArtifactsOptions) (UploadState, string)); ok {
		return returnFunc(config, bodyProvider, options)
	}
	if returnFunc, ok := ret.Get(0).(func(JobCredentials, ContentProvider, ArtifactsOptions) UploadState); ok {
		r0 = returnFunc(config, bodyProvider, options)
	} else {
		r0 = ret.Get(0).(UploadState)
	}
	if returnFunc, ok := ret.Get(1).(func(JobCredentials, ContentProvider, ArtifactsOptions) string); ok {
		r1 = returnFunc(config, bodyProvider, options)
	} else {
		r1 = ret.Get(1).(string)
	}
	return r0, r1
}

// MockNetwork_UploadRawArtifacts_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UploadRawArtifacts'
type MockNetwork_UploadRawArtifacts_Call struct {
	*mock.Call
}

// UploadRawArtifacts is a helper method to define mock.On call
//   - config JobCredentials
//   - bodyProvider ContentProvider
//   - options ArtifactsOptions
func (_e *MockNetwork_Expecter) UploadRawArtifacts(config interface{}, bodyProvider interface{}, options interface{}) *MockNetwork_UploadRawArtifacts_Call {
	return &MockNetwork_UploadRawArtifacts_Call{Call: _e.mock.On("UploadRawArtifacts", config, bodyProvider, options)}
}

func (_c *MockNetwork_UploadRawArtifacts_Call) Run(run func(config JobCredentials, bodyProvider ContentProvider, options ArtifactsOptions)) *MockNetwork_UploadRawArtifacts_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 JobCredentials
		if args[0] != nil {
			arg0 = args[0].(JobCredentials)
		}
		var arg1 ContentProvider
		if args[1] != nil {
			arg1 = args[1].(ContentProvider)
		}
		var arg2 ArtifactsOptions
		if args[2] != nil {
			arg2 = args[2].(ArtifactsOptions)
		}
		run(
			arg0,
			arg1,
			arg2,
		)
	})
	return _c
}

func (_c *MockNetwork_UploadRawArtifacts_Call) Return(uploadState UploadState, s string) *MockNetwork_UploadRawArtifacts_Call {
	_c.Call.Return(uploadState, s)
	return _c
}

func (_c *MockNetwork_UploadRawArtifacts_Call) RunAndReturn(run func(config JobCredentials, bodyProvider ContentProvider, options ArtifactsOptions) (UploadState, string)) *MockNetwork_UploadRawArtifacts_Call {
	_c.Call.Return(run)
	return _c
}

// VerifyRunner provides a mock function for the type MockNetwork
func (_mock *MockNetwork) VerifyRunner(config RunnerConfig, systemID string) *VerifyRunnerResponse {
	ret := _mock.Called(config, systemID)

	if len(ret) == 0 {
		panic("no return value specified for VerifyRunner")
	}

	var r0 *VerifyRunnerResponse
	if returnFunc, ok := ret.Get(0).(func(RunnerConfig, string) *VerifyRunnerResponse); ok {
		r0 = returnFunc(config, systemID)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*VerifyRunnerResponse)
		}
	}
	return r0
}

// MockNetwork_VerifyRunner_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'VerifyRunner'
type MockNetwork_VerifyRunner_Call struct {
	*mock.Call
}

// VerifyRunner is a helper method to define mock.On call
//   - config RunnerConfig
//   - systemID string
func (_e *MockNetwork_Expecter) VerifyRunner(config interface{}, systemID interface{}) *MockNetwork_VerifyRunner_Call {
	return &MockNetwork_VerifyRunner_Call{Call: _e.mock.On("VerifyRunner", config, systemID)}
}

func (_c *MockNetwork_VerifyRunner_Call) Run(run func(config RunnerConfig, systemID string)) *MockNetwork_VerifyRunner_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 RunnerConfig
		if args[0] != nil {
			arg0 = args[0].(RunnerConfig)
		}
		var arg1 string
		if args[1] != nil {
			arg1 = args[1].(string)
		}
		run(
			arg0,
			arg1,
		)
	})
	return _c
}

func (_c *MockNetwork_VerifyRunner_Call) Return(verifyRunnerResponse *VerifyRunnerResponse) *MockNetwork_VerifyRunner_Call {
	_c.Call.Return(verifyRunnerResponse)
	return _c
}

func (_c *MockNetwork_VerifyRunner_Call) RunAndReturn(run func(config RunnerConfig, systemID string) *VerifyRunnerResponse) *MockNetwork_VerifyRunner_Call {
	_c.Call.Return(run)
	return _c
}

// newMockLogger creates a new instance of mockLogger. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func newMockLogger(t interface {
	mock.TestingT
	Cleanup(func())
}) *mockLogger {
	mock := &mockLogger{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

// mockLogger is an autogenerated mock type for the logger type
type mockLogger struct {
	mock.Mock
}

type mockLogger_Expecter struct {
	mock *mock.Mock
}

func (_m *mockLogger) EXPECT() *mockLogger_Expecter {
	return &mockLogger_Expecter{mock: &_m.Mock}
}

// Println provides a mock function for the type mockLogger
func (_mock *mockLogger) Println(args ...interface{}) {
	// Flatten the variadic arguments into a single slice for Called.
	var _ca []interface{}
	_ca = append(_ca, args...)
	_mock.Called(_ca...)
	return
}

// mockLogger_Println_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Println'
type mockLogger_Println_Call struct {
	*mock.Call
}

// Println is a helper method to define mock.On call
//   - args ...interface{}
func (_e *mockLogger_Expecter) Println(args ...interface{}) *mockLogger_Println_Call {
	return &mockLogger_Println_Call{Call: _e.mock.On("Println", append([]interface{}{}, args...)...)}
}

func (_c *mockLogger_Println_Call) Run(run func(args ...interface{})) *mockLogger_Println_Call {
	_c.Call.Run(func(args mock.Arguments) {
		// Re-expand the recorded arguments back into a variadic slice.
		var arg0 []interface{}
		variadicArgs := make([]interface{}, len(args)-0)
		for i, a := range args[0:] {
			if a != nil {
				variadicArgs[i] = a.(interface{})
			}
		}
		arg0 = variadicArgs
		run(
			arg0...,
		)
	})
	return _c
}

func (_c *mockLogger_Println_Call) Return() *mockLogger_Println_Call {
	_c.Call.Return()
	return _c
}

// RunAndReturn delegates to Run because Println has no return values.
func (_c *mockLogger_Println_Call) RunAndReturn(run func(args ...interface{})) *mockLogger_Println_Call {
	_c.Run(run)
	return _c
}

// Warningln provides a mock function for the type mockLogger
func (_mock *mockLogger) Warningln(args ...interface{}) {
	var _ca []interface{}
	_ca = append(_ca, args...)
	_mock.Called(_ca...)
	return
}

// mockLogger_Warningln_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Warningln'
type mockLogger_Warningln_Call struct {
	*mock.Call
}

// Warningln is a helper method to define mock.On call
//   - args ...interface{}
func (_e *mockLogger_Expecter) Warningln(args ...interface{}) *mockLogger_Warningln_Call {
	return &mockLogger_Warningln_Call{Call: _e.mock.On("Warningln", append([]interface{}{}, args...)...)}
}

func (_c *mockLogger_Warningln_Call) Run(run func(args ...interface{})) *mockLogger_Warningln_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 []interface{}
		variadicArgs := make([]interface{}, len(args)-0)
		for i, a := range args[0:] {
			if a != nil {
				variadicArgs[i] = a.(interface{})
			}
		}
		arg0 = variadicArgs
		run(
			arg0...,
		)
	})
	return _c
}

func (_c *mockLogger_Warningln_Call) Return() *mockLogger_Warningln_Call {
	_c.Call.Return()
	return _c
}

// RunAndReturn delegates to Run because Warningln has no return values.
func (_c *mockLogger_Warningln_Call) RunAndReturn(run func(args ...interface{})) *mockLogger_Warningln_Call {
	_c.Run(run)
	return _c
}

// NewMockSecretsResolver creates a new instance of MockSecretsResolver. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewMockSecretsResolver(t interface {
	mock.TestingT
	Cleanup(func())
}) *MockSecretsResolver {
	mock := &MockSecretsResolver{}
	mock.Mock.Test(t)

	// Fail the test at cleanup time if any expectation was never satisfied.
	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

// MockSecretsResolver is an autogenerated mock type for the SecretsResolver type
type MockSecretsResolver struct {
	mock.Mock
}

// MockSecretsResolver_Expecter is the typed expectation builder returned by EXPECT().
type MockSecretsResolver_Expecter struct {
	mock *mock.Mock
}

// EXPECT returns an expecter with type-explicit helpers for each mocked method.
func (_m *MockSecretsResolver) EXPECT() *MockSecretsResolver_Expecter {
	return &MockSecretsResolver_Expecter{mock: &_m.Mock}
}

// Resolve provides a mock function for the type MockSecretsResolver
func (_mock *MockSecretsResolver) Resolve(secrets spec.Secrets) (spec.Variables, error) {
	ret := _mock.Called(secrets)

	if len(ret) == 0 {
		panic("no return value specified for Resolve")
	}

	var r0 spec.Variables
	var r1 error
	// A func-typed return value is a computed return: invoke it with the
	// actual arguments instead of returning it verbatim.
	if returnFunc, ok := ret.Get(0).(func(spec.Secrets) (spec.Variables, error)); ok {
		return returnFunc(secrets)
	}
	if returnFunc, ok := ret.Get(0).(func(spec.Secrets) spec.Variables); ok {
		r0 = returnFunc(secrets)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(spec.Variables)
		}
	}
	if returnFunc, ok := ret.Get(1).(func(spec.Secrets) error); ok {
		r1 = returnFunc(secrets)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}

// MockSecretsResolver_Resolve_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Resolve'
type MockSecretsResolver_Resolve_Call struct {
	*mock.Call
}

// Resolve is a helper method to define mock.On call
//   - secrets spec.Secrets
func (_e *MockSecretsResolver_Expecter) Resolve(secrets interface{}) *MockSecretsResolver_Resolve_Call {
	return &MockSecretsResolver_Resolve_Call{Call: _e.mock.On("Resolve", secrets)}
}

// Run registers a callback invoked with the typed arguments of the call.
func (_c *MockSecretsResolver_Resolve_Call) Run(run func(secrets spec.Secrets)) *MockSecretsResolver_Resolve_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 spec.Secrets
		if args[0] != nil {
			arg0 = args[0].(spec.Secrets)
		}
		run(
			arg0,
		)
	})
	return _c
}

func (_c *MockSecretsResolver_Resolve_Call) Return(variables spec.Variables, err error) *MockSecretsResolver_Resolve_Call {
	_c.Call.Return(variables, err)
	return _c
}

// RunAndReturn stores the callback as the return value; the mocked Resolve
// detects the func type and invokes it to compute the result.
func (_c *MockSecretsResolver_Resolve_Call) RunAndReturn(run func(secrets spec.Secrets) (spec.Variables, error)) *MockSecretsResolver_Resolve_Call {
	_c.Call.Return(run)
	return _c
}

// NewMockSecretResolverRegistry creates a new instance of MockSecretResolverRegistry. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewMockSecretResolverRegistry(t interface {
	mock.TestingT
	Cleanup(func())
}) *MockSecretResolverRegistry {
	mock := &MockSecretResolverRegistry{}
	mock.Mock.Test(t)

	// Fail the test at cleanup time if any expectation was never satisfied.
	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

// MockSecretResolverRegistry is an autogenerated mock type for the SecretResolverRegistry type
type MockSecretResolverRegistry struct {
	mock.Mock
}

// MockSecretResolverRegistry_Expecter is the typed expectation builder returned by EXPECT().
type MockSecretResolverRegistry_Expecter struct {
	mock *mock.Mock
}

// EXPECT returns an expecter with type-explicit helpers for each mocked method.
func (_m *MockSecretResolverRegistry) EXPECT() *MockSecretResolverRegistry_Expecter {
	return &MockSecretResolverRegistry_Expecter{mock: &_m.Mock}
}

// GetFor provides a mock function for the type MockSecretResolverRegistry
func (_mock *MockSecretResolverRegistry) GetFor(secret spec.Secret) (SecretResolver, error) {
	ret := _mock.Called(secret)

	if len(ret) == 0 {
		panic("no return value specified for GetFor")
	}

	var r0 SecretResolver
	var r1 error
	// A func-typed return value is a computed return: invoke it with the
	// actual arguments instead of returning it verbatim.
	if returnFunc, ok := ret.Get(0).(func(spec.Secret) (SecretResolver, error)); ok {
		return returnFunc(secret)
	}
	if returnFunc, ok := ret.Get(0).(func(spec.Secret) SecretResolver); ok {
		r0 = returnFunc(secret)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(SecretResolver)
		}
	}
	if returnFunc, ok := ret.Get(1).(func(spec.Secret) error); ok {
		r1 = returnFunc(secret)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}

// MockSecretResolverRegistry_GetFor_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetFor'
type MockSecretResolverRegistry_GetFor_Call struct {
	*mock.Call
}

// GetFor is a helper method to define mock.On call
//   - secret spec.Secret
func (_e *MockSecretResolverRegistry_Expecter) GetFor(secret interface{}) *MockSecretResolverRegistry_GetFor_Call {
	return &MockSecretResolverRegistry_GetFor_Call{Call: _e.mock.On("GetFor", secret)}
}

// Run registers a callback invoked with the typed arguments of the call.
func (_c *MockSecretResolverRegistry_GetFor_Call) Run(run func(secret spec.Secret)) *MockSecretResolverRegistry_GetFor_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 spec.Secret
		if args[0] != nil {
			arg0 = args[0].(spec.Secret)
		}
		run(
			arg0,
		)
	})
	return _c
}

func (_c *MockSecretResolverRegistry_GetFor_Call) Return(secretResolver SecretResolver, err error) *MockSecretResolverRegistry_GetFor_Call {
	_c.Call.Return(secretResolver, err)
	return _c
}

// RunAndReturn stores the callback as the return value; the mocked GetFor
// detects the func type and invokes it to compute the result.
func (_c *MockSecretResolverRegistry_GetFor_Call) RunAndReturn(run func(secret spec.Secret) (SecretResolver, error)) *MockSecretResolverRegistry_GetFor_Call {
	_c.Call.Return(run)
	return _c
}

// Register provides a mock function for the type MockSecretResolverRegistry
func (_mock *MockSecretResolverRegistry) Register(f secretResolverFactory) {
	_mock.Called(f)
	return
}

// MockSecretResolverRegistry_Register_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Register'
type MockSecretResolverRegistry_Register_Call struct {
	*mock.Call
}

// Register is a helper method to define mock.On call
//   - f secretResolverFactory
func (_e *MockSecretResolverRegistry_Expecter) Register(f interface{}) *MockSecretResolverRegistry_Register_Call {
	return &MockSecretResolverRegistry_Register_Call{Call: _e.mock.On("Register", f)}
}

// Run registers a callback invoked with the typed arguments of the call.
func (_c *MockSecretResolverRegistry_Register_Call) Run(run func(f secretResolverFactory)) *MockSecretResolverRegistry_Register_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 secretResolverFactory
		if args[0] != nil {
			arg0 = args[0].(secretResolverFactory)
		}
		run(
			arg0,
		)
	})
	return _c
}

// Return completes the expectation; Register has no return values.
func (_c *MockSecretResolverRegistry_Register_Call) Return() *MockSecretResolverRegistry_Register_Call {
	_c.Call.Return()
	return _c
}

// RunAndReturn delegates to Run because Register returns nothing.
func (_c *MockSecretResolverRegistry_Register_Call) RunAndReturn(run func(f secretResolverFactory)) *MockSecretResolverRegistry_Register_Call {
	_c.Run(run)
	return _c
}

// NewMockSecretResolver creates a new instance of MockSecretResolver. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewMockSecretResolver(t interface {
	mock.TestingT
	Cleanup(func())
}) *MockSecretResolver {
	mock := &MockSecretResolver{}
	mock.Mock.Test(t)

	// Fail the test at cleanup time if any expectation was never satisfied.
	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

// MockSecretResolver is an autogenerated mock type for the SecretResolver type
type MockSecretResolver struct {
	mock.Mock
}

// MockSecretResolver_Expecter is the typed expectation builder returned by EXPECT().
type MockSecretResolver_Expecter struct {
	mock *mock.Mock
}

// EXPECT returns an expecter with type-explicit helpers for each mocked method.
func (_m *MockSecretResolver) EXPECT() *MockSecretResolver_Expecter {
	return &MockSecretResolver_Expecter{mock: &_m.Mock}
}

// IsSupported provides a mock function for the type MockSecretResolver
func (_mock *MockSecretResolver) IsSupported() bool {
	ret := _mock.Called()

	if len(ret) == 0 {
		panic("no return value specified for IsSupported")
	}

	var r0 bool
	if returnFunc, ok := ret.Get(0).(func() bool); ok {
		r0 = returnFunc()
	} else {
		r0 = ret.Get(0).(bool)
	}
	return r0
}

// MockSecretResolver_IsSupported_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'IsSupported'
type MockSecretResolver_IsSupported_Call struct {
	*mock.Call
}

// IsSupported is a helper method to define mock.On call
func (_e *MockSecretResolver_Expecter) IsSupported() *MockSecretResolver_IsSupported_Call {
	return &MockSecretResolver_IsSupported_Call{Call: _e.mock.On("IsSupported")}
}

func (_c *MockSecretResolver_IsSupported_Call) Run(run func()) *MockSecretResolver_IsSupported_Call {
	_c.Call.Run(func(args mock.Arguments) {
		run()
	})
	return _c
}

func (_c *MockSecretResolver_IsSupported_Call) Return(b bool) *MockSecretResolver_IsSupported_Call {
	_c.Call.Return(b)
	return _c
}

// RunAndReturn stores the callback as the return value; the mocked IsSupported
// detects the func type and invokes it to compute the result.
func (_c *MockSecretResolver_IsSupported_Call) RunAndReturn(run func() bool) *MockSecretResolver_IsSupported_Call {
	_c.Call.Return(run)
	return _c
}

// Name provides a mock function for the type MockSecretResolver
func (_mock *MockSecretResolver) Name() string {
	ret := _mock.Called()

	if len(ret) == 0 {
		panic("no return value specified for Name")
	}

	var r0 string
	if returnFunc, ok := ret.Get(0).(func() string); ok {
		r0 = returnFunc()
	} else {
		r0 = ret.Get(0).(string)
	}
	return r0
}

// MockSecretResolver_Name_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Name'
type MockSecretResolver_Name_Call struct {
	*mock.Call
}

// Name is a helper method to define mock.On call
func (_e *MockSecretResolver_Expecter) Name() *MockSecretResolver_Name_Call {
	return &MockSecretResolver_Name_Call{Call: _e.mock.On("Name")}
}

func (_c *MockSecretResolver_Name_Call) Run(run func()) *MockSecretResolver_Name_Call {
	_c.Call.Run(func(args mock.Arguments) {
		run()
	})
	return _c
}

func (_c *MockSecretResolver_Name_Call) Return(s string) *MockSecretResolver_Name_Call {
	_c.Call.Return(s)
	return _c
}

// RunAndReturn stores the callback as the return value; the mocked Name
// detects the func type and invokes it to compute the result.
func (_c *MockSecretResolver_Name_Call) RunAndReturn(run func() string) *MockSecretResolver_Name_Call {
	_c.Call.Return(run)
	return _c
}

// Resolve provides a mock function for the type MockSecretResolver
func (_mock *MockSecretResolver) Resolve() (string, error) {
	ret := _mock.Called()

	if len(ret) == 0 {
		panic("no return value specified for Resolve")
	}

	var r0 string
	var r1 error
	// A func-typed return value is a computed return: invoke it instead of
	// returning it verbatim.
	if returnFunc, ok := ret.Get(0).(func() (string, error)); ok {
		return returnFunc()
	}
	if returnFunc, ok := ret.Get(0).(func() string); ok {
		r0 = returnFunc()
	} else {
		r0 = ret.Get(0).(string)
	}
	if returnFunc, ok := ret.Get(1).(func() error); ok {
		r1 = returnFunc()
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}

// MockSecretResolver_Resolve_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Resolve'
type MockSecretResolver_Resolve_Call struct {
	*mock.Call
}

// Resolve is a helper method to define mock.On call
func (_e *MockSecretResolver_Expecter) Resolve() *MockSecretResolver_Resolve_Call {
	return &MockSecretResolver_Resolve_Call{Call: _e.mock.On("Resolve")}
}

func (_c *MockSecretResolver_Resolve_Call) Run(run func()) *MockSecretResolver_Resolve_Call {
	_c.Call.Run(func(args mock.Arguments) {
		run()
	})
	return _c
}

func (_c *MockSecretResolver_Resolve_Call) Return(s string, err error) *MockSecretResolver_Resolve_Call {
	_c.Call.Return(s, err)
	return _c
}

// RunAndReturn stores the callback as the return value; the mocked Resolve
// detects the func type and invokes it to compute the result.
func (_c *MockSecretResolver_Resolve_Call) RunAndReturn(run func() (string, error)) *MockSecretResolver_Resolve_Call {
	_c.Call.Return(run)
	return _c
}

// NewMockShell creates a new instance of MockShell. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewMockShell(t interface {
	mock.TestingT
	Cleanup(func())
}) *MockShell {
	mock := &MockShell{}
	mock.Mock.Test(t)

	// Fail the test at cleanup time if any expectation was never satisfied.
	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

// MockShell is an autogenerated mock type for the Shell type
type MockShell struct {
	mock.Mock
}

// MockShell_Expecter is the typed expectation builder returned by EXPECT().
type MockShell_Expecter struct {
	mock *mock.Mock
}

// EXPECT returns an expecter with type-explicit helpers for each mocked method.
func (_m *MockShell) EXPECT() *MockShell_Expecter {
	return &MockShell_Expecter{mock: &_m.Mock}
}

// GenerateSaveScript provides a mock function for the type MockShell
func (_mock *MockShell) GenerateSaveScript(info ShellScriptInfo, scriptPath string, script string) (string, error) {
	ret := _mock.Called(info, scriptPath, script)

	if len(ret) == 0 {
		panic("no return value specified for GenerateSaveScript")
	}

	var r0 string
	var r1 error
	// A func-typed return value is a computed return: invoke it with the
	// actual arguments instead of returning it verbatim.
	if returnFunc, ok := ret.Get(0).(func(ShellScriptInfo, string, string) (string, error)); ok {
		return returnFunc(info, scriptPath, script)
	}
	if returnFunc, ok := ret.Get(0).(func(ShellScriptInfo, string, string) string); ok {
		r0 = returnFunc(info, scriptPath, script)
	} else {
		r0 = ret.Get(0).(string)
	}
	if returnFunc, ok := ret.Get(1).(func(ShellScriptInfo, string, string) error); ok {
		r1 = returnFunc(info, scriptPath, script)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}

// MockShell_GenerateSaveScript_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GenerateSaveScript'
type MockShell_GenerateSaveScript_Call struct {
	*mock.Call
}

// GenerateSaveScript is a helper method to define mock.On call
//   - info ShellScriptInfo
//   - scriptPath string
//   - script string
func (_e *MockShell_Expecter) GenerateSaveScript(info interface{}, scriptPath interface{}, script interface{}) *MockShell_GenerateSaveScript_Call {
	return &MockShell_GenerateSaveScript_Call{Call: _e.mock.On("GenerateSaveScript", info, scriptPath, script)}
}

// Run registers a callback invoked with the typed arguments of the call.
func (_c *MockShell_GenerateSaveScript_Call) Run(run func(info ShellScriptInfo, scriptPath string, script string)) *MockShell_GenerateSaveScript_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 ShellScriptInfo
		if args[0] != nil {
			arg0 = args[0].(ShellScriptInfo)
		}
		var arg1 string
		if args[1] != nil {
			arg1 = args[1].(string)
		}
		var arg2 string
		if args[2] != nil {
			arg2 = args[2].(string)
		}
		run(
			arg0,
			arg1,
			arg2,
		)
	})
	return _c
}

func (_c *MockShell_GenerateSaveScript_Call) Return(s string, err error) *MockShell_GenerateSaveScript_Call {
	_c.Call.Return(s, err)
	return _c
}

// RunAndReturn stores the callback as the return value; the mocked method
// detects the func type and invokes it to compute the result.
func (_c *MockShell_GenerateSaveScript_Call) RunAndReturn(run func(info ShellScriptInfo, scriptPath string, script string) (string, error)) *MockShell_GenerateSaveScript_Call {
	_c.Call.Return(run)
	return _c
}

// GenerateScript provides a mock function for the type MockShell
func (_mock *MockShell) GenerateScript(ctx context.Context, buildStage BuildStage, info ShellScriptInfo) (string, error) {
	ret := _mock.Called(ctx, buildStage, info)

	if len(ret) == 0 {
		panic("no return value specified for GenerateScript")
	}

	var r0 string
	var r1 error
	// A func-typed return value is a computed return: invoke it with the
	// actual arguments instead of returning it verbatim.
	if returnFunc, ok := ret.Get(0).(func(context.Context, BuildStage, ShellScriptInfo) (string, error)); ok {
		return returnFunc(ctx, buildStage, info)
	}
	if returnFunc, ok := ret.Get(0).(func(context.Context, BuildStage, ShellScriptInfo) string); ok {
		r0 = returnFunc(ctx, buildStage, info)
	} else {
		r0 = ret.Get(0).(string)
	}
	if returnFunc, ok := ret.Get(1).(func(context.Context, BuildStage, ShellScriptInfo) error); ok {
		r1 = returnFunc(ctx, buildStage, info)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}

// MockShell_GenerateScript_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GenerateScript'
type MockShell_GenerateScript_Call struct {
	*mock.Call
}

// GenerateScript is a helper method to define mock.On call
//   - ctx context.Context
//   - buildStage BuildStage
//   - info ShellScriptInfo
func (_e *MockShell_Expecter) GenerateScript(ctx interface{}, buildStage interface{}, info interface{}) *MockShell_GenerateScript_Call {
	return &MockShell_GenerateScript_Call{Call: _e.mock.On("GenerateScript", ctx, buildStage, info)}
}

// Run registers a callback invoked with the typed arguments of the call.
func (_c *MockShell_GenerateScript_Call) Run(run func(ctx context.Context, buildStage BuildStage, info ShellScriptInfo)) *MockShell_GenerateScript_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 context.Context
		if args[0] != nil {
			arg0 = args[0].(context.Context)
		}
		var arg1 BuildStage
		if args[1] != nil {
			arg1 = args[1].(BuildStage)
		}
		var arg2 ShellScriptInfo
		if args[2] != nil {
			arg2 = args[2].(ShellScriptInfo)
		}
		run(
			arg0,
			arg1,
			arg2,
		)
	})
	return _c
}

func (_c *MockShell_GenerateScript_Call) Return(s string, err error) *MockShell_GenerateScript_Call {
	_c.Call.Return(s, err)
	return _c
}

// RunAndReturn stores the callback as the return value; the mocked method
// detects the func type and invokes it to compute the result.
func (_c *MockShell_GenerateScript_Call) RunAndReturn(run func(ctx context.Context, buildStage BuildStage, info ShellScriptInfo) (string, error)) *MockShell_GenerateScript_Call {
	_c.Call.Return(run)
	return _c
}

// GetConfiguration provides a mock function for the type MockShell
func (_mock *MockShell) GetConfiguration(info ShellScriptInfo) (*ShellConfiguration, error) {
	ret := _mock.Called(info)

	if len(ret) == 0 {
		panic("no return value specified for GetConfiguration")
	}

	var r0 *ShellConfiguration
	var r1 error
	// A func-typed return value is a computed return: invoke it with the
	// actual arguments instead of returning it verbatim.
	if returnFunc, ok := ret.Get(0).(func(ShellScriptInfo) (*ShellConfiguration, error)); ok {
		return returnFunc(info)
	}
	if returnFunc, ok := ret.Get(0).(func(ShellScriptInfo) *ShellConfiguration); ok {
		r0 = returnFunc(info)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*ShellConfiguration)
		}
	}
	if returnFunc, ok := ret.Get(1).(func(ShellScriptInfo) error); ok {
		r1 = returnFunc(info)
	} else {
		r1 = ret.Error(1)
	}
	return r0, r1
}

// MockShell_GetConfiguration_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetConfiguration'
type MockShell_GetConfiguration_Call struct {
	*mock.Call
}

// GetConfiguration is a helper method to define mock.On call
//   - info ShellScriptInfo
func (_e *MockShell_Expecter) GetConfiguration(info interface{}) *MockShell_GetConfiguration_Call {
	return &MockShell_GetConfiguration_Call{Call: _e.mock.On("GetConfiguration", info)}
}

// Run registers a callback invoked with the typed arguments of the call.
func (_c *MockShell_GetConfiguration_Call) Run(run func(info ShellScriptInfo)) *MockShell_GetConfiguration_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 ShellScriptInfo
		if args[0] != nil {
			arg0 = args[0].(ShellScriptInfo)
		}
		run(
			arg0,
		)
	})
	return _c
}

func (_c *MockShell_GetConfiguration_Call) Return(shellConfiguration *ShellConfiguration, err error) *MockShell_GetConfiguration_Call {
	_c.Call.Return(shellConfiguration, err)
	return _c
}

// RunAndReturn stores the callback as the return value; the mocked method
// detects the func type and invokes it to compute the result.
func (_c *MockShell_GetConfiguration_Call) RunAndReturn(run func(info ShellScriptInfo) (*ShellConfiguration, error)) *MockShell_GetConfiguration_Call {
	_c.Call.Return(run)
	return _c
}

// GetEntrypointCommand provides a mock function for the type MockShell
func (_mock *MockShell) GetEntrypointCommand(info ShellScriptInfo, probeFile string) []string {
	ret := _mock.Called(info, probeFile)

	if len(ret) == 0 {
		panic("no return value specified for GetEntrypointCommand")
	}

	var r0 []string
	if returnFunc, ok := ret.Get(0).(func(ShellScriptInfo, string) []string); ok {
		r0 = returnFunc(info, probeFile)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]string)
		}
	}
	return r0
}

// MockShell_GetEntrypointCommand_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetEntrypointCommand'
type MockShell_GetEntrypointCommand_Call struct {
	*mock.Call
}

// GetEntrypointCommand is a helper method to define mock.On call
//   - info ShellScriptInfo
//   - probeFile string
func (_e *MockShell_Expecter) GetEntrypointCommand(info interface{}, probeFile interface{}) *MockShell_GetEntrypointCommand_Call {
	return &MockShell_GetEntrypointCommand_Call{Call: _e.mock.On("GetEntrypointCommand", info, probeFile)}
}

// Run registers a callback invoked with the typed arguments of the call.
func (_c *MockShell_GetEntrypointCommand_Call) Run(run func(info ShellScriptInfo, probeFile string)) *MockShell_GetEntrypointCommand_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 ShellScriptInfo
		if args[0] != nil {
			arg0 = args[0].(ShellScriptInfo)
		}
		var arg1 string
		if args[1] != nil {
			arg1 = args[1].(string)
		}
		run(
			arg0,
			arg1,
		)
	})
	return _c
}

func (_c *MockShell_GetEntrypointCommand_Call) Return(strings []string) *MockShell_GetEntrypointCommand_Call {
	_c.Call.Return(strings)
	return _c
}

// RunAndReturn stores the callback as the return value; the mocked method
// detects the func type and invokes it to compute the result.
func (_c *MockShell_GetEntrypointCommand_Call) RunAndReturn(run func(info ShellScriptInfo, probeFile string) []string) *MockShell_GetEntrypointCommand_Call {
	_c.Call.Return(run)
	return _c
}

// GetFeatures provides a mock function for the type MockShell
func (_mock *MockShell) GetFeatures(features *FeaturesInfo) {
	_mock.Called(features)
	return
}

// MockShell_GetFeatures_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetFeatures'
type MockShell_GetFeatures_Call struct {
	*mock.Call
}

// GetFeatures is a helper method to define mock.On call
//   - features *FeaturesInfo
func (_e *MockShell_Expecter) GetFeatures(features interface{}) *MockShell_GetFeatures_Call {
	return &MockShell_GetFeatures_Call{Call: _e.mock.On("GetFeatures", features)}
}

// Run registers a callback invoked with the typed arguments of the call.
func (_c *MockShell_GetFeatures_Call) Run(run func(features *FeaturesInfo)) *MockShell_GetFeatures_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 *FeaturesInfo
		if args[0] != nil {
			arg0 = args[0].(*FeaturesInfo)
		}
		run(
			arg0,
		)
	})
	return _c
}

// Return completes the expectation; GetFeatures has no return values.
func (_c *MockShell_GetFeatures_Call) Return() *MockShell_GetFeatures_Call {
	_c.Call.Return()
	return _c
}

// RunAndReturn delegates to Run because GetFeatures returns nothing.
func (_c *MockShell_GetFeatures_Call) RunAndReturn(run func(features *FeaturesInfo)) *MockShell_GetFeatures_Call {
	_c.Run(run)
	return _c
}

// GetName provides a mock function for the type MockShell
func (_mock *MockShell) GetName() string {
	ret := _mock.Called()

	if len(ret) == 0 {
		panic("no return value specified for GetName")
	}

	var r0 string
	if returnFunc, ok := ret.Get(0).(func() string); ok {
		r0 = returnFunc()
	} else {
		r0 = ret.Get(0).(string)
	}
	return r0
}

// MockShell_GetName_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetName'
type MockShell_GetName_Call struct {
	*mock.Call
}

// GetName is a helper method to define mock.On call
func (_e *MockShell_Expecter) GetName() *MockShell_GetName_Call {
	return &MockShell_GetName_Call{Call: _e.mock.On("GetName")}
}

func (_c *MockShell_GetName_Call) Run(run func()) *MockShell_GetName_Call {
	_c.Call.Run(func(args mock.Arguments) {
		run()
	})
	return _c
}

func (_c *MockShell_GetName_Call) Return(s string) *MockShell_GetName_Call {
	_c.Call.Return(s)
	return _c
}

// RunAndReturn stores the callback as the return value; the mocked method
// detects the func type and invokes it to compute the result.
func (_c *MockShell_GetName_Call) RunAndReturn(run func() string) *MockShell_GetName_Call {
	_c.Call.Return(run)
	return _c
}

// IsDefault provides a mock function for the type MockShell
func (_mock *MockShell) IsDefault() bool {
	ret := _mock.Called()

	if len(ret) == 0 {
		panic("no return value specified for IsDefault")
	}

	var r0 bool
	if returnFunc, ok := ret.Get(0).(func() bool); ok {
		r0 = returnFunc()
	} else {
		r0 = ret.Get(0).(bool)
	}
	return r0
}

// MockShell_IsDefault_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'IsDefault'
type MockShell_IsDefault_Call struct {
	*mock.Call
}

// IsDefault is a helper method to define mock.On call
func (_e *MockShell_Expecter) IsDefault() *MockShell_IsDefault_Call {
	return &MockShell_IsDefault_Call{Call: _e.mock.On("IsDefault")}
}

func (_c *MockShell_IsDefault_Call) Run(run func()) *MockShell_IsDefault_Call {
	_c.Call.Run(func(args mock.Arguments) {
		run()
	})
	return _c
}

func (_c *MockShell_IsDefault_Call) Return(b bool) *MockShell_IsDefault_Call {
	_c.Call.Return(b)
	return _c
}

// RunAndReturn stores the callback as the return value; the mocked method
// detects the func type and invokes it to compute the result.
func (_c *MockShell_IsDefault_Call) RunAndReturn(run func() bool) *MockShell_IsDefault_Call {
	_c.Call.Return(run)
	return _c
}

================================================
FILE: common/network.go
================================================
package common

import (
	"bytes"
	"context"
	"io"
	"time"

	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
)

// State/result enumerations for the various network operations below.
type (
	UpdateState   int
	PatchState    int
	UploadState   int
	DownloadState int
	JobState      string
)

// ContentProvider interface that can provide both the reader and optionally the content length
type ContentProvider interface {
	// GetReader returns a new io.ReadCloser for the content.
	// The caller is responsible for closing the returned ReadCloser when done.
	// Each call to GetReader must return a fresh reader starting from the beginning of the content.
	GetReader() (io.ReadCloser, error)

	// GetContentLength returns the content length and whether it's known.
	// If the second return value is false, the content length is unknown
	// and chunked transfer encoding should be used.
	GetContentLength() (int64, bool)
}

// BytesProvider implements ContentProvider for fixed, in-memory byte slices
type BytesProvider struct {
	Data []byte
}

// GetReader returns a new reader for the byte slice.
// Caller must close the returned ReadCloser when done.
func (p BytesProvider) GetReader() (io.ReadCloser, error) {
	return io.NopCloser(bytes.NewReader(p.Data)), nil
}

// GetContentLength returns the exact length of the byte slice.
func (p BytesProvider) GetContentLength() (int64, bool) {
	return int64(len(p.Data)), true // Length is known
}

// StreamProvider implements ContentProvider for streamed data where you don't want to
// or can't determine the size upfront.
type StreamProvider struct {
	// ReaderFactory should return a fresh io.ReadCloser each time it's called.
	// Each io.ReadCloser should start reading from the beginning of the content.
	ReaderFactory func() (io.ReadCloser, error)
}

// GetReader returns a new ReadCloser by calling the ReaderFactory.
// Caller must close the returned ReadCloser when done.
func (p StreamProvider) GetReader() (io.ReadCloser, error) {
	return p.ReaderFactory()
}

// GetContentLength indicates the content length is unknown.
func (p StreamProvider) GetContentLength() (int64, bool) { return 0, false // Length is unknown, use chunked encoding } const ( Pending JobState = "pending" Running JobState = "running" Failed JobState = "failed" Success JobState = "success" ) const ( ScriptFailure spec.JobFailureReason = "script_failure" RunnerSystemFailure spec.JobFailureReason = "runner_system_failure" JobExecutionTimeout spec.JobFailureReason = "job_execution_timeout" ImagePullFailure spec.JobFailureReason = "image_pull_failure" UnknownFailure spec.JobFailureReason = "unknown_failure" // ConfigurationError indicates an error in the CI configuration that can only be determined by runner (and not by // Rails). The typical example incompatible pull policies. Since this failure reason does not exist in rails, we map // it to ScriptFailure below, which is more or less correct in that it's ultimately a user error. ConfigurationError spec.JobFailureReason = "configuration_error" // When defining new job failure reasons, consider if its meaning is // extracted from the scope of already existing one. If yes - update // the failureReasonsCompatibilityMap variable below. // Always update the allFailureReasons list // JobCanceled is only internal to runner, and not used inside of rails. JobCanceled spec.JobFailureReason = "job_canceled" ) var ( // allFailureReasons contains the list of all failure reasons known to runner. allFailureReasons = []spec.JobFailureReason{ ScriptFailure, RunnerSystemFailure, JobExecutionTimeout, ImagePullFailure, UnknownFailure, ConfigurationError, JobCanceled, } // failureReasonsCompatibilityMap maps failure reasons that are not // supported by GitLab to failure reasons that are supported. This is // used to provide backward compatibility when new failure reasons are // introduced in runner but not yet supported by GitLab (and not in the // supported list check). 
failureReasonsCompatibilityMap = map[spec.JobFailureReason]spec.JobFailureReason{ ImagePullFailure: RunnerSystemFailure, ConfigurationError: ScriptFailure, } // A small list of failure reasons that are supported by all // GitLab instances. alwaysSupportedFailureReasons = []spec.JobFailureReason{ ScriptFailure, RunnerSystemFailure, JobExecutionTimeout, } ) const ( UpdateSucceeded UpdateState = iota UpdateAcceptedButNotCompleted UpdateTraceValidationFailed UpdateNotFound UpdateAbort UpdateFailed ) const ( PatchSucceeded PatchState = iota PatchNotFound PatchAbort PatchRangeMismatch PatchFailed ) const ( UploadSucceeded UploadState = iota UploadTooLarge UploadForbidden UploadFailed UploadServiceUnavailable UploadRedirected ) const ( DownloadSucceeded DownloadState = iota DownloadForbidden DownloadUnauthorized DownloadFailed DownloadNotFound ) type FeaturesInfo struct { Variables bool `json:"variables"` Image bool `json:"image"` Services bool `json:"services"` Artifacts bool `json:"artifacts"` Cache bool `json:"cache"` FallbackCacheKeys bool `json:"fallback_cache_keys"` Shared bool `json:"shared"` UploadMultipleArtifacts bool `json:"upload_multiple_artifacts"` UploadRawArtifacts bool `json:"upload_raw_artifacts"` Session bool `json:"session"` Terminal bool `json:"terminal"` Refspecs bool `json:"refspecs"` Masking bool `json:"masking"` Proxy bool `json:"proxy"` RawVariables bool `json:"raw_variables"` ArtifactsExclude bool `json:"artifacts_exclude"` MultiBuildSteps bool `json:"multi_build_steps"` TraceReset bool `json:"trace_reset"` TraceChecksum bool `json:"trace_checksum"` TraceSize bool `json:"trace_size"` VaultSecrets bool `json:"vault_secrets"` Cancelable bool `json:"cancelable"` ReturnExitCode bool `json:"return_exit_code"` ServiceVariables bool `json:"service_variables"` ServiceMultipleAliases bool `json:"service_multiple_aliases"` ImageExecutorOpts bool `json:"image_executor_opts"` ServiceExecutorOpts bool `json:"service_executor_opts"` CancelGracefully bool 
`json:"cancel_gracefully"`
	NativeStepsIntegration bool `json:"native_steps_integration"`
	TwoPhaseJobCommit      bool `json:"two_phase_job_commit"`
	JobInputs              bool `json:"job_inputs"`
}

// ConfigInfo carries host-level runner configuration reported to GitLab.
type ConfigInfo struct {
	Gpus string `json:"gpus"`
}

// RegisterRunnerParameters are the user-supplied attributes sent when
// registering a runner with the GitLab API.
type RegisterRunnerParameters struct {
	Description     string `json:"description,omitempty"`
	MaintenanceNote string `json:"maintenance_note,omitempty"`
	Tags            string `json:"tag_list,omitempty"`
	RunUntagged     bool   `json:"run_untagged"`
	Locked          bool   `json:"locked"`
	AccessLevel     string `json:"access_level,omitempty"`
	MaximumTimeout  int    `json:"maximum_timeout,omitempty"`
	Paused          bool   `json:"paused"`
}

// RegisterRunnerRequest is the full registration payload: the user-supplied
// parameters plus runner process info and the registration token.
type RegisterRunnerRequest struct {
	RegisterRunnerParameters
	Info  Info   `json:"info,omitempty"`
	Token string `json:"token,omitempty"`
}

// RegisterRunnerResponse carries the ID and authentication token returned
// by a successful registration.
type RegisterRunnerResponse struct {
	ID             int64     `json:"id,omitempty"`
	Token          string    `json:"token,omitempty"`
	TokenExpiresAt time.Time `json:"token_expires_at,omitempty"`
}

type VerifyRunnerRequest struct {
	Token    string `json:"token,omitempty"`
	SystemID string `json:"system_id,omitempty"`
}

type VerifyRunnerResponse struct {
	ID             int64     `json:"id,omitempty"`
	Token          string    `json:"token,omitempty"`
	TokenExpiresAt time.Time `json:"token_expires_at,omitempty"`
}

type UnregisterRunnerRequest struct {
	Token string `json:"token,omitempty"`
}

type UnregisterRunnerManagerRequest struct {
	Token    string `json:"token,omitempty"`
	SystemID string `json:"system_id"`
}

type ResetTokenRequest struct {
	Token string `json:"token,omitempty"`
}

// ResetTokenResponse is returned when rotating a runner token.
// NOTE: TokenObtainedAt has no json tag, so it marshals/unmarshals under
// the field name "TokenObtainedAt" rather than snake_case.
type ResetTokenResponse struct {
	Token           string `json:"token,omitempty"`
	TokenObtainedAt time.Time
	TokenExpiresAt  time.Time `json:"token_expires_at,omitempty"`
}

// Info describes this runner process (version, platform, capabilities);
// it accompanies registration, verification and job requests.
type Info struct {
	Name         string       `json:"name,omitempty"`
	Version      string       `json:"version,omitempty"`
	Revision     string       `json:"revision,omitempty"`
	Platform     string       `json:"platform,omitempty"`
	Architecture string       `json:"architecture,omitempty"`
	Executor     string       `json:"executor,omitempty"`
	Shell        string       `json:"shell,omitempty"`
	Features     FeaturesInfo `json:"features"`
	Config       ConfigInfo   `json:"config,omitempty"`
	Labels       Labels       `json:"labels,omitempty"`
}

// JobRequest asks GitLab for the next queued job for this runner.
type JobRequest struct {
	Info       Info         `json:"info,omitempty"`
	Token      string       `json:"token,omitempty"`
	SystemID   string       `json:"system_id,omitempty"`
	LastUpdate string       `json:"last_update,omitempty"`
	Session    *SessionInfo `json:"session,omitempty"`
}

// SessionInfo advertises the interactive session endpoint offered to GitLab.
type SessionInfo struct {
	URL           string `json:"url,omitempty"`
	Certificate   string `json:"certificate,omitempty"`
	Authorization string `json:"authorization,omitempty"`
}

// UpdateJobRequest reports a job state change back to GitLab.
type UpdateJobRequest struct {
	Info          Info                  `json:"info,omitempty"`
	Token         string                `json:"token,omitempty"`
	State         JobState              `json:"state,omitempty"`
	FailureReason spec.JobFailureReason `json:"failure_reason,omitempty"`
	Checksum      string                `json:"checksum,omitempty"` // deprecated
	Output        JobTraceOutput        `json:"output,omitempty"`
	ExitCode      int                   `json:"exit_code,omitempty"`
}

// JobTraceOutput describes the job log bytes uploaded so far.
type JobTraceOutput struct {
	Checksum string `json:"checksum,omitempty"`
	Bytesize int    `json:"bytesize,omitempty"`
}

// JobCredentials identify one job against the GitLab API. The struct tags
// double as CLI flag / env-var definitions for the helper commands.
type JobCredentials struct {
	ID          int64  `long:"id" env:"CI_JOB_ID" description:"The build ID to download and upload artifacts for"`
	Token       string `long:"token" env:"CI_JOB_TOKEN" required:"true" description:"Build token"`
	URL         string `long:"url" env:"CI_SERVER_URL" required:"true" description:"GitLab CI URL"`
	TLSCAFile   string `long:"tls-ca-file" env:"CI_SERVER_TLS_CA_FILE" description:"File containing the certificates to verify the peer when using HTTPS"`
	TLSCertFile string `long:"tls-cert-file" env:"CI_SERVER_TLS_CERT_FILE" description:"File containing certificate for TLS client auth with runner when using HTTPS"`
	TLSKeyFile  string `long:"tls-key-file" env:"CI_SERVER_TLS_KEY_FILE" description:"File containing private key for TLS client auth with runner when using HTTPS"`
}

// Accessor methods for JobCredentials fields.
func (j *JobCredentials) GetURL() string         { return j.URL }
func (j *JobCredentials) GetTLSCAFile() string   { return j.TLSCAFile }
func (j *JobCredentials) GetTLSCertFile() string { return j.TLSCertFile }
func (j *JobCredentials) GetTLSKeyFile() string  { return j.TLSKeyFile }
func (j *JobCredentials) GetToken() string       { return j.Token }

// UpdateJobInfo is the internal (non-wire) form of a job status update.
type UpdateJobInfo struct {
	ID            int64
	State         JobState
	FailureReason spec.JobFailureReason
	Output        JobTraceOutput
	ExitCode      int
}

type RouterDiscovery struct {
	ServerURL string       `json:"server_url"`
	TLSData   spec.TLSData `json:"-"` // never serialized
}

// FailuresCollector records job failures (e.g. for metrics).
type FailuresCollector interface {
	RecordFailure(reason spec.JobFailureReason, runnerConfig RunnerConfig, mode JobExecutionMode)
}

// SupportedFailureReasonMapper maps a failure reason onto one the
// server side supports.
type SupportedFailureReasonMapper interface {
	Map(fr spec.JobFailureReason) spec.JobFailureReason
}

// JobTrace is the sink for job log output plus lifecycle hooks for
// finishing, cancelling and aborting a job.
type JobTrace interface {
	io.Writer
	Success() error
	Fail(err error, failureData JobFailureData) error
	Finish()
	SetCancelFunc(cancelFunc context.CancelFunc)
	Cancel() bool
	SetAbortFunc(abortFunc context.CancelFunc)
	Abort() bool
	SetFailuresCollector(fc FailuresCollector)
	SetSupportedFailureReasonMapper(f SupportedFailureReasonMapper)
	SetDebugModeEnabled(isEnabled bool)
	IsStdout() bool
}

type UpdateJobResult struct {
	State             UpdateState
	CancelRequested   bool
	NewUpdateInterval time.Duration
}

type PatchTraceResult struct {
	SentOffset        int
	CancelRequested   bool
	State             PatchState
	NewUpdateInterval time.Duration
}

// NewPatchTraceResult builds a PatchTraceResult, converting the update
// interval from whole seconds to a time.Duration.
func NewPatchTraceResult(sentOffset int, state PatchState, newUpdateInterval int) PatchTraceResult {
	return PatchTraceResult{
		SentOffset:        sentOffset,
		State:             state,
		NewUpdateInterval: time.Duration(newUpdateInterval) * time.Second,
	}
}

type ArtifactsOptions struct {
	BaseName           string
	ExpireIn           string
	Format             spec.ArtifactFormat
	Type               string
	LogResponseDetails bool
}

// Network abstracts every GitLab API call the runner makes.
type Network interface {
	SetConnectionMaxAge(time.Duration)
	RegisterRunner(config RunnerConfig, parameters RegisterRunnerParameters) *RegisterRunnerResponse
	VerifyRunner(config RunnerConfig, systemID string) *VerifyRunnerResponse
	UnregisterRunner(config RunnerConfig) bool
	UnregisterRunnerManager(config RunnerConfig, systemID string) bool
	ResetToken(runner RunnerConfig, systemID string) *ResetTokenResponse
	ResetTokenWithPAT(runner RunnerConfig, systemID
string, pat string) *ResetTokenResponse RequestJob(ctx context.Context, config RunnerConfig, sessionInfo *SessionInfo) (*spec.Job, bool) UpdateJob(config RunnerConfig, jobCredentials *JobCredentials, jobInfo UpdateJobInfo) UpdateJobResult PatchTrace(config RunnerConfig, jobCredentials *JobCredentials, content []byte, startOffset int, debugModeEnabled bool) PatchTraceResult DownloadArtifacts(config JobCredentials, artifactsFile io.WriteCloser, directDownload *bool) DownloadState UploadRawArtifacts(config JobCredentials, bodyProvider ContentProvider, options ArtifactsOptions) (UploadState, string) ProcessJob(config RunnerConfig, buildCredentials *JobCredentials) (JobTrace, error) } ================================================ FILE: common/network_test.go ================================================ //go:build !integration package common import ( "encoding/json" "fmt" "strconv" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common/spec" ) func TestCacheCheckPolicy(t *testing.T) { for num, tc := range []struct { object spec.CachePolicy subject spec.CachePolicy expected bool expectErr bool description string }{ {spec.CachePolicyPullPush, spec.CachePolicyPull, true, false, "pull-push allows pull"}, {spec.CachePolicyPullPush, spec.CachePolicyPush, true, false, "pull-push allows push"}, {spec.CachePolicyUndefined, spec.CachePolicyPull, true, false, "undefined allows pull"}, {spec.CachePolicyUndefined, spec.CachePolicyPush, true, false, "undefined allows push"}, {spec.CachePolicyPull, spec.CachePolicyPull, true, false, "pull allows pull"}, {spec.CachePolicyPull, spec.CachePolicyPush, false, false, "pull forbids push"}, {spec.CachePolicyPush, spec.CachePolicyPull, false, false, "push forbids pull"}, {spec.CachePolicyPush, spec.CachePolicyPush, true, false, "push allows push"}, {"unknown", spec.CachePolicyPull, false, true, "unknown raises error on pull"}, {"unknown", spec.CachePolicyPush, 
false, true, "unknown raises error on push"}, } { cache := spec.Cache{Policy: tc.object} result, err := cache.CheckPolicy(tc.subject) if tc.expectErr { assert.Errorf(t, err, "case %d: %s", num, tc.description) } else { assert.NoErrorf(t, err, "case %d: %s", num, tc.description) } assert.Equal(t, tc.expected, result, "case %d: %s", num, tc.description) } } func TestShouldCache(t *testing.T) { for _, params := range []struct { jobSuccess bool when spec.CacheWhen expectedShouldCache bool }{ {true, spec.CacheWhenOnSuccess, true}, {true, spec.CacheWhenAlways, true}, {true, spec.CacheWhenOnFailure, false}, {false, spec.CacheWhenOnSuccess, false}, {false, spec.CacheWhenAlways, true}, {false, spec.CacheWhenOnFailure, true}, } { tn := "jobSuccess=" + strconv.FormatBool(params.jobSuccess) + ",when=" + string(params.when) t.Run(tn, func(t *testing.T) { expected := params.expectedShouldCache actual := params.when.ShouldCache(params.jobSuccess) assert.Equal( t, actual, expected, "Value returned from ShouldCache was not as expected", ) }) } } func TestSecrets_expandVariables(t *testing.T) { testServerURL := "server-url" testNamespace := "custom-namespace" testAuthName := "auth-name" testAuthPath := "auth-path" testAuthJWT := "auth-jwt" testAuthRole := "auth-role" testAuthUnknown := "auth-unknown" testEngineName := "engine-name" testEnginePath := "engine-path" testPath := "secret-path" testField := "secret-field" variables := spec.Variables{ {Key: "CI_VAULT_SERVER_URL", Value: testServerURL}, {Key: "CI_VAULT_NAMESPACE", Value: testNamespace}, {Key: "CI_VAULT_AUTH_NAME", Value: testAuthName}, {Key: "CI_VAULT_AUTH_PATH", Value: testAuthPath}, {Key: "CI_VAULT_AUTH_JWT", Value: testAuthJWT}, {Key: "CI_VAULT_AUTH_ROLE", Value: testAuthRole}, {Key: "CI_VAULT_AUTH_UNKNOWN_DATA", Value: testAuthUnknown}, {Key: "CI_VAULT_ENGINE_NAME", Value: testEngineName}, {Key: "CI_VAULT_ENGINE_PATH", Value: testEnginePath}, {Key: "CI_VAULT_PATH", Value: testPath}, {Key: "CI_VAULT_FIELD", Value: 
testField}, } assertValue := func(t *testing.T, prefix string, variableValue string, testedValue interface{}) { assert.Equal( t, fmt.Sprintf("%s %s", prefix, variableValue), testedValue, ) } tests := map[string]struct { secrets spec.Secrets assertSecrets func(t *testing.T, secrets spec.Secrets) }{ "no secrets defined": { secrets: nil, assertSecrets: func(t *testing.T, secrets spec.Secrets) { assert.Nil(t, secrets) }, }, "nil vault secret": { secrets: spec.Secrets{ "VAULT": spec.Secret{ Vault: nil, }, }, assertSecrets: func(t *testing.T, secrets spec.Secrets) { assert.Nil(t, secrets["VAULT"].Vault) }, }, "vault missing data": { secrets: spec.Secrets{ "VAULT": spec.Secret{ Vault: &spec.VaultSecret{}, }, }, assertSecrets: func(t *testing.T, secrets spec.Secrets) { assert.NotNil(t, secrets["VAULT"].Vault) }, }, "vault missing jwt data": { secrets: spec.Secrets{ "VAULT": spec.Secret{ Vault: &spec.VaultSecret{ Server: spec.VaultServer{ Auth: spec.VaultAuth{ Data: map[string]interface{}{ "role": testAuthRole, }, }, }, }, }, }, assertSecrets: func(t *testing.T, secrets spec.Secrets) { require.NotNil(t, secrets["VAULT"].Vault) assert.Equal(t, testAuthRole, secrets["VAULT"].Vault.Server.Auth.Data["role"]) }, }, "vault secret defined": { secrets: spec.Secrets{ "VAULT": spec.Secret{ Vault: &spec.VaultSecret{ Server: spec.VaultServer{ URL: "url ${CI_VAULT_SERVER_URL}", Namespace: "namespace ${CI_VAULT_NAMESPACE}", Auth: spec.VaultAuth{ Name: "name ${CI_VAULT_AUTH_NAME}", Path: "path ${CI_VAULT_AUTH_PATH}", Data: map[string]interface{}{ "jwt": "jwt ${CI_VAULT_AUTH_JWT}", "role": "role ${CI_VAULT_AUTH_ROLE}", "unknown": "unknown ${CI_VAULT_AUTH_UNKNOWN_DATA}", }, }, }, Engine: spec.VaultEngine{ Name: "name ${CI_VAULT_ENGINE_NAME}", Path: "path ${CI_VAULT_ENGINE_PATH}", }, Path: "path ${CI_VAULT_PATH}", Field: "field ${CI_VAULT_FIELD}", }, }, }, assertSecrets: func(t *testing.T, secrets spec.Secrets) { require.NotNil(t, secrets["VAULT"].Vault) assertValue(t, "url", testServerURL, 
secrets["VAULT"].Vault.Server.URL) assertValue(t, "namespace", testNamespace, secrets["VAULT"].Vault.Server.Namespace) assertValue(t, "name", testAuthName, secrets["VAULT"].Vault.Server.Auth.Name) assertValue(t, "path", testAuthPath, secrets["VAULT"].Vault.Server.Auth.Path) require.NotNil(t, secrets["VAULT"].Vault.Server.Auth.Data["jwt"]) assertValue(t, "jwt", testAuthJWT, secrets["VAULT"].Vault.Server.Auth.Data["jwt"]) require.NotNil(t, secrets["VAULT"].Vault.Server.Auth.Data["role"]) assertValue(t, "role", testAuthRole, secrets["VAULT"].Vault.Server.Auth.Data["role"]) require.NotNil(t, secrets["VAULT"].Vault.Server.Auth.Data["unknown"]) assertValue(t, "unknown", testAuthUnknown, secrets["VAULT"].Vault.Server.Auth.Data["unknown"]) assertValue(t, "name", testEngineName, secrets["VAULT"].Vault.Engine.Name) assertValue(t, "path", testEnginePath, secrets["VAULT"].Vault.Engine.Path) assertValue(t, "path", testPath, secrets["VAULT"].Vault.Path) assertValue(t, "field", testField, secrets["VAULT"].Vault.Field) }, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { assert.NotPanics(t, func() { tt.secrets.ExpandVariables(variables) tt.assertSecrets(t, tt.secrets) }) }) } } func TestGCPSecretManagerSecrets_expandVariables(t *testing.T) { secretName := "my-secret-1234" secretVersion := "version-999" projectNumber := "8888" poolId := "my-pool-123" providerId := "my-provider-123" jwt := "my-jwt" variables := spec.Variables{ {Key: "NAME", Value: secretName}, {Key: "VERSION", Value: secretVersion}, {Key: "PROJECT_NUMBER", Value: projectNumber}, {Key: "POOL_ID", Value: poolId}, {Key: "PROVIDER_ID", Value: providerId}, {Key: "JWT", Value: jwt}, } tests := map[string]struct { secrets spec.Secrets assertSecrets func(t *testing.T, secrets spec.Secrets) }{ "no secrets defined": { secrets: nil, assertSecrets: func(t *testing.T, secrets spec.Secrets) { assert.Nil(t, secrets) }, }, "empty data": { secrets: spec.Secrets{ "VAULT": spec.Secret{ GCPSecretManager: 
&spec.GCPSecretManagerSecret{}, }, }, assertSecrets: func(t *testing.T, secrets spec.Secrets) { assert.Equal(t, &spec.GCPSecretManagerSecret{}, secrets["VAULT"].GCPSecretManager) }, }, "without expansion": { secrets: spec.Secrets{ "VAULT": spec.Secret{ GCPSecretManager: &spec.GCPSecretManagerSecret{ Name: "my-secret", Version: "latest", Server: spec.GCPSecretManagerServer{ ProjectNumber: "1234", WorkloadIdentityFederationPoolId: "pool-id", WorkloadIdentityFederationProviderID: "provider-id", JWT: "jwt", }, }, }, }, assertSecrets: func(t *testing.T, secrets spec.Secrets) { assert.Equal(t, "my-secret", secrets["VAULT"].GCPSecretManager.Name) assert.Equal(t, "latest", secrets["VAULT"].GCPSecretManager.Version) assert.Equal(t, "1234", secrets["VAULT"].GCPSecretManager.Server.ProjectNumber) assert.Equal(t, "pool-id", secrets["VAULT"].GCPSecretManager.Server.WorkloadIdentityFederationPoolId) assert.Equal(t, "provider-id", secrets["VAULT"].GCPSecretManager.Server.WorkloadIdentityFederationProviderID) assert.Equal(t, "jwt", secrets["VAULT"].GCPSecretManager.Server.JWT) }, }, "with expansion": { secrets: spec.Secrets{ "VAULT": spec.Secret{ GCPSecretManager: &spec.GCPSecretManagerSecret{ Name: "$NAME", Version: "$VERSION", Server: spec.GCPSecretManagerServer{ ProjectNumber: "$PROJECT_NUMBER", WorkloadIdentityFederationPoolId: "$POOL_ID", WorkloadIdentityFederationProviderID: "$PROVIDER_ID", JWT: "$JWT", }, }, }, }, assertSecrets: func(t *testing.T, secrets spec.Secrets) { assert.Equal(t, secretName, secrets["VAULT"].GCPSecretManager.Name) assert.Equal(t, secretVersion, secrets["VAULT"].GCPSecretManager.Version) assert.Equal(t, projectNumber, secrets["VAULT"].GCPSecretManager.Server.ProjectNumber) assert.Equal(t, poolId, secrets["VAULT"].GCPSecretManager.Server.WorkloadIdentityFederationPoolId) assert.Equal(t, providerId, secrets["VAULT"].GCPSecretManager.Server.WorkloadIdentityFederationProviderID) assert.Equal(t, jwt, secrets["VAULT"].GCPSecretManager.Server.JWT) }, }, } 
for tn, tt := range tests { t.Run(tn, func(t *testing.T) { assert.NotPanics(t, func() { tt.secrets.ExpandVariables(variables) tt.assertSecrets(t, tt.secrets) }) }) } } func TestAzureKeyVaultSecrets_expandVariables(t *testing.T) { testName := "key-name" testVersion := "key-version" testAuthJWT := "auth-jwt" variables := spec.Variables{ {Key: "CI_AZURE_KEY_VAULT_KEY_NAME", Value: testName}, {Key: "CI_AZURE_KEY_VAULT_KEY_VERSION", Value: testVersion}, {Key: "CI_AZURE_KEY_VAULT_AUTH_JWT", Value: testAuthJWT}, } assertValue := func(t *testing.T, prefix string, variableValue string, testedValue interface{}) { assert.Equal( t, fmt.Sprintf("%s %s", prefix, variableValue), testedValue, ) } tests := map[string]struct { secrets spec.Secrets assertSecrets func(t *testing.T, secrets spec.Secrets) }{ "no secrets defined": { secrets: nil, assertSecrets: func(t *testing.T, secrets spec.Secrets) { assert.Nil(t, secrets) }, }, "nil vault secret": { secrets: spec.Secrets{ "VAULT": spec.Secret{ AzureKeyVault: nil, }, }, assertSecrets: func(t *testing.T, secrets spec.Secrets) { assert.Nil(t, secrets["VAULT"].Vault) }, }, "vault missing data": { secrets: spec.Secrets{ "VAULT": spec.Secret{ AzureKeyVault: &spec.AzureKeyVaultSecret{}, }, }, assertSecrets: func(t *testing.T, secrets spec.Secrets) { assert.NotNil(t, secrets["VAULT"].AzureKeyVault) }, }, "vault missing jwt data": { secrets: spec.Secrets{ "VAULT": spec.Secret{ AzureKeyVault: &spec.AzureKeyVaultSecret{ Name: testName, Version: testVersion, Server: spec.AzureKeyVaultServer{ ClientID: "test_client_id", TenantID: "test_tenant_id", URL: "test_url", }, }, }, }, assertSecrets: func(t *testing.T, secrets spec.Secrets) { require.NotNil(t, secrets["VAULT"].AzureKeyVault) assert.Equal(t, testName, secrets["VAULT"].AzureKeyVault.Name) assert.Equal(t, testVersion, secrets["VAULT"].AzureKeyVault.Version) }, }, "vault secret defined": { secrets: spec.Secrets{ "VAULT": spec.Secret{ AzureKeyVault: &spec.AzureKeyVaultSecret{ Name: "name 
${CI_AZURE_KEY_VAULT_KEY_NAME}", Version: "version ${CI_AZURE_KEY_VAULT_KEY_VERSION}", Server: spec.AzureKeyVaultServer{ ClientID: "client_id", TenantID: "tenant_id", JWT: "jwt ${CI_AZURE_KEY_VAULT_AUTH_JWT}", URL: "url", }, }, }, }, assertSecrets: func(t *testing.T, secrets spec.Secrets) { require.NotNil(t, secrets["VAULT"].AzureKeyVault) assertValue(t, "name", testName, secrets["VAULT"].AzureKeyVault.Name) assertValue(t, "version", testVersion, secrets["VAULT"].AzureKeyVault.Version) assertValue(t, "jwt", testAuthJWT, secrets["VAULT"].AzureKeyVault.Server.JWT) }, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { assert.NotPanics(t, func() { tt.secrets.ExpandVariables(variables) tt.assertSecrets(t, tt.secrets) }) }) } } func TestJobResponse_JobURL(t *testing.T) { jobID := int64(1) testCases := map[string]string{ "http://user:pass@gitlab.example.com/my-namespace/my-project.git": "http://gitlab.example.com/my-namespace/my-project/-/jobs/1", "http://user:pass@gitlab.example.com/my-namespace/my-project": "http://gitlab.example.com/my-namespace/my-project/-/jobs/1", "http://user:pass@gitlab.example.com/my-namespace/my.git.project.git": "http://gitlab.example.com/my-namespace/my.git.project/-/jobs/1", "http://user:pass@gitlab.example.com/my-namespace/my.git.project": "http://gitlab.example.com/my-namespace/my.git.project/-/jobs/1", } for repoURL, expectedURL := range testCases { job := spec.Job{ ID: jobID, GitInfo: spec.GitInfo{ RepoURL: repoURL, }, } assert.Equal(t, expectedURL, job.JobURL()) } } func Test_Image_ExecutorOptions_UnmarshalJSON(t *testing.T) { emptyUser := spec.StringOrInt64("") uid1000 := spec.StringOrInt64("1000") ubuntuUser := spec.StringOrInt64("ubuntu") tests := map[string]struct { json string expected func(*testing.T, spec.Image) expectedErrMsg []string }{ "no executor_opts": { json: `{"executor_opts":{}}`, expected: func(t *testing.T, i spec.Image) { assert.Equal(t, "", i.ExecutorOptions.Docker.Platform) assert.Equal(t, emptyUser, 
i.ExecutorOptions.Docker.User) }, }, "docker, empty": { json: `{"executor_opts":{"docker": {}}}`, expected: func(t *testing.T, i spec.Image) { assert.Equal(t, "", i.ExecutorOptions.Docker.Platform) assert.Equal(t, emptyUser, i.ExecutorOptions.Docker.User) }, }, "docker, only user": { json: `{"executor_opts":{"docker": {"user": "ubuntu"}}}`, expected: func(t *testing.T, i spec.Image) { assert.Equal(t, "", i.ExecutorOptions.Docker.Platform) assert.Equal(t, ubuntuUser, i.ExecutorOptions.Docker.User) }, }, "docker, only platform": { json: `{"executor_opts":{"docker": {"platform": "amd64"}}}`, expected: func(t *testing.T, i spec.Image) { assert.Equal(t, "amd64", i.ExecutorOptions.Docker.Platform) assert.Equal(t, emptyUser, i.ExecutorOptions.Docker.User) }, }, "docker, all options": { json: `{"executor_opts":{"docker": {"platform": "arm64", "user": "ubuntu"}}}`, expected: func(t *testing.T, i spec.Image) { assert.Equal(t, "arm64", i.ExecutorOptions.Docker.Platform) assert.Equal(t, ubuntuUser, i.ExecutorOptions.Docker.User) }, }, "docker, invalid options": { json: `{"executor_opts":{"docker": {"foobar": 1234}}}`, expectedErrMsg: []string{`Unsupported "image" options [foobar] for "docker executor"; supported options are [platform user]`}, expected: func(t *testing.T, i spec.Image) { assert.Equal(t, "", i.ExecutorOptions.Docker.Platform) assert.Equal(t, emptyUser, i.ExecutorOptions.Docker.User) }, }, "kubernetes, empty": { json: `{"executor_opts":{"kubernetes": {}}}`, expected: func(t *testing.T, i spec.Image) { assert.Equal(t, emptyUser, i.ExecutorOptions.Kubernetes.User) }, }, "kubernetes, all options": { json: `{"executor_opts":{"kubernetes": {"user": "1000"}}}`, expected: func(t *testing.T, i spec.Image) { assert.Equal(t, uid1000, i.ExecutorOptions.Kubernetes.User) }, }, "kubernetes, user as int64": { json: `{"executor_opts":{"kubernetes": {"user": 1000}}}`, expected: func(t *testing.T, i spec.Image) { assert.Equal(t, uid1000, i.ExecutorOptions.Kubernetes.User) }, }, 
"kubernetes, invalid options": { json: `{"executor_opts":{"kubernetes": {"foobar": 1234}}}`, expectedErrMsg: []string{`Unsupported "image" options [foobar] for "kubernetes executor"; supported options are [user]`}, expected: func(t *testing.T, i spec.Image) { assert.Equal(t, "", i.ExecutorOptions.Docker.Platform) assert.Equal(t, emptyUser, i.ExecutorOptions.Docker.User) assert.Equal(t, emptyUser, i.ExecutorOptions.Kubernetes.User) }, }, "invalid executor": { json: `{"executor_opts":{"k8s": {}}}`, expectedErrMsg: []string{`Unsupported "image" options [k8s] for "executor_opts"; supported options are [docker kubernetes]`}, expected: func(t *testing.T, i spec.Image) { assert.Equal(t, "", i.ExecutorOptions.Docker.Platform) assert.Equal(t, emptyUser, i.ExecutorOptions.Docker.User) assert.Equal(t, emptyUser, i.ExecutorOptions.Kubernetes.User) }, }, "docker, invalid executor, valid executor, invalid option": { json: `{"executor_opts":{"k8s": {}, "docker": {"platform": "amd64", "foobar": 1234}}}`, expectedErrMsg: []string{ `Unsupported "image" options [k8s] for "executor_opts"; supported options are [docker kubernetes]`, `Unsupported "image" options [foobar] for "docker executor"; supported options are [platform user]`, }, expected: func(t *testing.T, i spec.Image) { assert.Equal(t, "amd64", i.ExecutorOptions.Docker.Platform) assert.Equal(t, emptyUser, i.ExecutorOptions.Docker.User) }, }, "kubernetes, invalid executor, valid executor, invalid option": { json: `{"executor_opts":{"dockers": {}, "kubernetes": {"user": "1000", "foobar": 1234}}}`, expectedErrMsg: []string{ `Unsupported "image" options [dockers] for "executor_opts"; supported options are [docker kubernetes]`, `Unsupported "image" options [foobar] for "kubernetes executor"; supported options are [user]`, }, expected: func(t *testing.T, i spec.Image) { assert.Equal(t, uid1000, i.ExecutorOptions.Kubernetes.User) }, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { got := spec.Image{} err := 
json.Unmarshal([]byte(tt.json), &got) assert.NoError(t, err) tt.expected(t, got) if len(tt.expectedErrMsg) == 0 { assert.Nil(t, got.UnsupportedOptions()) } else { for i := range tt.expectedErrMsg { assert.Contains(t, got.UnsupportedOptions().Error(), tt.expectedErrMsg[i]) } } }) } } func TestJobResponse_Run(t *testing.T) { tests := map[string]struct { json string wantJSON string wantErr bool execNativeSteps bool }{ "steps not requested": { json: `{}`, wantJSON: `{}`, }, "steps not requested, image is unmodified": { json: `{"image":{"name":"registry.gitlab.com/project/image:v1"}}`, wantJSON: `{"image":{"name":"registry.gitlab.com/project/image:v1"}}`, }, "steps are requested via shim, default image set": { json: `{"run":"[{\"name\":\"hello\",\"script\":\"echo hello world\"}]"}`, wantJSON: ` { "run":"[{\"name\":\"hello\",\"script\":\"echo hello world\"}]", "variables":[ { "key":"STEPS", "value":"[{\"name\":\"hello\",\"script\":\"echo hello world\"}]", "raw":true } ], "steps":[ { "name":"script", "script":["step-runner ci"], "timeout":3600, "when":"on_success" } ] }`, }, "steps are requested via shim, image unmodified": { json: ` { "run":"[{\"name\":\"hello\",\"script\":\"echo hello world\"}]", "image":{"name":"registry.gitlab.com/project/image:v1"} }`, wantJSON: ` { "run":"[{\"Name\":\"hello\",\"Script\":\"echo hello world\"}]", "variables":[ { "key":"STEPS", "value":"[{\"name\":\"hello\",\"script\":\"echo hello world\"}]", "raw":true } ], "steps":[ { "name":"script", "script":["step-runner ci"], "timeout":3600, "when":"on_success" } ], "image":{"name":"registry.gitlab.com/project/image:v1"} }`, }, "steps and script are requested": { json: ` { "run":"[{\"name\":\"hello\",\"script\":\"echo hello world\"}]", "Steps":[ { "name":"script", "script":["echo hello job"], "timeout":3600, "when":"on_success" } ] }`, wantErr: true, }, "steps requested and STEP variable used": { json: ` { "run":"[{\"Name\":\"hello\",\"script\":\"echo hello world\"}]", "variables":[ { 
"key":"STEPS", "value":"not steps", "raw":true } ] }`, wantErr: true, }, "steps request via native exec, executor supports native exec": { execNativeSteps: true, json: ` { "run":"[{\"name\":\"hello\",\"script\":\"echo hello world\"}]", "variables":[ { "key":"FF_USE_NATIVE_STEPS", "value":"true" } ] }`, wantJSON: ` { "run":"[{\"name\":\"hello\",\"script\":\"echo hello world\"}]", "variables":[ { "key":"FF_USE_NATIVE_STEPS", "value":"true" } ], "steps":[ { "name":"run" } ] }`, }, "steps request via native exec, executor does not support native exec": { execNativeSteps: false, json: ` { "run":"[{\"name\":\"hello\",\"script\":\"echo hello world\"}]", "variables":[ { "key":"FF_USE_NATIVE_STEPS", "value":"true" } ] }`, wantJSON: ` { "run":"[{\"name\":\"hello\",\"script\":\"echo hello world\"}]", "Variables":[ { "key":"FF_USE_NATIVE_STEPS", "value":"true" }, { "key":"STEPS", "value":"[{\"name\":\"hello\",\"script\":\"echo hello world\"}]", "raw":true } ], "steps":[ { "name":"script", "script":["step-runner ci"], "timeout":3600, "when":"on_success" } ] }`, }, "steps are requested via shim, executor supports native exec": { execNativeSteps: true, json: ` { "run":"[{\"name\":\"hello\",\"script\":\"echo hello world\"}]" }`, wantJSON: ` { "run":"[{\"name\":\"hello\",\"script\":\"echo hello world\"}]", "steps":[ { "name":"run" } ] }`, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { jobResponse := &spec.Job{} require.NoError(t, json.Unmarshal([]byte(tt.json), &jobResponse)) err := jobResponse.ValidateStepsJobRequest(tt.execNativeSteps) if tt.wantErr { require.Error(t, err) return } require.NoError(t, err) want := &spec.Job{} require.NoError(t, json.Unmarshal([]byte(tt.wantJSON), &want)) require.Equal(t, want, jobResponse) }) } } func TestFeaturesInfo_JSONMarshaling(t *testing.T) { tests := []struct { name string features FeaturesInfo expected string }{ { name: "all default (disabled)", features: FeaturesInfo{}, expected: 
`{"variables":false,"image":false,"services":false,"artifacts":false,"cache":false,"fallback_cache_keys":false,"shared":false,"upload_multiple_artifacts":false,"upload_raw_artifacts":false,"session":false,"terminal":false,"refspecs":false,"masking":false,"proxy":false,"raw_variables":false,"artifacts_exclude":false,"multi_build_steps":false,"trace_reset":false,"trace_checksum":false,"trace_size":false,"vault_secrets":false,"cancelable":false,"return_exit_code":false,"service_variables":false,"service_multiple_aliases":false,"image_executor_opts":false,"service_executor_opts":false,"cancel_gracefully":false,"native_steps_integration":false,"two_phase_job_commit":false,"job_inputs":false}`, }, { name: "some enabled", features: FeaturesInfo{ Variables: true, Image: true, TwoPhaseJobCommit: true, JobInputs: true, }, expected: `{"variables":true,"image":true,"services":false,"artifacts":false,"cache":false,"fallback_cache_keys":false,"shared":false,"upload_multiple_artifacts":false,"upload_raw_artifacts":false,"session":false,"terminal":false,"refspecs":false,"masking":false,"proxy":false,"raw_variables":false,"artifacts_exclude":false,"multi_build_steps":false,"trace_reset":false,"trace_checksum":false,"trace_size":false,"vault_secrets":false,"cancelable":false,"return_exit_code":false,"service_variables":false,"service_multiple_aliases":false,"image_executor_opts":false,"service_executor_opts":false,"cancel_gracefully":false,"native_steps_integration":false,"two_phase_job_commit":true,"job_inputs":true}`, }, { name: "all enabled", features: FeaturesInfo{ Variables: true, Image: true, Services: true, Artifacts: true, Cache: true, FallbackCacheKeys: true, Shared: true, UploadMultipleArtifacts: true, UploadRawArtifacts: true, Session: true, Terminal: true, Refspecs: true, Masking: true, Proxy: true, RawVariables: true, ArtifactsExclude: true, MultiBuildSteps: true, TraceReset: true, TraceChecksum: true, TraceSize: true, VaultSecrets: true, Cancelable: true, 
ReturnExitCode: true, ServiceVariables: true, ServiceMultipleAliases: true, ImageExecutorOpts: true, ServiceExecutorOpts: true, CancelGracefully: true, NativeStepsIntegration: true, TwoPhaseJobCommit: true, JobInputs: true, }, expected: `{"variables":true,"image":true,"services":true,"artifacts":true,"cache":true,"fallback_cache_keys":true,"shared":true,"upload_multiple_artifacts":true,"upload_raw_artifacts":true,"session":true,"terminal":true,"refspecs":true,"masking":true,"proxy":true,"raw_variables":true,"artifacts_exclude":true,"multi_build_steps":true,"trace_reset":true,"trace_checksum":true,"trace_size":true,"vault_secrets":true,"cancelable":true,"return_exit_code":true,"service_variables":true,"service_multiple_aliases":true,"image_executor_opts":true,"service_executor_opts":true,"cancel_gracefully":true,"native_steps_integration":true,"two_phase_job_commit":true,"job_inputs":true}`, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { // Test marshaling jsonData, err := json.Marshal(tt.features) require.NoError(t, err) assert.JSONEq(t, tt.expected, string(jsonData)) // Test unmarshaling var features FeaturesInfo err = json.Unmarshal(jsonData, &features) require.NoError(t, err) assert.Equal(t, tt.features, features) }) } } ================================================ FILE: common/process_logger_adaptor.go ================================================ package common import ( "github.com/sirupsen/logrus" "gitlab.com/gitlab-org/gitlab-runner/common/buildlogger" "gitlab.com/gitlab-org/gitlab-runner/helpers/process" ) type ProcessLoggerAdapter struct { buildLogger buildlogger.Logger } func NewProcessLoggerAdapter(buildlogger buildlogger.Logger) *ProcessLoggerAdapter { return &ProcessLoggerAdapter{ buildLogger: buildlogger, } } func (l *ProcessLoggerAdapter) WithFields(fields logrus.Fields) process.Logger { l.buildLogger = *l.buildLogger.WithFields(fields) return l } func (l *ProcessLoggerAdapter) Warn(args ...interface{}) { 
l.buildLogger.Warningln(args...)
}

================================================ FILE: common/reset_token.go ================================================
package common

// ResetToken rotates the runner's authentication token through the network
// client, using the PAT-authenticated endpoint when pat is non-empty.
// On success it updates the token fields on runner in place and returns
// true; it returns false when the API call yields no response.
func ResetToken(network Network, runner *RunnerConfig, systemID string, pat string) bool {
	var res *ResetTokenResponse
	if pat == "" {
		res = network.ResetToken(*runner, systemID)
	} else {
		res = network.ResetTokenWithPAT(*runner, systemID, pat)
	}
	if res == nil {
		return false
	}

	runner.Token = res.Token
	runner.TokenExpiresAt = res.TokenExpiresAt
	runner.TokenObtainedAt = res.TokenObtainedAt

	return true
}

================================================ FILE: common/secrets.go ================================================
package common

import (
	"errors"
	"fmt"

	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/gitlab-runner/helpers"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags"
)

// logger is the minimal logging surface the secrets resolver requires.
type logger interface {
	Println(args ...interface{})
	Warningln(args ...interface{})
}

// SecretsResolver resolves a set of job secrets into job variables.
type SecretsResolver interface {
	Resolve(secrets spec.Secrets) (spec.Variables, error)
}

// SecretResolverRegistry stores resolver factories and selects the one
// able to handle a given secret.
type SecretResolverRegistry interface {
	Register(f secretResolverFactory)
	GetFor(secret spec.Secret) (SecretResolver, error)
}

// secretResolverFactory builds a SecretResolver bound to one secret.
type secretResolverFactory func(secret spec.Secret) SecretResolver

// SecretResolver resolves a single secret to its string value.
type SecretResolver interface {
	Name() string
	IsSupported() bool
	Resolve() (string, error)
}

var (
	// secretResolverRegistry is the package-level registry that resolver
	// implementations register themselves into.
	secretResolverRegistry = new(defaultSecretResolverRegistry)

	ErrMissingLogger         = errors.New("logger not provided")
	ErrMissingSecretResolver = errors.New("no resolver that can handle the secret")
	ErrSecretNotFound        = errors.New("secret not found")
)

// GetSecretResolverRegistry exposes the package-level resolver registry.
func GetSecretResolverRegistry() SecretResolverRegistry {
	return secretResolverRegistry
}

type defaultSecretResolverRegistry struct {
	factories []secretResolverFactory
}

// Register appends a resolver factory; registration order determines the
// order GetFor consults them in.
func (r *defaultSecretResolverRegistry) Register(f secretResolverFactory) {
	r.factories = append(r.factories, f)
}

// GetFor returns the first registered resolver that supports the secret,
// or ErrMissingSecretResolver when none does.
func (r *defaultSecretResolverRegistry) GetFor(secret spec.Secret) (SecretResolver, error) {
	for _, f
:= range r.factories {
		sr := f(secret)
		if sr.IsSupported() {
			return sr, nil
		}
	}

	return nil, ErrMissingSecretResolver
}

// newSecretsResolver builds the default SecretsResolver. The logger is
// mandatory (ErrMissingLogger otherwise); featureFlagOn reports whether a
// named feature flag is enabled for the current job.
func newSecretsResolver(l logger, registry SecretResolverRegistry, featureFlagOn func(string) bool) (SecretsResolver, error) {
	if l == nil {
		return nil, ErrMissingLogger
	}

	sr := &defaultSecretsResolver{
		logger:                 l,
		secretResolverRegistry: registry,
		featureFlagOn:          featureFlagOn,
	}

	return sr, nil
}

type defaultSecretsResolver struct {
	logger                 logger
	secretResolverRegistry SecretResolverRegistry
	featureFlagOn          func(string) bool
}

// Resolve turns each secret into a job variable. A nil secrets map yields
// nil variables; a failure resolving any single secret aborts the whole
// run. Iteration order over the map is not deterministic.
func (r *defaultSecretsResolver) Resolve(secrets spec.Secrets) (spec.Variables, error) {
	if secrets == nil {
		return nil, nil
	}

	msg := fmt.Sprintf(
		"%sResolving secrets%s",
		helpers.ANSI_BOLD_CYAN,
		helpers.ANSI_RESET,
	)
	r.logger.Println(msg)

	variables := make(spec.Variables, 0)
	for variableKey, secret := range secrets {
		r.logger.Println(fmt.Sprintf("Resolving secret %q...", variableKey))

		v, err := r.handleSecret(variableKey, secret)
		if err != nil {
			return nil, err
		}
		if v != nil {
			variables = append(variables, *v)
		}
	}

	return variables, nil
}

// handleSecret resolves one secret. An unsupported secret is only warned
// about (nil variable, nil error). ErrSecretNotFound is fatal only when
// the EnableSecretResolvingFailsIfMissing feature flag is on; otherwise it
// is swallowed and the (possibly empty) value is still used. Resolved
// values become masked, raw variables with File mirroring secret.IsFile().
func (r *defaultSecretsResolver) handleSecret(variableKey string, secret spec.Secret) (*spec.Variable, error) {
	sr, err := r.secretResolverRegistry.GetFor(secret)
	if err != nil {
		r.logger.Warningln(fmt.Sprintf("Not resolved: %v", err))
		return nil, nil
	}

	r.logger.Println(fmt.Sprintf("Using %q secret resolver...", sr.Name()))

	value, err := sr.Resolve()
	if errors.Is(err, ErrSecretNotFound) {
		if !r.featureFlagOn(featureflags.EnableSecretResolvingFailsIfMissing) {
			err = nil
		} else {
			err = fmt.Errorf("%w: %v", err, variableKey)
		}
	}
	if err != nil {
		return nil, err
	}

	variable := &spec.Variable{
		Key:    variableKey,
		Value:  value,
		File:   secret.IsFile(),
		Masked: true,
		Raw:    true,
	}

	return variable, nil
}

================================================ FILE: common/secrets_test.go ================================================
//go:build !integration

package common

import (
	"testing"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" ) func TestDefaultResolver_Resolve(t *testing.T) { variableKey := "TEST_VARIABLE" returnValue := "test" secrets := spec.Secrets{ variableKey: spec.Secret{ Vault: &spec.VaultSecret{ Server: spec.VaultServer{ URL: "url", Auth: spec.VaultAuth{ Name: "name", Path: "path", Data: spec.VaultAuthData{"data": "data"}, }, }, Engine: spec.VaultEngine{ Name: "name", Path: "path", }, Path: "path", Field: "field", }, }, } composeSecrets := func(file bool) spec.Secrets { secret := secrets[variableKey] secret.File = &file return spec.Secrets{variableKey: secret} } getLogger := func(t *testing.T) logger { logger := newMockLogger(t) logger.On("Println", mock.Anything).Maybe() return logger } tests := map[string]struct { getLogger func(t *testing.T) logger supportedResolverPresent bool secrets spec.Secrets resolvedVariable *spec.Variable failIfSecretMissing bool errorOnSecretResolving error expectedResolverCreationError error expectedVariables spec.Variables expectedError error }{ "resolver creation error": { getLogger: func(t *testing.T) logger { return nil }, expectedResolverCreationError: ErrMissingLogger, }, "no secrets to resolve": { getLogger: getLogger, supportedResolverPresent: true, secrets: nil, expectedVariables: nil, expectedError: nil, }, "error on secret resolving": { getLogger: getLogger, supportedResolverPresent: true, secrets: secrets, errorOnSecretResolving: assert.AnError, expectedVariables: nil, expectedError: assert.AnError, }, "secret resolved properly - file not defined": { getLogger: getLogger, supportedResolverPresent: true, secrets: secrets, expectedVariables: spec.Variables{ { Key: variableKey, Value: returnValue, File: true, Masked: true, Raw: true, }, }, expectedError: nil, }, "secret resolved properly - file set to true": { getLogger: 
getLogger, supportedResolverPresent: true, secrets: composeSecrets(true), expectedVariables: spec.Variables{ { Key: variableKey, Value: returnValue, File: true, Masked: true, Raw: true, }, }, expectedError: nil, }, "secret resolved properly - file set to false": { getLogger: getLogger, supportedResolverPresent: true, secrets: composeSecrets(false), expectedVariables: spec.Variables{ { Key: variableKey, Value: returnValue, File: false, Masked: true, Raw: true, }, }, expectedError: nil, }, "no supported resolvers present": { getLogger: func(t *testing.T) logger { logger := newMockLogger(t) logger.On("Println", mock.Anything).Maybe() logger.On("Warningln", mock.Anything).Maybe() return logger }, supportedResolverPresent: false, secrets: secrets, expectedVariables: spec.Variables{}, expectedError: nil, }, "secret not found - fail if missing": { getLogger: getLogger, supportedResolverPresent: true, secrets: secrets, failIfSecretMissing: true, errorOnSecretResolving: ErrSecretNotFound, expectedVariables: nil, expectedError: ErrSecretNotFound, }, "secret not found - succeed if missing": { getLogger: getLogger, supportedResolverPresent: true, secrets: secrets, failIfSecretMissing: false, errorOnSecretResolving: ErrSecretNotFound, expectedVariables: spec.Variables{ { Key: variableKey, Value: returnValue, File: true, Masked: true, Raw: true, }, }, expectedError: nil, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { unsupportedResolver := NewMockSecretResolver(t) supportedResolver := NewMockSecretResolver(t) if tt.secrets != nil { unsupportedResolver.On("IsSupported"). Return(false). Once() supportedResolver.On("IsSupported"). Return(tt.supportedResolverPresent). Once() supportedResolver.On("Name"). Return("supported_resolver"). Maybe() if tt.supportedResolverPresent { supportedResolver.On("Resolve"). Return(returnValue, tt.errorOnSecretResolving). 
Once()
				}
			}

			registry := new(defaultSecretResolverRegistry)
			registry.Register(func(secret spec.Secret) SecretResolver { return unsupportedResolver })
			registry.Register(func(secret spec.Secret) SecretResolver { return supportedResolver })

			logger := tt.getLogger(t)

			r, err := newSecretsResolver(logger, registry, func(s string) bool {
				if s == featureflags.EnableSecretResolvingFailsIfMissing {
					return tt.failIfSecretMissing
				}
				return false
			})
			if tt.expectedResolverCreationError != nil {
				assert.ErrorAs(t, err, &tt.expectedResolverCreationError)
				return
			}
			require.NoError(t, err)

			variables, err := r.Resolve(tt.secrets)
			if tt.expectedError != nil {
				assert.ErrorAs(t, err, &tt.expectedError)
				return
			}
			assert.NoError(t, err)
			assert.Equal(t, tt.expectedVariables, variables)
		})
	}
}



================================================
FILE: common/shell.go
================================================
package common

import (
	"context"
	"fmt"

	"github.com/sirupsen/logrus"

	"gitlab.com/gitlab-org/gitlab-runner/helpers"
)

// ShellConfiguration describes how to invoke a shell for a job.
type ShellConfiguration struct {
	Command   string
	Arguments []string

	CmdLine string // combination of shell escaped command + args

	DockerCommand []string
	PassFile      bool
	Extension     string
}

// ShellType selects which shell invocation mode is used for a job.
type ShellType int

const (
	// Use NormalShell when running builds inside a Docker container, as it preserves environment variables defined in the Dockerfile.
	// Use InteractiveShell only when a custom configuration is required for the interactive web terminal.
	// Use LoginShell in all other scenarios.
NormalShell ShellType = iota LoginShell InteractiveShell ) func (s *ShellConfiguration) String() string { return helpers.ToYAML(s) } type ShellScriptInfo struct { Shell string Build *Build Type ShellType User string RunnerCommand string PreGetSourcesScript string PostGetSourcesScript string PreBuildScript string PostBuildScript string } type Shell interface { GetName() string GetFeatures(features *FeaturesInfo) IsDefault() bool GetConfiguration(info ShellScriptInfo) (*ShellConfiguration, error) GenerateScript(ctx context.Context, buildStage BuildStage, info ShellScriptInfo) (string, error) GenerateSaveScript(info ShellScriptInfo, scriptPath, script string) (string, error) GetEntrypointCommand(info ShellScriptInfo, probeFile string) []string } var shells map[string]Shell func RegisterShell(shell Shell) { logrus.Debugln("Registering", shell.GetName(), "shell...") if shells == nil { shells = make(map[string]Shell) } if shells[shell.GetName()] != nil { panic("Shell already exist: " + shell.GetName()) } shells[shell.GetName()] = shell } func GetShell(shell string) Shell { if shells == nil { return nil } return shells[shell] } func GetShellConfiguration(info ShellScriptInfo) (*ShellConfiguration, error) { shell := GetShell(info.Shell) if shell == nil { return nil, fmt.Errorf("shell %s not found", info.Shell) } return shell.GetConfiguration(info) } func GenerateShellScript(ctx context.Context, buildStage BuildStage, info ShellScriptInfo) (string, error) { shell := GetShell(info.Shell) if shell == nil { return "", fmt.Errorf("shell %s not found", info.Shell) } return shell.GenerateScript(ctx, buildStage, info) } func GetDefaultShell() string { if shells == nil { panic("no shells defined") } for _, shell := range shells { if shell.IsDefault() { return shell.GetName() } } panic("no default shell defined") } ================================================ FILE: common/spec/inputs.go ================================================ package spec import ( "encoding/json" 
"errors" "fmt" "iter" "reflect" "slices" "gitlab.com/gitlab-org/moa" "gitlab.com/gitlab-org/moa/ast" "gitlab.com/gitlab-org/moa/value" "gitlab.com/gitlab-org/step-runner/pkg/api/expression" ) type Inputs struct { inputs []expression.Input evaluator *expression.Evaluator metricsCollector *JobInputsMetricsCollector } type JobInput struct { Key string `json:"key"` Value JobInputValue `json:"value"` } type JobInputValue struct { Type JobInputContentTypeName `json:"type"` Content value.Value `json:"content"` Sensitive bool `json:"sensitive"` } type JobInputContentTypeName string type InputExpander interface { Expand(*Inputs) error } type InputInterpolationError struct { err error } func (e *InputInterpolationError) Error() string { return fmt.Sprintf("failed to interpolate job inputs: %s", e.err.Error()) } const ( JobInputContentTypeNameString JobInputContentTypeName = "string" JobInputContentTypeNameNumber JobInputContentTypeName = "number" JobInputContentTypeNameBoolean JobInputContentTypeName = "boolean" JobInputContentTypeNameArray JobInputContentTypeName = "array" JobInputContentTypeNameStruct JobInputContentTypeName = "struct" ) var ( errInputExpanderNotSupported = errors.New("type does not implement InputExpander") ) func (t JobInputContentTypeName) MoaKind() (value.Kind, error) { switch t { case JobInputContentTypeNameString: return value.StringKind, nil case JobInputContentTypeNameNumber: return value.NumberKind, nil case JobInputContentTypeNameBoolean: return value.BoolKind, nil case JobInputContentTypeNameArray: return value.ArrayKind, nil case JobInputContentTypeNameStruct: return value.ObjectKind, nil default: return value.NullKind, errors.New("type is unknown") } } var ( ErrSensitiveUnsupported = errors.New("sensitive inputs are unsupported in interpolations yet") // errInterpolationFound defines a sentinel error for when an interpolation was detected errInterpolationFound = errors.New("interpolation found") // errJobInputAccessFound defines a sentinel 
error for when a job input access pattern is detected errJobInputAccessFound = errors.New("job input access found") ) // interpolationDetector is a visitor that detects if the AST contains an Interpolation // The visitor returns a sentinel error if as soon as it encounters the first Template. type interpolationDetector struct{} func (v *interpolationDetector) Enter(expr ast.Expr) (ast.Visitor, error) { if _, ok := expr.(*ast.Template); ok { return nil, errInterpolationFound } return v, nil } func (v *interpolationDetector) Exit(expr ast.Expr) (ast.Expr, error) { return expr, nil } func (i *JobInput) UnmarshalJSON(data []byte) error { type alias JobInput if err := json.Unmarshal(data, (*alias)(i)); err != nil { return err } if err := i.validate(); err != nil { return err } return nil } func (i *JobInput) validate() error { // verify that input has key if i.Key == "" { return fmt.Errorf("input without key") } if i.Value.Content.Kind() == value.NullKind { return fmt.Errorf("input %q is null, must have valid value", i.Key) } // verify that we have a supported and valid input and moa type moaKind, err := i.Value.Type.MoaKind() if err != nil { return fmt.Errorf("invalid type %q for input %q: %w", i.Value.Type, i.Key, err) } // verify that the input content actually has the announced type if moaKind != i.Value.Content.Kind() { return fmt.Errorf("mismatching type of input %q. 
Announced %q, but got %q", i.Key, moaKind, i.Value.Content.Kind()) } return nil } func (i *Inputs) UnmarshalJSON(data []byte) error { var inputs []JobInput if err := json.Unmarshal(data, &inputs); err != nil { return err } jobInputs, err := NewJobInputs(inputs) if err != nil { return err } *i = jobInputs return nil } func NewJobInputs(inputs []JobInput) (Inputs, error) { i := Inputs{} for _, input := range inputs { // post-process sensitive mark for input value v := input.Value.Content if input.Value.Sensitive { v = v.WithMarks(expression.Sensitive) } i.inputs = append(i.inputs, expression.Input{ Key: input.Key, Value: v, }) } e, err := expression.NewEvaluator(value.Object(&i)) if err != nil { return Inputs{}, err } i.evaluator = e return i, nil } func (i *Inputs) All() iter.Seq2[value.Value, value.Value] { return func(yield func(value.Value, value.Value) bool) { for _, input := range i.inputs { if !yield(value.String(input.Key), input.Value) { return } } } } func (i *Inputs) Attr(a string) (value.Value, error) { idx := slices.IndexFunc(i.inputs, func(x expression.Input) bool { return x.Key == a }) if idx < 0 { return value.Null(), value.ErrAttributeNotFound } return i.inputs[idx].Value, nil } func (i *Inputs) Get(key value.Value) (value.Value, error) { if key.Kind() != value.StringKind { return value.Null(), fmt.Errorf("%w: object requires string key not %v", value.ErrInvalidKey, key.Kind()) } return i.Attr(key.String()) } func (i *Inputs) Keys() iter.Seq[value.Value] { return func(yield func(value.Value) bool) { for _, v := range i.inputs { if !yield(value.String(v.Key)) { return } } } } func (i *Inputs) Len() int { return len(i.inputs) } func (i *Inputs) Values() iter.Seq[value.Value] { return func(yield func(value.Value) bool) { for _, v := range i.inputs { if !yield(v.Value) { return } } } } func (i *Inputs) WithMarks(marks uint16) value.Mapper { // FIXME: what should we do here ... 
	return i
}

// SetMetricsCollector injects the metrics collector
func (i *Inputs) SetMetricsCollector(collector *JobInputsMetricsCollector) {
	i.metricsCollector = collector
}

// Expand interpolates `${{ ... }}` templates in text against the job
// inputs. A nil receiver, missing evaluator, or empty input set returns
// the text unchanged. Sensitive results are rejected with
// ErrSensitiveUnsupported.
func (i *Inputs) Expand(text string) (string, error) {
	if i == nil || i.evaluator == nil {
		return text, nil
	}

	// NOTE: check if we don't have any inputs defined to interpolate
	// We do this to avoid a breaking change when a user already uses
	// job input interpolation syntax (`${{ .. }}`) but doesn't actually
	// want to use them. This hides potential errors when a user forgets
	// to define inputs - but that's easier to debug and not a breaking
	// change once GitLab enables job inputs but rather at the point in
	// time when the user wants to use job inputs.
	// For context see:
	// https://gitlab.com/gitlab-org/step-runner/-/work_items/369
	if len(i.inputs) == 0 {
		return text, nil
	}

	expr, err := moa.ParseTemplate(text)
	if err != nil {
		// metricsCollector methods are nil-safe, so recording is
		// unconditional here.
		i.metricsCollector.recordParseError()
		return "", &InputInterpolationError{err: err}
	}

	result, err := i.evaluator.Eval(text, expr)
	if err != nil {
		i.metricsCollector.recordEvalError()
		return "", &InputInterpolationError{err: err}
	}

	if result.HasMarks(expression.Sensitive) {
		i.metricsCollector.recordSensitiveUnsupportedError()
		return "", ErrSensitiveUnsupported
	}

	resultString := result.String()

	if _, walkErr := expr.Walk(&interpolationDetector{}); errors.Is(walkErr, errInterpolationFound) {
		// Only count as an interpolation if at least one interpolation was actually present in the AST
		i.metricsCollector.recordSuccess()
	}

	return resultString, nil
}

// ExpandInputs walks the struct pointed to by v and expands job input
// interpolations in every field tagged `inputs:"expand"`. v must be a
// struct or a pointer to one.
func ExpandInputs(inputs *Inputs, v any) error {
	rv := reflect.ValueOf(v)
	if rv.Kind() == reflect.Ptr {
		rv = rv.Elem()
	}

	if rv.Kind() != reflect.Struct {
		return fmt.Errorf("expected struct, got %s", rv.Kind())
	}

	err := processStruct(inputs, rv)
	if err != nil {
		// Surface interpolation errors unwrapped so callers can detect them.
		e := &InputInterpolationError{}
		if errors.As(err, &e) {
			return e
		}
		return err
	}

	return nil
}

// processStruct expands one struct value: first via the InputExpander
// interface when implemented, otherwise field-by-field for tagged fields
// (strings, nested structs, and slices thereof).
//nolint:gocognit
func processStruct(inputs *Inputs, rv reflect.Value) error {
	err := tryExpanderInterface(inputs, rv)
	switch {
	case errors.Is(err, errInputExpanderNotSupported):
	case err != nil:
		return err
	default:
		// Successfully expanded using the interface
		return nil
	}

	rt := rv.Type()
	for i := 0; i < rv.NumField(); i++ {
		field := rv.Field(i)
		// Skip unexported fields; they cannot be read or set via reflection.
		if !field.CanInterface() {
			continue
		}

		fieldType := rt.Field(i)
		inputsTag := fieldType.Tag.Get("inputs")
		if inputsTag != "expand" {
			continue
		}

		err := tryExpanderInterface(inputs, field)
		switch {
		case errors.Is(err, errInputExpanderNotSupported):
		case err != nil:
			return err
		default:
			// Successfully expanded using the interface
			continue
		}

		switch field.Kind() {
		case reflect.String:
			if err := expandStringField(inputs, field); err != nil {
				return fmt.Errorf("failed to expand string field %s: %w", fieldType.Name, err)
			}
		case reflect.Struct:
			if err := processStruct(inputs, field); err != nil {
				return fmt.Errorf("failed to process struct field %s: %w", fieldType.Name, err)
			}
		case reflect.Slice:
			if err := expandSlice(inputs, field); err != nil {
				return fmt.Errorf("failed to expand slice field %s: %w", fieldType.Name, err)
			}
		default:
			return fmt.Errorf("field %s has inputs:expand tag but is neither string-based nor struct (type: %s)", fieldType.Name, field.Type())
		}
	}

	return nil
}

// tryExpanderInterface invokes InputExpander when the field (or its
// address) implements it; otherwise returns errInputExpanderNotSupported.
func tryExpanderInterface(inputs *Inputs, field reflect.Value) error {
	var fieldInterface any

	// We need to get the address if possible since methods might be on pointer receiver
	if field.CanAddr() {
		fieldInterface = field.Addr().Interface()
	} else {
		fieldInterface = field.Interface()
	}

	expander, ok := fieldInterface.(InputExpander)
	if !ok {
		return errInputExpanderNotSupported
	}

	return expander.Expand(inputs)
}

// expandStringField expands a string-based field
func expandStringField(inputs *Inputs, field reflect.Value) error {
	if !field.CanAddr() {
		return errors.New("field is not addressable")
	}
	if !field.CanSet() {
		return errors.New("field is not settable")
	}

	expandedValue, err := inputs.Expand(field.String())
	if err != nil {
		return err
	}

	field.SetString(expandedValue)
	return nil
}

// expandSlice dispatches slice expansion by element kind (string or struct).
func expandSlice(inputs *Inputs, field reflect.Value) error {
	if field.Len() == 0 {
		return nil
	}

	elemType := field.Type().Elem()
	switch elemType.Kind() {
	case reflect.String:
		return expandStringSlice(inputs, field)
	case reflect.Struct:
		return expandStructSlice(inputs, field)
	default:
		return fmt.Errorf("slice elements must be either strings or structs (element type: %s)", elemType)
	}
}

// expandStringSlice expands every element of a string slice in place.
func expandStringSlice(inputs *Inputs, field reflect.Value) error {
	for i := range field.Len() {
		elem := field.Index(i)
		if err := expandStringField(inputs, elem); err != nil {
			return fmt.Errorf("failed to expand element %d: %w", i, err)
		}
	}
	return nil
}

// expandStructSlice expands every struct element of a slice in place.
func expandStructSlice(inputs *Inputs, field reflect.Value) error {
	for i := range field.Len() {
		elem := field.Index(i)
		if err := processStruct(inputs, elem); err != nil {
			return fmt.Errorf("failed to process struct element %d: %w", i, err)
		}
	}
	return nil
}



================================================
FILE: common/spec/inputs_metrics.go
================================================
package spec

import "github.com/prometheus/client_golang/prometheus"

const (
	// Error type labels for interpolation failures
	interpolationErrorTypeParse                = "parse"
	interpolationErrorTypeEvaluation           = "evaluation"
	interpolationErrorTypeSensitiveUnsupported = "sensitive_unsupported"
)

// JobInputsMetricsCollector tracks job input interpolation outcomes as
// Prometheus counters.
type JobInputsMetricsCollector struct {
	interpolations        prometheus.Counter
	interpolationFailures *prometheus.CounterVec
}

// NewJobInputsMetricsCollector creates the collector with its counters.
func NewJobInputsMetricsCollector() *JobInputsMetricsCollector {
	return &JobInputsMetricsCollector{
		interpolations: prometheus.NewCounter(prometheus.CounterOpts{
			Name: "gitlab_runner_job_inputs_interpolations_total",
			Help: "Total number of job input interpolations where expressions were actually used (output differs from input)",
		}),
		interpolationFailures: prometheus.NewCounterVec(
			prometheus.CounterOpts{
				Name: "gitlab_runner_job_inputs_interpolation_failures_total",
				Help: "Total number of failed job input
interpolations, labeled by error type",
			},
			[]string{"error_type"},
		),
	}
}

// Describe implements prometheus.Collector.
func (c *JobInputsMetricsCollector) Describe(descs chan<- *prometheus.Desc) {
	c.interpolations.Describe(descs)
	c.interpolationFailures.Describe(descs)
}

// Collect implements prometheus.Collector.
func (c *JobInputsMetricsCollector) Collect(metrics chan<- prometheus.Metric) {
	c.interpolations.Collect(metrics)
	c.interpolationFailures.Collect(metrics)
}

// recordSuccess increments the successful interpolations counter.
// All record* methods are nil-safe so callers need not guard against a
// missing collector.
func (c *JobInputsMetricsCollector) recordSuccess() {
	if c == nil {
		return
	}
	c.interpolations.Inc()
}

// recordParseError increments the parse error counter
func (c *JobInputsMetricsCollector) recordParseError() {
	if c == nil {
		return
	}
	c.interpolationFailures.WithLabelValues(interpolationErrorTypeParse).Inc()
}

// recordEvalError increments the evaluation error counter
func (c *JobInputsMetricsCollector) recordEvalError() {
	if c == nil {
		return
	}
	c.interpolationFailures.WithLabelValues(interpolationErrorTypeEvaluation).Inc()
}

// recordSensitiveUnsupportedError increments the sensitive input error counter
func (c *JobInputsMetricsCollector) recordSensitiveUnsupportedError() {
	if c == nil {
		return
	}
	c.interpolationFailures.WithLabelValues(interpolationErrorTypeSensitiveUnsupported).Inc()
}



================================================
FILE: common/spec/inputs_metrics_test.go
================================================
//go:build !integration

package spec

import (
	"fmt"
	"testing"

	"github.com/prometheus/client_golang/prometheus"
	dto "github.com/prometheus/client_model/go"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

var (
	_ prometheus.Collector = (*JobInputsMetricsCollector)(nil)
)

func TestJobInputsInterpolationMetrics(t *testing.T) {
	testMetrics := NewJobInputsMetricsCollector()

	t.Run("tracks successful interpolation when output differs", func(t *testing.T) {
		inputs := prepareTestInputs(t, `[
			{
"key": "name", "value": { "type": "string", "content": "John", "sensitive": false } } ]`) inputs.SetMetricsCollector(testMetrics) beforeCount := getCounterValue(t, testMetrics.interpolations) result, err := inputs.Expand("Hello ${{ job.inputs.name }}") require.NoError(t, err) assert.Equal(t, "Hello John", result) afterCount := getCounterValue(t, testMetrics.interpolations) assert.Equal(t, beforeCount+1, afterCount, "should increment interpolations counter") }) t.Run("does not track when output is same as input", func(t *testing.T) { inputs := prepareTestInputs(t, `[ { "key": "name", "value": { "type": "string", "content": "John", "sensitive": false } } ]`) inputs.SetMetricsCollector(testMetrics) beforeCount := getCounterValue(t, testMetrics.interpolations) result, err := inputs.Expand("Hello World") require.NoError(t, err) assert.Equal(t, "Hello World", result) afterCount := getCounterValue(t, testMetrics.interpolations) assert.Equal(t, beforeCount, afterCount, "should not increment interpolations counter when no expression is used") }) t.Run("tracks errors", func(t *testing.T) { tests := []struct { inputs string typ string text string }{ { inputs: ` [ { "key": "name", "value": { "type": "string", "content": "John", "sensitive": false } } ] `, typ: interpolationErrorTypeParse, text: "Hello ${{ job.inputs.name + }}", }, { inputs: ` [ { "key": "name", "value": { "type": "string", "content": "John", "sensitive": false } } ] `, typ: interpolationErrorTypeEvaluation, text: "Hello ${{ job.inputs.nonexistent }}", }, { inputs: ` [ { "key": "name", "value": { "type": "string", "content": "John", "sensitive": true } } ] `, typ: interpolationErrorTypeSensitiveUnsupported, text: "Hello ${{ job.inputs.name }}", }, } for _, tt := range tests { t.Run(fmt.Sprintf("error type %s", tt.typ), func(t *testing.T) { inputs := prepareTestInputs(t, tt.inputs) inputs.SetMetricsCollector(testMetrics) beforeCount := getCounterValueWithLabel(t, testMetrics.interpolationFailures, tt.typ) _, err 
:= inputs.Expand(tt.text) require.Error(t, err) afterCount := getCounterValueWithLabel(t, testMetrics.interpolationFailures, tt.typ) assert.Equal(t, beforeCount+1, afterCount) }) } }) } func prepareTestInputs(t *testing.T, jsonData string) *Inputs { t.Helper() var inputs Inputs err := inputs.UnmarshalJSON([]byte(jsonData)) require.NoError(t, err) return &inputs } func getCounterValue(t *testing.T, counter prometheus.Counter) float64 { t.Helper() metric := &dto.Metric{} err := counter.Write(metric) require.NoError(t, err) return metric.Counter.GetValue() } func getCounterValueWithLabel(t *testing.T, counterVec *prometheus.CounterVec, labelValue string) float64 { t.Helper() counter := counterVec.WithLabelValues(labelValue) return getCounterValue(t, counter) } ================================================ FILE: common/spec/inputs_test.go ================================================ //go:build !integration package spec import ( "encoding/json" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/moa/value" ) var ( _ value.Mapper = (*Inputs)(nil) ) // TODO: to be replaced, but used here for quick testing // From: https://gitlab.com/gitlab-org/gitlab/-/issues/543972 // NOTE: all non-string and sensitive inputs have been removed. 
const complexExampleInputs = ` [ { "key": "username", "value": { "type": "string", "content": "fred", "sensitive": false } }, { "key": "fullname", "value": { "type": "string", "content": "fred tester", "sensitive": false } }, { "key": "password", "value": { "type": "string", "content": "123456", "sensitive": true } }, { "key": "age", "value": { "type": "number", "content": 1, "sensitive": false } }, { "key": "likes_spaghetti", "value": { "type": "boolean", "content": false, "sensitive": false } }, { "key": "friends", "value": { "type": "array", "content": [ "bob", "sally" ], "sensitive": false } }, { "key": "address", "value": { "type": "struct", "content": { "line1": "42 Wallaby Way", "line2": "Sydney" }, "sensitive": false } } ] ` func TestJobInputs_Unmarshalling(t *testing.T) { t.Parallel() inputData := []byte(complexExampleInputs) inputs := Inputs{} err := json.Unmarshal(inputData, &inputs) require.NoError(t, err) assert.Equal(t, 7, inputs.Len()) keys := make([]string, 0, inputs.Len()) for i := range inputs.Keys() { keys = append(keys, i.String()) } assert.ElementsMatch(t, []string{"username", "fullname", "password", "age", "likes_spaghetti", "friends", "address"}, keys) } func TestJobInputs_Unmarshalling_Sensitive(t *testing.T) { t.Parallel() inputData := []byte(` [ { "key": "anykey-implicit-no-sensitive", "value": { "type": "string", "content": "any" } }, { "key": "anykey-explicit-no-sensitive", "value": { "type": "string", "content": "any", "sensitive": false } }, { "key": "anykey-explicit-sensitive", "value": { "type": "string", "content": "any", "sensitive": true } } ] `) inputs := Inputs{} err := json.Unmarshal(inputData, &inputs) require.NoError(t, err) assert.Equal(t, 3, inputs.Len()) assert.False(t, inputs.inputs[0].Sensitive()) assert.False(t, inputs.inputs[1].Sensitive()) assert.True(t, inputs.inputs[2].Sensitive()) } func TestJobInputs_Unmarshalling_Error(t *testing.T) { t.Parallel() tests := []struct { name string inputData []byte expectedError 
string }{ { name: "empty input", inputData: []byte(` [ {} ] `), expectedError: `input without key`, }, { name: "input without value", inputData: []byte(` [ { "key": "anykey" } ] `), expectedError: `input "anykey" is null, must have valid value`, }, { name: "input with empty value", inputData: []byte(` [ { "key": "anykey", "value": {} } ] `), expectedError: `input "anykey" is null, must have valid value`, }, { name: "input without type", inputData: []byte(` [ { "key": "anykey", "value": { "content": "any" } } ] `), expectedError: `invalid type "" for input "anykey": type is unknown`, }, { name: "input without content", inputData: []byte(` [ { "key": "anykey", "value": { "type": "string" } } ] `), expectedError: `input "anykey" is null, must have valid value`, }, { name: "input with invalid type", inputData: []byte(` [ { "key": "anykey", "value": { "type": "unexisting-type", "content": "any" } } ] `), expectedError: `invalid type "unexisting-type" for input "anykey": type is unknown`, }, { name: "input with mismatching type", inputData: []byte(` [ { "key": "anykey", "value": { "type": "number", "content": "any" } } ] `), expectedError: `mismatching type of input "anykey". 
Announced "number", but got "string"`, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { t.Parallel() inputs := Inputs{} err := json.Unmarshal(tt.inputData, &inputs) assert.EqualError(t, err, tt.expectedError) }) } } func TestJobInputs_Expand_string(t *testing.T) { t.Parallel() inputData := []byte(complexExampleInputs) inputs := Inputs{} err := json.Unmarshal(inputData, &inputs) require.NoError(t, err) expanded, err := inputs.Expand("Hello ${{ job.inputs.username }}, your fullname is ${{ job.inputs.fullname }}") require.NoError(t, err) assert.Equal(t, "Hello fred, your fullname is fred tester", expanded) } func TestJobInputs_Expand_sensitive_string_reject(t *testing.T) { t.Parallel() inputData := []byte(complexExampleInputs) inputs := Inputs{} err := json.Unmarshal(inputData, &inputs) require.NoError(t, err) _, err = inputs.Expand("Hello ${{ job.inputs.username }}, your password is ${{ job.inputs.password }}") require.ErrorIs(t, err, ErrSensitiveUnsupported) } func TestJobInputs_Expand_nonstring(t *testing.T) { t.Parallel() inputData := []byte(complexExampleInputs) inputs := Inputs{} err := json.Unmarshal(inputData, &inputs) require.NoError(t, err) expanded, err := inputs.Expand("Hello ${{ job.inputs.username }}, your age is ${{ str(job.inputs.age) }}") require.NoError(t, err) assert.Equal(t, "Hello fred, your age is 1", expanded) } func TestJobInputs_Expand_ArrayElement(t *testing.T) { t.Parallel() inputData := []byte(` [ { "key": "field", "value": { "type": "array", "content": ["one", "two", "three"] } } ] `) inputs := Inputs{} err := json.Unmarshal(inputData, &inputs) require.NoError(t, err) expanded, err := inputs.Expand("Field is ${{ job.inputs.field[1] }}") require.NoError(t, err) assert.Equal(t, "Field is two", expanded) } func TestJobInputs_Expand_StructField(t *testing.T) { t.Parallel() inputData := []byte(` [ { "key": "field", "value": { "type": "struct", "content": { "line1": "Streetname 1", "line2": "1234 ..." 
}, "sensitive": false } } ] `) inputs := Inputs{} err := json.Unmarshal(inputData, &inputs) require.NoError(t, err) expanded, err := inputs.Expand("Field is ${{ job.inputs.field.line1 }}") require.NoError(t, err) assert.Equal(t, "Field is Streetname 1", expanded) } type customInputExpander string func (c *customInputExpander) Expand(inputs *Inputs) error { *c = "REDACTED" return nil } func TestInputsTag(t *testing.T) { type MyString string type JobResponse struct { StringToExpand string `inputs:"expand"` StringNotToExpand string CustomStringToExpand MyString `inputs:"expand"` CustomStringNotToExpand MyString StructToExpand struct { StringToExpand string `inputs:"expand"` StringNotToExpand string } `inputs:"expand"` StructNotToExpand struct { StringToExpand string `inputs:"expand"` StringNotToExpand string } SliceToExpand []string `inputs:"expand"` SliceNotToExpand []string CustomInputExpanderToExpand customInputExpander `inputs:"expand"` CustomInputExpanderNotToExpand customInputExpander } jobResponse := JobResponse{ StringToExpand: "${{ job.inputs.any }}", StringNotToExpand: "${{ job.inputs.any }}", CustomStringToExpand: "${{ job.inputs.any }}", CustomStringNotToExpand: "${{ job.inputs.any }}", StructToExpand: struct { StringToExpand string "inputs:\"expand\"" StringNotToExpand string }{ StringToExpand: "${{ job.inputs.any }}", StringNotToExpand: "${{ job.inputs.any }}", }, StructNotToExpand: struct { StringToExpand string "inputs:\"expand\"" StringNotToExpand string }{ StringToExpand: "${{ job.inputs.any }}", StringNotToExpand: "${{ job.inputs.any }}", }, SliceToExpand: []string{"${{ job.inputs.any }}", "${{ job.inputs.any }}"}, SliceNotToExpand: []string{"${{ job.inputs.any }}", "${{ job.inputs.any }}"}, CustomInputExpanderToExpand: "${{ job.inputs.any }}", CustomInputExpanderNotToExpand: "${{ job.inputs.any }}", } inputs, err := NewJobInputs([]JobInput{ { Key: "any", Value: JobInputValue{ Type: JobInputContentTypeNameString, Content: value.String("value"), 
Sensitive: false, }, }, }) require.NoError(t, err) err = ExpandInputs(&inputs, &jobResponse) require.NoError(t, err) assert.Equal(t, "value", jobResponse.StringToExpand) assert.Equal(t, "${{ job.inputs.any }}", jobResponse.StringNotToExpand) assert.Equal(t, MyString("value"), jobResponse.CustomStringToExpand) assert.Equal(t, MyString("${{ job.inputs.any }}"), jobResponse.CustomStringNotToExpand) assert.Equal(t, "value", jobResponse.StructToExpand.StringToExpand) assert.Equal(t, "${{ job.inputs.any }}", jobResponse.StructToExpand.StringNotToExpand) assert.Equal(t, "${{ job.inputs.any }}", jobResponse.StructNotToExpand.StringToExpand) assert.Equal(t, "${{ job.inputs.any }}", jobResponse.StructNotToExpand.StringNotToExpand) assert.Equal(t, []string{"value", "value"}, jobResponse.SliceToExpand) assert.Equal(t, []string{"${{ job.inputs.any }}", "${{ job.inputs.any }}"}, jobResponse.SliceNotToExpand) assert.Equal(t, customInputExpander("REDACTED"), jobResponse.CustomInputExpanderToExpand) assert.Equal(t, customInputExpander("${{ job.inputs.any }}"), jobResponse.CustomInputExpanderNotToExpand) } func TestJobInputs_Expand_NoInputsDefined(t *testing.T) { t.Parallel() tests := []struct { name string text string expected string }{ { name: "no job input access, invalid moa syntax", text: "Hello $", expected: "Hello $", }, { name: "no job input access", text: "Hello ${{ 1 + 2 }}", expected: "Hello ${{ 1 + 2 }}", }, { name: "with job input access", text: "Hello ${{ job.inputs.username }}", expected: "Hello ${{ job.inputs.username }}", }, { name: "plain text", text: "Hello world", expected: "Hello world", }, { name: "other selector", text: "${{ foo.bar.baz }}", expected: "${{ foo.bar.baz }}", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { t.Parallel() inputs := Inputs{} inputs.SetMetricsCollector(NewJobInputsMetricsCollector()) expanded, err := inputs.Expand(tt.text) require.NoError(t, err) assert.Equal(t, tt.expected, expanded) }) } } 
================================================ FILE: common/spec/mocks.go ================================================ // Code generated by mockery; DO NOT EDIT. // github.com/vektra/mockery // template: testify package spec import ( mock "github.com/stretchr/testify/mock" ) // NewMockInputExpander creates a new instance of MockInputExpander. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockInputExpander(t interface { mock.TestingT Cleanup(func()) }) *MockInputExpander { mock := &MockInputExpander{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockInputExpander is an autogenerated mock type for the InputExpander type type MockInputExpander struct { mock.Mock } type MockInputExpander_Expecter struct { mock *mock.Mock } func (_m *MockInputExpander) EXPECT() *MockInputExpander_Expecter { return &MockInputExpander_Expecter{mock: &_m.Mock} } // Expand provides a mock function for the type MockInputExpander func (_mock *MockInputExpander) Expand(inputs *Inputs) error { ret := _mock.Called(inputs) if len(ret) == 0 { panic("no return value specified for Expand") } var r0 error if returnFunc, ok := ret.Get(0).(func(*Inputs) error); ok { r0 = returnFunc(inputs) } else { r0 = ret.Error(0) } return r0 } // MockInputExpander_Expand_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Expand' type MockInputExpander_Expand_Call struct { *mock.Call } // Expand is a helper method to define mock.On call // - inputs *Inputs func (_e *MockInputExpander_Expecter) Expand(inputs interface{}) *MockInputExpander_Expand_Call { return &MockInputExpander_Expand_Call{Call: _e.mock.On("Expand", inputs)} } func (_c *MockInputExpander_Expand_Call) Run(run func(inputs *Inputs)) *MockInputExpander_Expand_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 *Inputs if args[0] != nil { arg0 = 
args[0].(*Inputs) } run( arg0, ) }) return _c } func (_c *MockInputExpander_Expand_Call) Return(err error) *MockInputExpander_Expand_Call { _c.Call.Return(err) return _c } func (_c *MockInputExpander_Expand_Call) RunAndReturn(run func(inputs *Inputs) error) *MockInputExpander_Expand_Call { _c.Call.Return(run) return _c } ================================================ FILE: common/spec/spec.go ================================================ package spec import ( "encoding/json" "errors" "fmt" "slices" "sort" "strconv" "strings" url_helpers "gitlab.com/gitlab-org/gitlab-runner/helpers/url" "gitlab.com/gitlab-org/gitlab-runner/helpers/vault/auth_methods" "gitlab.com/gitlab-org/step-runner/schema/v1" ) type JobFailureReason string func (r JobFailureReason) String() string { return string(r) } type JobInfo struct { Name string `json:"name"` Stage string `json:"stage"` PipelineID int64 `json:"pipeline_id"` ProjectID int64 `json:"project_id"` ProjectName string `json:"project_name"` ProjectFullPath string `json:"project_full_path"` NamespaceID int64 `json:"namespace_id"` RootNamespaceID int64 `json:"root_namespace_id"` OrganizationID int64 `json:"organization_id"` InstanceID string `json:"instance_id"` InstanceUUID string `json:"instance_uuid"` UserID int64 `json:"user_id"` ScopedUserID *int64 `json:"scoped_user_id,omitempty"` TimeInQueueSeconds float64 `json:"time_in_queue_seconds"` ProjectJobsRunningOnInstanceRunnersCount string `json:"project_jobs_running_on_instance_runners_count"` QueueSize int64 `json:"queue_size"` QueueDepth int64 `json:"queue_depth"` } type GitInfoRefType string const ( RefTypeBranch GitInfoRefType = "branch" RefTypeTag GitInfoRefType = "tag" ) type GitInfo struct { RepoURL string `json:"repo_url"` RepoObjectFormat string `json:"repo_object_format"` Ref string `json:"ref"` Sha string `json:"sha"` BeforeSha string `json:"before_sha"` RefType GitInfoRefType `json:"ref_type"` Refspecs []string `json:"refspecs"` Depth int `json:"depth"` Protected 
*bool `json:"protected"` } type Variable struct { Key string `json:"key"` Value string `json:"value"` Public bool `json:"public"` Internal bool `json:"-"` File bool `json:"file"` Masked bool `json:"masked"` Raw bool `json:"raw"` } // RunnerInfo contains runner-specific metadata sent as part of the job payload. type RunnerInfo struct { Timeout int `json:"timeout"` } type StepScript []string type StepName string const ( StepNameRun StepName = "run" StepNameScript StepName = "script" StepNameAfterScript StepName = "after_script" ) type StepWhen string const ( StepWhenOnFailure StepWhen = "on_failure" StepWhenOnSuccess StepWhen = "on_success" StepWhenAlways StepWhen = "always" ) type CachePolicy string const ( CachePolicyUndefined CachePolicy = "" CachePolicyPullPush CachePolicy = "pull-push" CachePolicyPull CachePolicy = "pull" CachePolicyPush CachePolicy = "push" ) type Step struct { Name StepName `json:"name"` Script StepScript `json:"script" inputs:"expand"` Timeout int `json:"timeout"` When StepWhen `json:"when"` AllowFailure bool `json:"allow_failure"` } func (s *Step) Expand(inputs *Inputs) error { switch s.Name { case StepNameScript: case StepNameAfterScript: default: // Step name not supported return nil } type alias Step return ExpandInputs(inputs, (*alias)(s)) } type Steps []Step type ( UnsuportedExecutorOptionsError struct { executor, section string unsupportedOptions, supportedOptions []string } executorOptions struct { unsupportedOptions error } ) func (ueoe *UnsuportedExecutorOptionsError) Error() string { return fmt.Sprintf("Unsupported %q options %v for %q; supported options are %v", ueoe.section, ueoe.unsupportedOptions, ueoe.executor, ueoe.supportedOptions) } func (eo *executorOptions) validate(data []byte, supportedOptions []string, executor, section string) error { options := map[string]any{} if err := json.Unmarshal(data, &options); err != nil { // this can't happen return nil } notSupported := []string{} for opt := range options { if 
!slices.Contains(supportedOptions, opt) { notSupported = append(notSupported, opt) } } if len(notSupported) != 0 { sort.Strings(supportedOptions) return &UnsuportedExecutorOptionsError{ executor: executor, section: section, unsupportedOptions: notSupported, supportedOptions: supportedOptions, } } return nil } func (eo *executorOptions) UnsupportedOptions() error { return eo.unsupportedOptions } var supportedExecutorOptions = map[string][]string{ "docker": {"platform", "user"}, "kubernetes": {"user"}, } type ( ImageDockerOptions struct { executorOptions Platform string `json:"platform" inputs:"expand"` User StringOrInt64 `json:"user" inputs:"expand"` } StringOrInt64 string ImageKubernetesOptions struct { executorOptions User StringOrInt64 `json:"user" inputs:"expand"` } ImageExecutorOptions struct { executorOptions Docker ImageDockerOptions `json:"docker,omitempty" inputs:"expand"` Kubernetes ImageKubernetesOptions `json:"kubernetes,omitempty" inputs:"expand"` } ) func mapKeys[K comparable, V any](m map[K]V) []K { keys := make([]K, 0, len(m)) for k := range m { keys = append(keys, k) } return keys } func (ido *ImageDockerOptions) UnmarshalJSON(data []byte) error { type imageDockerOptions ImageDockerOptions inner := imageDockerOptions{} if err := json.Unmarshal(data, &inner); err != nil { return err } *ido = ImageDockerOptions(inner) // call validate after json.Unmarshal so the former handles bad json. 
ido.unsupportedOptions = ido.validate(data, supportedExecutorOptions["docker"], "docker executor", "image") return nil } func (ido *ImageDockerOptions) Expand(vars Variables) ImageDockerOptions { return ImageDockerOptions{ Platform: vars.ExpandValue(ido.Platform), User: StringOrInt64(vars.ExpandValue(string(ido.User))), } } func (iko *ImageKubernetesOptions) UnmarshalJSON(data []byte) error { type imageKubernetesOptions ImageKubernetesOptions inner := imageKubernetesOptions{} if err := json.Unmarshal(data, &inner); err != nil { return err } *iko = ImageKubernetesOptions(inner) // call validate after json.Unmarshal so the former handles bad json. iko.unsupportedOptions = iko.validate(data, supportedExecutorOptions["kubernetes"], "kubernetes executor", "image") return nil } func (iko *ImageKubernetesOptions) Expand(vars Variables) ImageKubernetesOptions { return ImageKubernetesOptions{ User: StringOrInt64(vars.ExpandValue(string(iko.User))), } } func (iko *ImageKubernetesOptions) GetUIDGID() (int64, int64, error) { if iko.User == "" { return 0, 0, nil } user, group, ok := strings.Cut(string(iko.User), ":") uid, err := strconv.ParseInt(user, 10, 64) if err != nil { return 0, 0, fmt.Errorf("failed to parse UID %w", err) } var gid int64 if ok { gid, err = strconv.ParseInt(group, 10, 64) if err != nil { return 0, 0, fmt.Errorf("failed to parse GID %w", err) } } return uid, gid, err } func (si *StringOrInt64) UnmarshalJSON(data []byte) error { var s string if err := json.Unmarshal(data, &s); err == nil { *si = StringOrInt64(s) return nil } var i int64 if err := json.Unmarshal(data, &i); err == nil { *si = StringOrInt64(strconv.FormatInt(i, 10)) return nil } return fmt.Errorf("StringOrInt: input not string or integer") } func (ieo *ImageExecutorOptions) UnmarshalJSON(data []byte) error { type imageExecutorOptions ImageExecutorOptions inner := imageExecutorOptions{} if err := json.Unmarshal(data, &inner); err != nil { return err } *ieo = ImageExecutorOptions(inner) // call 
validate after json.Unmarshal so the former handles bad json. ieo.unsupportedOptions = ieo.validate(data, mapKeys(supportedExecutorOptions), "executor_opts", "image") return nil } func (ieo *ImageExecutorOptions) UnsupportedOptions() error { return errors.Join( ieo.executorOptions.UnsupportedOptions(), ieo.Docker.UnsupportedOptions(), ieo.Kubernetes.UnsupportedOptions(), ) } type PullPolicy string type Image struct { Name string `json:"name" inputs:"expand"` Alias string `json:"alias,omitempty"` Command []string `json:"command,omitempty" inputs:"expand"` Entrypoint []string `json:"entrypoint,omitempty" inputs:"expand"` Ports []Port `json:"ports,omitempty"` Variables Variables `json:"variables,omitempty"` PullPolicies []PullPolicy `json:"pull_policy,omitempty" inputs:"expand"` ExecutorOptions ImageExecutorOptions `json:"executor_opts,omitempty" inputs:"expand"` } func (i *Image) Aliases() []string { return strings.Fields(strings.ReplaceAll(i.Alias, ",", " ")) } func (i *Image) UnsupportedOptions() error { return i.ExecutorOptions.UnsupportedOptions() } type Port struct { Number int `json:"number,omitempty"` Protocol string `json:"protocol,omitempty"` Name string `json:"name,omitempty"` } type Services []Image func (s Services) UnsupportedOptions() error { errs := make([]error, 0, len(s)) for _, i := range s { errs = append(errs, i.UnsupportedOptions()) } return errors.Join(errs...) 
} type ArtifactPaths []string type ArtifactExclude []string type ArtifactWhen string const ( ArtifactWhenOnFailure ArtifactWhen = "on_failure" ArtifactWhenOnSuccess ArtifactWhen = "on_success" ArtifactWhenAlways ArtifactWhen = "always" ) func (when ArtifactWhen) OnSuccess() bool { return when == "" || when == ArtifactWhenOnSuccess || when == ArtifactWhenAlways } func (when ArtifactWhen) OnFailure() bool { return when == ArtifactWhenOnFailure || when == ArtifactWhenAlways } type ArtifactFormat string const ( ArtifactFormatDefault ArtifactFormat = "" ArtifactFormatZip ArtifactFormat = "zip" ArtifactFormatGzip ArtifactFormat = "gzip" ArtifactFormatRaw ArtifactFormat = "raw" ArtifactFormatZipZstd ArtifactFormat = "zipzstd" ArtifactFormatTarZstd ArtifactFormat = "tarzstd" ) type Artifact struct { Name string `json:"name" inputs:"expand"` Untracked bool `json:"untracked"` Paths ArtifactPaths `json:"paths" inputs:"expand"` Exclude ArtifactExclude `json:"exclude" inputs:"expand"` When ArtifactWhen `json:"when" inputs:"expand"` Type string `json:"artifact_type"` Format ArtifactFormat `json:"artifact_format"` ExpireIn string `json:"expire_in" inputs:"expand"` } type Artifacts []Artifact type PolicyOptions struct { PolicyJob bool `json:"execution_policy_job"` Name string `json:"policy_name"` VariableOverrideAllowed *bool `json:"policy_variables_override_allowed,omitempty"` VariableOverrideExceptions []string `json:"policy_variables_override_exceptions,omitempty"` } type Cache struct { Key string `json:"key" inputs:"expand"` Untracked bool `json:"untracked"` Policy CachePolicy `json:"policy" inputs:"expand"` Paths ArtifactPaths `json:"paths" inputs:"expand"` When CacheWhen `json:"when" inputs:"expand"` FallbackKeys CacheFallbackKeys `json:"fallback_keys" inputs:"expand"` } type ( CacheWhen string CacheFallbackKeys []string ) const ( CacheWhenOnFailure CacheWhen = "on_failure" CacheWhenOnSuccess CacheWhen = "on_success" CacheWhenAlways CacheWhen = "always" ) func (when 
CacheWhen) ShouldCache(jobSuccess bool) bool { if jobSuccess { return when.OnSuccess() } return when.OnFailure() } func (when CacheWhen) OnSuccess() bool { return when == "" || when == CacheWhenOnSuccess || when == CacheWhenAlways } func (when CacheWhen) OnFailure() bool { return when == CacheWhenOnFailure || when == CacheWhenAlways } func (c Cache) CheckPolicy(wanted CachePolicy) (bool, error) { switch c.Policy { case CachePolicyUndefined, CachePolicyPullPush: return true, nil case CachePolicyPull, CachePolicyPush: return wanted == c.Policy, nil } return false, fmt.Errorf("unknown cache policy %s", c.Policy) } type Caches []Cache type Credentials struct { Type string `json:"type"` URL string `json:"url"` Username string `json:"username"` Password string `json:"password"` } type DependencyArtifactsFile struct { Filename string `json:"filename"` Size int64 `json:"size"` } type Dependency struct { ID int64 `json:"id"` Token string `json:"token"` Name string `json:"name"` ArtifactsFile DependencyArtifactsFile `json:"artifacts_file"` } type Dependencies []Dependency type Tracing struct { TraceID string `json:"trace_id"` SpanParentID string `json:"span_parent_id"` OTELEndpoints []OTELEndpoint `json:"otel_endpoints"` } type OTELEndpoint struct { URL string `json:"url"` Auth *OTELEndpointAuth `json:"auth"` } type OTELEndpointAuth struct { Type string `json:"type"` HTTPBearerGCPOIDC *HTTPBearerGCPOIDCAuth `json:"http_bearer_gcp_oidc"` } type HTTPBearerGCPOIDCAuth struct { Audience string `json:"audience"` } type GitlabFeatures struct { TraceSections bool `json:"trace_sections"` TokenMaskPrefixes []string `json:"token_mask_prefixes"` FailureReasons []JobFailureReason `json:"failure_reasons"` Tracing *Tracing `json:"tracing"` } type Hooks []Hook type Hook struct { Name HookName `json:"name"` Script StepScript `json:"script"` } type HookName string const ( HookPreGetSourcesScript HookName = "pre_get_sources_script" HookPostGetSourcesScript HookName = "post_get_sources_script" 
) func (hooks Hooks) Get(name HookName) Hook { for _, hook := range hooks { if hook.Name == name { return hook } } return Hook{} } type TLSData struct { CAChain string `json:"-"` AuthCert string `json:"-"` AuthKey string `json:"-"` } type Job struct { ID int64 `json:"id"` Token string `json:"token"` AllowGitFetch bool `json:"allow_git_fetch"` JobInfo JobInfo `json:"job_info"` GitInfo GitInfo `json:"git_info"` RunnerInfo RunnerInfo `json:"runner_info"` Inputs Inputs `json:"inputs"` Variables Variables `json:"variables"` Steps Steps `json:"steps" inputs:"expand"` Image Image `json:"image" inputs:"expand"` Services Services `json:"services" inputs:"expand"` Artifacts Artifacts `json:"artifacts" inputs:"expand"` Cache Caches `json:"cache" inputs:"expand"` Credentials []Credentials `json:"credentials"` Dependencies Dependencies `json:"dependencies"` Features GitlabFeatures `json:"features"` Secrets Secrets `json:"secrets,omitempty"` Hooks Hooks `json:"hooks,omitempty"` Run Run `json:"run,omitempty"` PolicyOptions PolicyOptions `json:"policy_options,omitempty"` TLSData TLSData `json:"-"` JobRequestCorrelationID string `json:"-"` } type Run []schema.Step func (r *Run) UnmarshalJSON(data []byte) error { var s string if err := json.Unmarshal(data, &s); err != nil { return err } var run []schema.Step if err := json.Unmarshal([]byte(s), &run); err != nil { return err } *r = run return nil } // ValidateStepsJobRequest does the following: // 1. It detects if the JobRequest is requesting execution of the job via Steps. // 2. If yes, it ensures the request is a valid steps request, and // 3. It sets a default build image. // 4. It further determines if the request is a valid native steps execution request. // 5. If it is, it sets a new, native-steps specific script step and returns. // 6. If not, it configures the job to be run via the step shim approach. 
func (j *Job) ValidateStepsJobRequest(executorSupportsNativeSteps bool) error { switch { case len(j.Run) == 0: return nil case slices.ContainsFunc(j.Steps, func(step Step) bool { return len(step.Script) > 0 }): return fmt.Errorf("the `run` and `script` keywords cannot be used together") case j.Variables.Get("STEPS") != "": return fmt.Errorf("the `run` keyword requires the exclusive use of the variable STEPS") } if executorSupportsNativeSteps { // If the executor supports native step execution and the job was specified as steps, execute the job via native // steps integration. In other words, disallow executing the job in shim mode if the executor supports native // steps. // If native steps is enabled, the script steps won't be executed anyway, but this change ensures the job log // trace is coherent since it will print: Executing "step_run" stage of the job script j.Steps = Steps{{Name: StepNameRun}} return nil } // re-encode the run steps to a string for shim-mode runStr, _ := json.Marshal(j.Run) // Use the shim approach to run steps jobs. This shims GitLab Steps from the `run` keyword into the step-runner // image. This is a temporary mechanism for executing steps which will be replaced by a gRPC connection to // step-runner in each executor. 
j.Variables = append(j.Variables, Variable{ Key: "STEPS", Value: string(runStr), Raw: true, }) j.Steps = Steps{{ Name: StepNameScript, Script: StepScript{"step-runner ci"}, Timeout: 3600, When: "on_success", AllowFailure: false, }} return nil } type Secrets map[string]Secret type Secret struct { Vault *VaultSecret `json:"vault,omitempty"` GCPSecretManager *GCPSecretManagerSecret `json:"gcp_secret_manager,omitempty"` AzureKeyVault *AzureKeyVaultSecret `json:"azure_key_vault,omitempty"` AWSSecretsManager *AWSSecret `json:"aws_secrets_manager,omitempty"` GitLabSecretsManager *GitLabSecretsManagerSecret `json:"gitlab_secrets_manager,omitempty"` File *bool `json:"file,omitempty"` } func (s Secrets) ExpandVariables(vars Variables) { for _, secret := range s { secret.ExpandVariables(vars) } } func (s Secret) ExpandVariables(vars Variables) { if s.Vault != nil { s.Vault.expandVariables(vars) } if s.GCPSecretManager != nil { s.GCPSecretManager.expandVariables(vars) } if s.AzureKeyVault != nil { s.AzureKeyVault.expandVariables(vars) } if s.AWSSecretsManager != nil { s.AWSSecretsManager.expandVariables(vars) } // NOTE: GitLab Secrets Manager doesn't support variable expansion // The only user input from the CI config is the secret name which Rails // transforms into the path. Everything else is generated internally by Rails. } // IsFile defines whether the variable should be of type FILE or no. // // The default behavior is to represent the variable as FILE type. // If defined by the user - set to whatever was chosen. 
func (s Secret) IsFile() bool {
	if s.File == nil {
		// Not specified by the user: default to a FILE-type variable.
		return true
	}

	return *s.File
}

// GCPSecretManagerSecret identifies a secret (name and version) in GCP Secret
// Manager together with the server/auth details needed to fetch it.
type GCPSecretManagerSecret struct {
	Name    string                 `json:"name"`
	Version string                 `json:"version"`
	Server  GCPSecretManagerServer `json:"server"`
}

// GCPSecretManagerServer carries the workload-identity-federation parameters
// and JWT used to authenticate against GCP.
type GCPSecretManagerServer struct {
	ProjectNumber                        string `json:"project_number"`
	WorkloadIdentityFederationPoolId     string `json:"workload_identity_federation_pool_id"`
	WorkloadIdentityFederationProviderID string `json:"workload_identity_federation_provider_id"`
	JWT                                  string `json:"jwt"`
}

// AWSSecret identifies a secret in AWS Secrets Manager. Region/RoleARN/
// RoleSessionName exist both here and on Server; from this code alone it is
// unclear which takes precedence — presumably resolved by the consumer.
type AWSSecret struct {
	SecretId        string    `json:"secret_id"`
	VersionId       string    `json:"version_id,omitempty"`
	VersionStage    string    `json:"version_stage,omitempty"`
	Field           string    `json:"field,omitempty"`
	Region          string    `json:"region,omitempty"`
	RoleARN         string    `json:"role_arn,omitempty"`
	RoleSessionName string    `json:"role_session_name,omitempty"`
	Server          AWSServer `json:"server,omitempty"`
}

// AWSServer carries the server-provided connection/auth parameters for AWS.
type AWSServer struct {
	Region          string `json:"region"`
	JWT             string `json:"jwt,omitempty"`
	RoleArn         string `json:"role_arn,omitempty"`
	RoleSessionName string `json:"role_session_name,omitempty"`
}

// expandVariables expands variable references in every user-suppliable field,
// then recurses into the server config.
func (s *AWSSecret) expandVariables(vars Variables) {
	s.SecretId = vars.ExpandValue(s.SecretId)
	s.VersionId = vars.ExpandValue(s.VersionId)
	s.VersionStage = vars.ExpandValue(s.VersionStage)
	s.Field = vars.ExpandValue(s.Field)
	s.Region = vars.ExpandValue(s.Region)
	s.RoleARN = vars.ExpandValue(s.RoleARN)
	s.RoleSessionName = vars.ExpandValue(s.RoleSessionName)
	s.Server.expandVariables(vars)
}

// expandVariables expands the server fields, defaulting RoleSessionName to a
// job-scoped template before expansion and truncating the result to 64
// characters (AWS limits session names to 64 chars).
func (s *AWSServer) expandVariables(vars Variables) {
	s.JWT = vars.ExpandValue(s.JWT)
	s.Region = vars.ExpandValue(s.Region)
	s.RoleArn = vars.ExpandValue(s.RoleArn)
	if s.RoleSessionName == "" {
		s.RoleSessionName = "${CI_JOB_ID}-${CI_PROJECT_ID}-${CI_SERVER_HOST}"
	}
	s.RoleSessionName = vars.ExpandValue(s.RoleSessionName)
	if len(s.RoleSessionName) > 64 {
		s.RoleSessionName = s.RoleSessionName[:64]
	}
}

// expandVariables expands the secret name/version, then the server config.
func (s *GCPSecretManagerSecret) expandVariables(vars Variables) {
	s.Name = vars.ExpandValue(s.Name)
	s.Version = vars.ExpandValue(s.Version)
	s.Server.expandVariables(vars)
}

// expandVariables expands every GCP server/auth field.
func (s *GCPSecretManagerServer) expandVariables(vars Variables) {
	s.ProjectNumber = vars.ExpandValue(s.ProjectNumber)
	s.WorkloadIdentityFederationPoolId = vars.ExpandValue(s.WorkloadIdentityFederationPoolId)
	s.WorkloadIdentityFederationProviderID = vars.ExpandValue(s.WorkloadIdentityFederationProviderID)
	s.JWT = vars.ExpandValue(s.JWT)
}

// AzureKeyVaultSecret identifies a secret (name and optional version) in an
// Azure Key Vault, together with its server connection details.
type AzureKeyVaultSecret struct {
	Name    string              `json:"name"`
	Version string              `json:"version,omitempty"`
	Server  AzureKeyVaultServer `json:"server"`
}

// AzureKeyVaultServer carries the Azure AD client/tenant identifiers, the JWT
// used to authenticate, and the vault URL.
type AzureKeyVaultServer struct {
	ClientID string `json:"client_id"`
	TenantID string `json:"tenant_id"`
	JWT      string `json:"jwt"`
	URL      string `json:"url"`
}

// expandVariables expands the server config, then the secret name/version.
func (s *AzureKeyVaultSecret) expandVariables(vars Variables) {
	s.Server.expandVariables(vars)
	s.Name = vars.ExpandValue(s.Name)
	s.Version = vars.ExpandValue(s.Version)
}

// expandVariables expands only the JWT; ClientID/TenantID/URL are left
// untouched (server-generated, not user input).
func (s *AzureKeyVaultServer) expandVariables(vars Variables) {
	s.JWT = vars.ExpandValue(s.JWT)
}

// VaultSecret identifies a secret path/field in HashiCorp Vault together with
// the server and engine configuration used to reach it.
type VaultSecret struct {
	Server VaultServer `json:"server"`
	Engine VaultEngine `json:"engine"`
	Path   string      `json:"path"`
	Field  string      `json:"field"`
}

// VaultServer describes the Vault endpoint, namespace, and auth method.
type VaultServer struct {
	URL       string    `json:"url"`
	Auth      VaultAuth `json:"auth"`
	Namespace string    `json:"namespace"`
}

// VaultAuth names the auth method, its mount path, and free-form auth data.
type VaultAuth struct {
	Name string        `json:"name"`
	Path string        `json:"path"`
	Data VaultAuthData `json:"data"`
}

// VaultAuthData is the free-form payload passed to the Vault auth method.
type VaultAuthData map[string]interface{}

// VaultEngine names the secret engine and its mount path.
type VaultEngine struct {
	Name string `json:"name"`
	Path string `json:"path"`
}

// expandVariables expands the server and engine configs, then the secret
// path/field.
func (s *VaultSecret) expandVariables(vars Variables) {
	s.Server.expandVariables(vars)
	s.Engine.expandVariables(vars)
	s.Path = vars.ExpandValue(s.Path)
	s.Field = vars.ExpandValue(s.Field)
}

// AuthName returns the configured Vault auth method name.
func (s *VaultSecret) AuthName() string {
	return s.Server.Auth.Name
}

// AuthPath returns the configured Vault auth method mount path.
func (s *VaultSecret) AuthPath() string {
	return s.Server.Auth.Path
}

// AuthData returns the auth payload converted to the auth_methods.Data type.
func (s *VaultSecret) AuthData() auth_methods.Data {
	return auth_methods.Data(s.Server.Auth.Data)
}

// EngineName returns the configured secret engine name.
func (s *VaultSecret) EngineName() string {
	return s.Engine.Name
}

// EnginePath returns the configured secret engine mount path.
func (s *VaultSecret) EnginePath() string {
	return s.Engine.Path
}

// SecretPath returns the path of the secret within the engine.
func (s *VaultSecret) SecretPath() string {
	return s.Path
}

// SecretField returns the field to extract from the secret.
func (s *VaultSecret) SecretField() string {
	return s.Field
}

// expandVariables expands the URL and namespace, then the auth config.
func (s *VaultServer) expandVariables(vars Variables) {
	s.URL = vars.ExpandValue(s.URL)
	s.Namespace = vars.ExpandValue(s.Namespace)
	s.Auth.expandVariables(vars)
}

// expandVariables expands the auth name/path and every auth data value.
// Values are stringified via %s before expansion, so non-string data is
// replaced by its string form.
func (a *VaultAuth) expandVariables(vars Variables) {
	a.Name = vars.ExpandValue(a.Name)
	a.Path = vars.ExpandValue(a.Path)

	for field, value := range a.Data {
		a.Data[field] = vars.ExpandValue(fmt.Sprintf("%s", value))
	}
}

// expandVariables expands the engine name and mount path.
func (e *VaultEngine) expandVariables(vars Variables) {
	e.Name = vars.ExpandValue(e.Name)
	e.Path = vars.ExpandValue(e.Path)
}

// GitLabSecretsManagerSecret represents a secret configuration for GitLab's native
// secrets management system using OpenBao as the backend.
type GitLabSecretsManagerSecret struct {
	Server GitLabSecretsManagerServer `json:"server"`
	Engine GitLabSecretsManagerEngine `json:"engine"`
	Path   string                     `json:"path"`
	Field  string                     `json:"field"`
}

// GitLabSecretsManagerServer contains the configuration for connecting to the
// OpenBao server and authenticating via JWT.
type GitLabSecretsManagerServer struct {
	URL        string                               `json:"url"`
	InlineAuth GitLabSecretsManagerServerInlineAuth `json:"inline_auth"`
}

// GitLabSecretsManagerServerInlineAuth holds the inline authentication configuration
// for OpenBao JWT authentication. This allows the runner to authenticate on each
// request without storing tokens.
type GitLabSecretsManagerServerInlineAuth struct {
	// Path is the full path for this login request. This is assumed to be
	// against an OpenBao auth method that takes a role and jwt parameter;
	// or, roughly equivalent semantic as the JWT auth engine.
	Path string `json:"path"`

	// JWT is the JWT to use to authenticate against the OpenBao server.
	JWT string `json:"jwt"`

	// Role is the required login authentication role.
Role string `json:"role"` // AuthMount is a legacy field sent on older GitLab versions and must be // templated to auth//login. Newer server versions send the // full request path to authenticate via. AuthMount string `json:"auth_mount"` } // GitLabSecretsManagerEngine specifies the secret engine configuration in OpenBao, // including the engine type and mount path. type GitLabSecretsManagerEngine struct { Name string `json:"name"` Path string `json:"path"` } func (j *Job) RepoCleanURL() string { return url_helpers.CleanURL(j.GitInfo.RepoURL) } func (j *Job) JobURL() string { url := strings.TrimSuffix(j.RepoCleanURL(), ".git") return fmt.Sprintf("%s/-/jobs/%d", url, j.ID) } func (j *Job) UnsupportedOptions() error { return errors.Join( j.Image.UnsupportedOptions(), j.Services.UnsupportedOptions(), ) } ================================================ FILE: common/spec/spec_test.go ================================================ //go:build !integration package spec import ( "testing" "github.com/stretchr/testify/require" ) func Test_Image_ExecutorOptions_GetUIDGID(t *testing.T) { tests := map[string]struct { kubernetesOptions func() *ImageKubernetesOptions expectedError bool expectedUID int64 expectedGID int64 }{ "empty user": { kubernetesOptions: func() *ImageKubernetesOptions { return &ImageKubernetesOptions{ User: "", } }, }, "only user": { kubernetesOptions: func() *ImageKubernetesOptions { return &ImageKubernetesOptions{ User: "1000", } }, expectedUID: int64(1000), }, "uid and gid": { kubernetesOptions: func() *ImageKubernetesOptions { return &ImageKubernetesOptions{ User: "1000:1000", } }, expectedUID: int64(1000), expectedGID: int64(1000), }, "invalid user": { kubernetesOptions: func() *ImageKubernetesOptions { return &ImageKubernetesOptions{ User: "gitlab-runner", } }, expectedError: true, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { uid, gid, err := tt.kubernetesOptions().GetUIDGID() if tt.expectedError { require.Error(t, err) 
require.Equal(t, int64(0), uid) require.Equal(t, int64(0), gid) return } require.NoError(t, err) require.Equal(t, tt.expectedUID, uid) require.Equal(t, tt.expectedGID, gid) }) } } ================================================ FILE: common/spec/variables.go ================================================ package spec import ( "cmp" "fmt" "os" "path" "slices" "strconv" "strings" ) type Variables []Variable func (b Variable) String() string { return fmt.Sprintf("%s=%s", b.Key, b.Value) } const TempProjectDirVariableKey = "RUNNER_TEMP_PROJECT_DIR" // tmpFile will return a canonical temp file path by prepending the job // variables Key with the value of `RUNNER_TEMP_PROJECT_DIR` (typically the // build's temporary directory). The returned path must be further expanded // by/for each shell that uses it. func (b Variables) tmpFile(s string) string { return path.Join(b.Value(TempProjectDirVariableKey), s) } func (b Variables) PublicOrInternal() (variables Variables) { for _, variable := range b { if variable.Public || variable.Internal { variables = append(variables, variable) } } return variables } func (b Variables) StringList() (variables []string) { for _, variable := range b { // For file-type secrets, substitute the path to the secret for the secret // value. if variable.File { v := variable v.Value = b.value(v.Key, true) variables = append(variables, v.String()) } else { variables = append(variables, variable.String()) } } return variables } // GetAllVariableNames returns a semicolon-separated list of all variable names // that are set in the build. This function is used to pass the list of job variable // names to the build container via an environment variable (e.g., RUNNER_JOB_VAR_NAMES), // allowing step-runner to identify and filter out job variables from the OS environment. 
func (b Variables) GetAllVariableNames() string {
	names := make([]string, 0, len(b))
	for _, variable := range b {
		names = append(names, variable.Key)
	}
	// Duplicate keys are not de-duplicated here; the consumer only needs the
	// set of names to filter by.
	return strings.Join(names, ";")
}

// Get returns the value of a variable, or if a file type variable, the
// pathname to the saved file containing the value,
func (b Variables) Get(key string) string {
	return b.value(key, true)
}

// Set sets newJobVars on the JobVariables, replacing all existing variables with the same key.
// If newJobVars holds variables with the same key, only the last one is set.
// Note: the replacement variables are appended in map-iteration order, so the
// relative order among the newly set variables is not deterministic.
func (b *Variables) Set(newJobVars ...Variable) {
	if len(newJobVars) < 1 {
		return
	}

	newVarsByKey := make(map[string]Variable, len(newJobVars))
	for _, v := range newJobVars {
		// for multiple newJobVars with the same key, only keep the last one
		newVarsByKey[v.Key] = v
	}

	// Drop every existing variable whose key is being replaced, then append
	// the replacements.
	*b = slices.DeleteFunc(*b, func(v Variable) bool {
		_, exists := newVarsByKey[v.Key]
		return exists
	})

	for _, v := range newVarsByKey {
		*b = append(*b, v)
	}
}

// Value is similar to Get(), but always returns the key value, regardless
// of the variable type. File variables therefore return the file contents
// and not the path name of the file.
func (b Variables) Value(key string) string {
	return b.value(key, false)
}

// value returns the contents of the variable by key.
//
// If the variable type is 'file' and the 'pathnames' parameter is true, then
// the pathname of the file containing the contents is returned instead.
func (b Variables) value(key string, pathnames bool) string { switch key { case "$": return key case "*", "#", "@", "!", "?", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9": return "" } for i := len(b) - 1; i >= 0; i-- { if b[i].Key == key { if b[i].File && pathnames { return b.tmpFile(b[i].Key) } return b[i].Value } } return "" } // Bool tries to get the boolean value of a variable // "true" and "false" strings are parsed as well as numeric values // where only the value of "1" is considered to be true func (b Variables) Bool(key string) bool { value := b.Get(key) parsedBool, err := strconv.ParseBool(strings.ToLower(value)) if err == nil { return parsedBool } parsedInt, err := strconv.ParseInt(value, 10, 32) if err == nil { return parsedInt == 1 } return false } // OverwriteKey overwrites an existing key with a new variable. func (b Variables) OverwriteKey(key string, variable Variable) { for i, v := range b { if v.Key == key { b[i] = variable return } } } func (b Variables) ExpandValue(value string) string { return os.Expand(value, b.Get) } func (b Variables) Expand() Variables { var variables Variables for _, variable := range b { if !variable.Raw { variable.Value = b.ExpandValue(variable.Value) } variables = append(variables, variable) } return variables } func (b Variables) Masked() (masked []string) { for _, variable := range b { if variable.Masked { masked = append(masked, variable.Value) } } return } // Dedup returns a clone of the JobVariables, where variables with the same key get de-duplicated. // If keepOriginal is true, the first duplicate JobVariable (ie. the original value) is kept, else the last one (ie. the // final overridden value). // The order of variables is not preserved. func (b Variables) Dedup(keepOriginal bool) Variables { clone := slices.Clone(b) if !keepOriginal { // GitLab might give us multiple vars with the same key, with the last one being the final overridden one. 
In order // to get the original value, we thus reverse the vars, and therefore get the first/original value after doing "sort // | uniq". slices.Reverse(clone) } slices.SortStableFunc(clone, func(a, b Variable) int { return cmp.Compare(a.Key, b.Key) }) return slices.Clip(slices.CompactFunc(clone, func(a, b Variable) bool { return a.Key == b.Key })) } ================================================ FILE: common/spec/variables_test.go ================================================ //go:build !integration package spec import ( "encoding/json" "fmt" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestVariablesJSON(t *testing.T) { var x Variable data := []byte( `{"key": "FOO", "value": "bar", "public": true, "internal": true, "file": true, "masked": true, "raw": true}`, ) err := json.Unmarshal(data, &x) assert.NoError(t, err) assert.Equal(t, "FOO", x.Key) assert.Equal(t, "bar", x.Value) assert.True(t, x.Public) assert.False(t, x.Internal) // cannot be set from the network assert.True(t, x.File) assert.True(t, x.Masked) assert.True(t, x.Raw) } func TestVariableString(t *testing.T) { v := Variable{Key: "key", Value: "value"} assert.Equal(t, "key=value", v.String()) } func TestPublicAndInternalVariables(t *testing.T) { v1 := Variable{Key: "key", Value: "value"} v2 := Variable{Key: "public", Value: "value", Public: true} v3 := Variable{Key: "private", Value: "value", Internal: true} all := Variables{v1, v2, v3} public := all.PublicOrInternal() assert.NotContains(t, public, v1) assert.Contains(t, public, v2) assert.Contains(t, public, v3) } func TestMaskedVariables(t *testing.T) { v1 := Variable{Key: "key", Value: "key_value"} v2 := Variable{Key: "masked", Value: "masked_value", Masked: true} all := Variables{v1, v2} masked := all.Masked() assert.NotContains(t, masked, v1.Value) assert.Contains(t, masked, v2.Value) } func TestListVariables(t *testing.T) { v := Variables{ {Key: "key", Value: "value"}, {Key: "fileKey", Value: 
"fileValue", File: true}, {Key: "RUNNER_TEMP_PROJECT_DIR", Value: "/foo/bar", Public: true, Internal: true}, } stringList := v.StringList() assert.Len(t, stringList, 3) assert.Equal(t, "key=value", stringList[0]) assert.Equal(t, "fileKey=/foo/bar/fileKey", stringList[1]) assert.Equal(t, "RUNNER_TEMP_PROJECT_DIR=/foo/bar", stringList[2]) } func TestGetVariable(t *testing.T) { v1 := Variable{Key: "key", Value: "key_value"} v2 := Variable{Key: "public", Value: "public_value", Public: true} v3 := Variable{Key: "private", Value: "private_value"} all := Variables{v1, v2, v3} assert.Equal(t, "public_value", all.Get("public")) assert.Empty(t, all.Get("other")) } func TestVariablesExpansion(t *testing.T) { all := Variables{ {Key: "key", Value: "value_of_$public"}, {Key: "public", Value: "some_value", Public: true}, {Key: "private", Value: "value_of_${public}"}, {Key: "public", Value: "value_of_$undefined", Public: true}, } expanded := all.Expand() assert.Len(t, expanded, 4) assert.Equal(t, "value_of_value_of_$undefined", expanded.Get("key")) assert.Equal(t, "value_of_", expanded.Get("public")) assert.Equal(t, "value_of_value_of_$undefined", expanded.Get("private")) assert.Equal(t, "value_of_ value_of_value_of_$undefined", expanded.ExpandValue("${public} ${private}")) } func TestFileVariablesExpansion(t *testing.T) { all := Variables{ {Key: "a_file_var", Value: "some top secret stuff", File: true}, {Key: "ref_file_var", Value: "${a_file_var}.txt"}, {Key: "regular_var", Value: "bla bla bla"}, {Key: "ref_regular_var", Value: "bla bla bla"}, {Key: "RUNNER_TEMP_PROJECT_DIR", Value: "/foo/bar", Public: true, Internal: true}, } validate := func(t *testing.T, variables Variables) { assert.Len(t, variables, 5) // correct expansion of file variables assert.Equal(t, "/foo/bar/a_file_var", variables.Get("a_file_var")) assert.Equal(t, "some top secret stuff", variables.Value("a_file_var")) // correct expansion of variables that reference file variables assert.Equal(t, 
"/foo/bar/a_file_var.txt", variables.Get("ref_file_var")) assert.Equal(t, "/foo/bar/a_file_var.txt", variables.Value("ref_file_var")) assert.Equal(t, "/foo/bar/a_file_var.txt.blammo", variables.ExpandValue("${ref_file_var}.blammo")) assert.Equal(t, "/foo/bar/a_file_var.blammo", variables.ExpandValue("${a_file_var}.blammo")) // correct expansion of regular variables, and variables that reference // regular variables assert.Equal(t, "bla bla bla", variables.Get("regular_var")) assert.Equal(t, "bla bla bla", variables.Get("ref_regular_var")) assert.Equal(t, "bla bla bla", variables.Value("regular_var")) assert.Equal(t, "bla bla bla", variables.Value("ref_regular_var")) } expanded := all.Expand() validate(t, expanded) // calling Expand multiple times is idempotent. validate(t, expanded.Expand()) } func TestSpecialVariablesExpansion(t *testing.T) { all := Variables{ {Key: "key", Value: "$$"}, {Key: "key2", Value: "$/dsa", Public: true}, {Key: "key3", Value: "aa$@bb"}, {Key: "key4", Value: "aa${@}bb"}, } expanded := all.Expand() assert.Len(t, expanded, 4) assert.Equal(t, "$", expanded.Get("key")) assert.Equal(t, "$/dsa", expanded.Get("key2")) assert.Equal(t, "aabb", expanded.Get("key3")) assert.Equal(t, "aabb", expanded.Get("key4")) } func TestOverwriteKey(t *testing.T) { vars := Variables{ {Key: "hello", Value: "world"}, {Key: "foo", Value: ""}, } // Overwrite empty value vars.OverwriteKey("foo", Variable{Key: "foo", Value: "bar"}) assert.Equal(t, "world", vars.Get("hello")) assert.Equal(t, "bar", vars.Get("foo")) // Overwrite existing value vars.OverwriteKey("hello", Variable{Key: "hello", Value: "universe"}) assert.Equal(t, "universe", vars.Get("hello")) assert.Equal(t, "bar", vars.Get("foo")) // Overwrite key vars.OverwriteKey("hello", Variable{Key: "goodbye", Value: "universe"}) assert.Equal(t, "universe", vars.Get("goodbye")) assert.Equal(t, "", vars.Get("hello")) assert.Equal(t, "bar", vars.Get("foo")) // Overwrite properties fooOverwriteVar := Variable{ Key: 
"foo", Value: "baz", Public: true, Internal: true, File: true, Masked: true, Raw: true, } vars.OverwriteKey("foo", fooOverwriteVar) assert.Equal(t, fooOverwriteVar, vars[1]) } type multipleKeyUsagesTestCase struct { variables Variables expectedValue string } func TestMultipleUsageOfAKey(t *testing.T) { getVariable := func(value string) Variable { return Variable{Key: "key", Value: value} } tests := map[string]multipleKeyUsagesTestCase{ "defined at job level": { variables: Variables{ getVariable("from-job"), }, expectedValue: "from-job", }, "defined at default and job level": { variables: Variables{ getVariable("from-default"), getVariable("from-job"), }, expectedValue: "from-job", }, "defined at config, default and job level": { variables: Variables{ getVariable("from-config"), getVariable("from-default"), getVariable("from-job"), }, expectedValue: "from-job", }, "defined at config and default level": { variables: Variables{ getVariable("from-config"), getVariable("from-default"), }, expectedValue: "from-default", }, "defined at config level": { variables: Variables{ getVariable("from-config"), }, expectedValue: "from-config", }, } for name, testCase := range tests { t.Run(name, func(t *testing.T) { for i := 0; i < 100; i++ { require.Equal(t, testCase.expectedValue, testCase.variables.Get("key")) } }) } } func TestRawVariableExpansion(t *testing.T) { tests := map[bool]string{ true: "value_of_${base}", false: "value_of_base_value", } for raw, expectedValue := range tests { t.Run(fmt.Sprintf("raw-%v", raw), func(t *testing.T) { variables := Variables{ {Key: "base", Value: "base_value"}, {Key: "related", Value: "value_of_${base}", Raw: raw}, } expanded := variables.Expand() assert.Equal(t, expectedValue, expanded.Get("related")) }) } } func TestBoolVariables(t *testing.T) { tests := map[string]bool{ "true": true, "TRUE": true, "tRuE": true, "false": false, "FALSE": false, "fAlsE": false, "1": true, "-1": false, "0": false, "100": false, "": false, "something else": 
false, } for value, expected := range tests { t.Run(value, func(t *testing.T) { v := Variables{ {Key: "variable", Value: value}, } result := v.Bool("variable") require.Equal(t, expected, result) }) } } func Test_JobVariables_Set(t *testing.T) { tests := map[string]struct { jobVars Variables set Variables expected []string }{ "noop": {}, "add one": { set: Variables{ {Key: "foo", Value: "don't use that foo"}, {Key: "foo", Value: "the new foo"}, }, expected: []string{"foo=the new foo"}, }, "overwrite one": { jobVars: Variables{ {Key: "foo", Value: "this foo gets overridden"}, {Key: "foo", Value: "this one too"}, }, set: Variables{ {Key: "foo", Value: "new foo"}, }, expected: []string{"foo=new foo"}, }, "overwrite and add": { jobVars: Variables{ {Key: "foo", Value: "this foo gets overridden"}, {Key: "org", Value: "the org keeps as is"}, {Key: "foo", Value: "this one too"}, }, set: Variables{ {Key: "bar", Value: "don't use that bar"}, {Key: "foo", Value: "new foo"}, {Key: "bar", Value: "new bar"}, }, expected: []string{"foo=new foo", "bar=new bar", "org=the org keeps as is"}, }, "duplicates are preserved if not set": { jobVars: Variables{ {Key: "foo", Value: "1st foo"}, {Key: "blerp", Value: "nope"}, {Key: "foo", Value: "2nd foo"}, {Key: "foo", Value: "3rd foo"}, }, set: Variables{ {Key: "blerp", Value: "blerp!"}, }, expected: []string{"blerp=blerp!", "foo=1st foo", "foo=2nd foo", "foo=3rd foo"}, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { jv := test.jobVars jv.Set(test.set...) 
			actual := jv.StringList()

			assert.ElementsMatch(t, actual, test.expected)
		})
	}
}

// Test_JobVariables_Dedup verifies that Dedup collapses duplicate keys,
// keeping either the first (original) or the last (overridden) occurrence
// depending on the keepOriginal flag, and returns the survivors sorted by key.
func Test_JobVariables_Dedup(t *testing.T) {
	vars := Variables{
		{Key: "foo-key", Value: "foo"},
		{Key: "some-key", Value: "this is the original"},
		{Key: "bar-key", Value: "bar"},
		{Key: "some-key", Value: "this is unused"},
		{Key: "baz-key", Value: "baz"},
		{Key: "some-key", Value: "this is overridden"},
		{Key: "blerp-key", Value: "blerp"},
	}

	tests := []struct {
		name         string
		keepOriginal bool
		expectedVars Variables
	}{
		{
			name: "keep overridden",
			expectedVars: Variables{
				{Key: "bar-key", Value: "bar"},
				{Key: "baz-key", Value: "baz"},
				{Key: "blerp-key", Value: "blerp"},
				{Key: "foo-key", Value: "foo"},
				{Key: "some-key", Value: "this is overridden"},
			},
		},
		{
			name:         "keep original",
			keepOriginal: true,
			expectedVars: Variables{
				{Key: "bar-key", Value: "bar"},
				{Key: "baz-key", Value: "baz"},
				{Key: "blerp-key", Value: "blerp"},
				{Key: "foo-key", Value: "foo"},
				{Key: "some-key", Value: "this is the original"},
			},
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expectedVars, vars.Dedup(tc.keepOriginal))
		})
	}
}

================================================
FILE: common/steps.go
================================================
package common

import (
	"runtime"

	"gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags"
)

// Native steps execution is enabled if:
// - we are not running on windows
// - the executor supports native steps.
// - the job uses the run keyword or script_to_steps migration is active.
func (b *Build) UseNativeSteps() bool {
	// Native steps are never used on Windows.
	if runtime.GOOS == "windows" {
		return false
	}

	// The executor must explicitly advertise native steps support.
	if !b.ExecutorFeatures.NativeStepsIntegration {
		return false
	}

	// Either the job defines `run:` steps, or one of the steps-migration
	// feature flags forces the steps code path.
	return len(b.Job.Run) > 0 ||
		b.IsFeatureFlagOn(featureflags.UseScriptToStepMigration) ||
		b.IsFeatureFlagOn(featureflags.UseConcrete)
}

================================================
FILE: common/support.go
================================================
//nolint:goconst
package common

import (
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/pem"
	"fmt"
	"math/big"
	"net/http"
	"os"
	"os/exec"
	"path"
	"runtime"
	"strings"
	"sync/atomic"
	"time"

	"github.com/stretchr/testify/assert/yaml"

	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/step-runner/schema/v1"
)

// Coordinates (URL, SHAs, ref names) of the gitlab-test fixture repository
// used by the build-response helpers below. Each repo*SHA/repo*RefName group
// pins one scenario branch (plain, LFS, LFS submodule, steps).
const (
	repoRemoteURL = "https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-pipeline-tests/gitlab-test.git"

	repoRefType = spec.RefTypeBranch

	repoSHA       = "69b18e5ed3610cf646119c3e38f462c64ec462b7"
	repoBeforeSHA = "1ea27a9695f80d7816d9e8ce025d9b2df83d0dd7"
	repoRefName   = "main"

	repoLFSSHA       = "c8f2a61def956871b91f73fcd0c320afb257fd6e"
	repoLFSBeforeSHA = "86002a2304d89a193f91b8b0907c4cf2f95a6d28"
	repoLFSRefName   = "add-lfs-object"

	repoSubmoduleLFSSHA       = "86002a2304d89a193f91b8b0907c4cf2f95a6d28"
	repoSubmoduleLFSBeforeSHA = "1ea27a9695f80d7816d9e8ce025d9b2df83d0dd7"
	repoSubmoduleLFSRefName   = "add-lfs-submodule"

	repoStepsSHA       = "1142c6530a1eb81f0a5476db25fbfbf9a4e08f30"
	repoStepsBeforeSHA = "1ea27a9695f80d7816d9e8ce025d9b2df83d0dd7"
	repoStepsRefName   = "add-steps"

	FilesLFSFile1LFSsize = int64(2097152)
)

// gitLabComChain caches the PEM-encoded gitlab.com TLS chain once fetched;
// gitLabComChainFetched flags whether the cache is populated.
var (
	gitLabComChain        string
	gitLabComChainFetched atomic.Bool
)

// GetGitInfo returns GitInfo pointing at the main branch of the test
// repository at url.
func GetGitInfo(url string) spec.GitInfo {
	return spec.GitInfo{
		RepoURL:   url,
		Sha:       repoSHA,
		BeforeSha: repoBeforeSHA,
		Ref:       repoRefName,
		RefType:   repoRefType,
		Refspecs:  []string{"+refs/heads/*:refs/origin/heads/*", "+refs/tags/*:refs/tags/*"},
	}
}

// GetLFSGitInfo returns GitInfo pointing at the LFS scenario branch of the
// test repository at url.
func GetLFSGitInfo(url string) spec.GitInfo {
	return spec.GitInfo{
		RepoURL:   url,
		Sha:       repoLFSSHA,
		BeforeSha: repoLFSBeforeSHA,
		Ref:
repoLFSRefName, RefType: repoRefType, Refspecs: []string{"+refs/heads/*:refs/origin/heads/*", "+refs/tags/*:refs/tags/*"}, } } func GetSubmoduleLFSGitInfo(url string) spec.GitInfo { return spec.GitInfo{ RepoURL: url, Sha: repoSubmoduleLFSSHA, BeforeSha: repoSubmoduleLFSBeforeSHA, Ref: repoSubmoduleLFSRefName, RefType: repoRefType, Refspecs: []string{"+refs/heads/*:refs/origin/heads/*", "+refs/tags/*:refs/tags/*"}, } } func GetStepsGitInfo(url string) spec.GitInfo { return spec.GitInfo{ RepoURL: url, Sha: repoStepsSHA, BeforeSha: repoStepsBeforeSHA, Ref: repoStepsRefName, RefType: repoRefType, Refspecs: []string{"+refs/heads/*:refs/origin/heads/*", "+refs/tags/*:refs/tags/*"}, } } func GetSuccessfulBuild() (spec.Job, error) { return GetLocalBuildResponse("echo Hello World") } func GetSuccessfulMultilineCommandBuild() (spec.Job, error) { return GetLocalBuildResponse(`echo "Hello World"`) } func GetRemoteSuccessfulBuild() (spec.Job, error) { return GetRemoteBuildResponse("echo Hello World") } func GetRemoteSuccessfulLFSBuild() (spec.Job, error) { response, err := GetRemoteBuildResponse("echo Hello World") response.GitInfo = GetLFSGitInfo(repoRemoteURL) return response, err } func GetRemoteSuccessfulBuildWithAfterScript() (spec.Job, error) { jobResponse, err := GetRemoteBuildResponse("echo Hello World") jobResponse.Steps = append( jobResponse.Steps, spec.Step{ Name: spec.StepNameAfterScript, Script: []string{"echo Hello World"}, When: spec.StepWhenAlways, }, ) return jobResponse, err } func GetRemoteSuccessfulBuildPrintVars(shell string, vars ...string) (spec.Job, error) { printVarsCmd := getShellPrintVars(shell, vars...) return GetRemoteBuildResponse(printVarsCmd...) } func GetRemoteSuccessfulBuildPrintVarsAfterScript(shell string, vars ...string) (spec.Job, error) { printVarsCmd := getShellPrintVars(shell, vars...) return GetRemoteBuildResponse(printVarsCmd...) 
} func GetRemoteSuccessfulMultistepBuild() (spec.Job, error) { jobResponse, err := GetRemoteBuildResponse("echo Hello World") if err != nil { return spec.Job{}, err } jobResponse.Steps = append( jobResponse.Steps, spec.Step{ Name: "release", Script: []string{"echo Release"}, When: spec.StepWhenOnSuccess, }, spec.Step{ Name: spec.StepNameAfterScript, Script: []string{"echo After Script"}, When: spec.StepWhenAlways, }, ) return jobResponse, nil } func GetRemoteFailingMultistepBuild(failingStepName spec.StepName) (spec.Job, error) { jobResponse, err := GetRemoteSuccessfulMultistepBuild() if err != nil { return spec.Job{}, err } for i, step := range jobResponse.Steps { if step.Name == failingStepName { jobResponse.Steps[i].Script = append(step.Script, "exit 1") //nolint:gocritic } } return jobResponse, nil } func GetRemoteFailingMultistepBuildPrintVars(shell string, fail bool, vars ...string) (spec.Job, error) { jobResponse, err := GetRemoteBuildResponse("echo 'Hello World'") if err != nil { return spec.Job{}, err } printVarsCmd := getShellPrintVars(shell, vars...) 
exitCommand := "exit 0" if fail { exitCommand = "exit 1" } jobResponse.Steps = append( jobResponse.Steps, spec.Step{ Name: "env", Script: append(printVarsCmd, exitCommand), When: spec.StepWhenOnSuccess, }, spec.Step{ Name: spec.StepNameAfterScript, Script: printVarsCmd, When: spec.StepWhenAlways, }, ) return jobResponse, nil } func getShellPrintVars(shell string, vars ...string) []string { var envCommand []string var fmtStr string switch shell { case "powershell", "pwsh": fmtStr = "echo %s=$env:%s" default: fmtStr = "echo %s=$%s" } for _, v := range vars { envCommand = append(envCommand, fmt.Sprintf(fmtStr, v, v)) } return envCommand } func GetRemoteSuccessfulBuildWithDumpedVariables() (spec.Job, error) { variableName := "test_dump" variableValue := "test" response, err := GetRemoteBuildResponse( fmt.Sprintf("[[ \"${%s}\" != \"\" ]]", variableName), fmt.Sprintf("[[ $(cat $%s) == \"%s\" ]]", variableName, variableValue), ) if err != nil { return spec.Job{}, err } dumpedVariable := spec.Variable{ Key: variableName, Value: variableValue, Internal: true, Public: true, File: true, } response.Variables = append(response.Variables, dumpedVariable) return response, nil } func GetFailedBuild() (spec.Job, error) { return GetLocalBuildResponse("exit 1") } func GetRemoteFailedBuild() (spec.Job, error) { return GetRemoteBuildResponse("exit 1") } func GetLongRunningBuild() (spec.Job, error) { return GetLocalBuildResponse("sleep 3600") } func GetRemoteLongRunningBuild() (spec.Job, error) { return GetRemoteBuildResponse("sleep 3600") } func GetRemoteLongRunningBuildWithAfterScript(shell string) (spec.Job, error) { var jobResponse spec.Job var err error jobResponse, err = GetRemoteLongRunningBuild() if err != nil { return spec.Job{}, err } switch shell { default: jobResponse.Steps = append(jobResponse.Steps, spec.Step{ Name: spec.StepNameAfterScript, Script: []string{ "echo \"Hello World from after_script\"", "echo \"job status $CI_JOB_STATUS\"", }, }) case "pwsh": 
jobResponse.Steps = append(jobResponse.Steps, spec.Step{ Name: spec.StepNameAfterScript, Script: []string{ "echo \"Hello World from after_script\"", "echo \"job status $env:CI_JOB_STATUS\"", }, }) case "cmd": jobResponse.Steps = append(jobResponse.Steps, spec.Step{ Name: spec.StepNameAfterScript, Script: []string{ "echo \"Hello World from after_script\"", "echo \"job status %CI_JOB_STATUS%\"", }, }) } return jobResponse, nil } func GetMultilineBashBuild() (spec.Job, error) { return GetRemoteBuildResponse(`if true; then echo 'Hello World' fi `) } func GetMultilineBashBuildPowerShell() (spec.Job, error) { return GetRemoteBuildResponse("if (0 -eq 0) {\n\recho \"Hello World\"\n\r}") } func GetRemoteBrokenTLSBuild() (spec.Job, error) { invalidCert, err := buildSnakeOilCert() if err != nil { return spec.Job{}, err } return getRemoteCustomTLSBuild(invalidCert) } func GetRemoteGitLabComTLSBuild() (spec.Job, error) { cert, err := getGitLabComTLSChain() if err != nil { return spec.Job{}, err } return getRemoteCustomTLSBuild(cert) } func getRemoteCustomTLSBuild(chain string) (spec.Job, error) { job, err := GetRemoteBuildResponse("echo Hello World") if err != nil { return spec.Job{}, err } job.TLSData.CAChain = chain job.Variables = append( job.Variables, spec.Variable{Key: "GIT_STRATEGY", Value: "clone"}, spec.Variable{Key: "GIT_SUBMODULE_STRATEGY", Value: "normal"}, ) return job, nil } func getBuildResponse(repoURL string, commands []string) spec.Job { return spec.Job{ Variables: spec.Variables{ spec.Variable{Key: "CI_JOB_TOKEN", Value: "test-job-token"}, }, GitInfo: GetGitInfo(repoURL), Steps: spec.Steps{ spec.Step{ Name: spec.StepNameScript, Script: commands, When: spec.StepWhenAlways, AllowFailure: false, }, }, RunnerInfo: spec.RunnerInfo{ Timeout: DefaultTimeout, }, } } func getStepsBuildResponse(repoURL, stepsYAML string) (spec.Job, error) { var steps []schema.Step if err := yaml.Unmarshal([]byte(stepsYAML), &steps); err != nil { return spec.Job{}, err } return 
spec.Job{ GitInfo: GetStepsGitInfo(repoURL), Run: steps, Steps: spec.Steps{spec.Step{Name: spec.StepNameRun}}, RunnerInfo: spec.RunnerInfo{ Timeout: DefaultTimeout, }, }, nil } func GetRemoteStepsBuildResponse(stepsYAML string) (spec.Job, error) { return getStepsBuildResponse(repoRemoteURL, stepsYAML) } func GetRemoteBuildResponse(commands ...string) (spec.Job, error) { return getBuildResponse(repoRemoteURL, commands), nil } func GetLocalBuildResponse(commands ...string) (spec.Job, error) { localRepoURL, err := getLocalRepoURL() if err != nil { if os.IsNotExist(err) { panic("Local repo not found, please run `make development_setup`") } return spec.Job{}, err } return getBuildResponse(localRepoURL, commands), nil } func getLocalRepoURL() (string, error) { _, filename, _, _ := runtime.Caller(0) //nolint:dogsled directory := path.Dir(filename) if strings.Contains(directory, "_test/_obj_test") { pwd, err := os.Getwd() if err != nil { return "", err } directory = pwd } localRepoURL := path.Clean(directory + "/../tmp/gitlab-test/.git") _, err := os.Stat(localRepoURL) if err != nil { return "", err } return localRepoURL, nil } func RunLocalRepoGitCommand(arguments ...string) error { url, err := getLocalRepoURL() if err != nil { return err } cmd := exec.Command("git", arguments...) 
cmd.Dir = path.Dir(url) return cmd.Run() } func buildSnakeOilCert() (string, error) { priv, err := rsa.GenerateKey(rand.Reader, 1024) if err != nil { return "", err } notBefore := time.Now() notAfter := notBefore.Add(time.Hour) template := x509.Certificate{ SerialNumber: big.NewInt(1), Subject: pkix.Name{ Organization: []string{"Snake Oil Co"}, }, NotBefore: notBefore, NotAfter: notAfter, IsCA: true, KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign, ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth}, BasicConstraintsValid: true, } derBytes, err := x509.CreateCertificate(rand.Reader, &template, &template, &priv.PublicKey, priv) if err != nil { return "", err } certificate := pem.EncodeToMemory(&pem.Block{Type: "CERTIFICATE", Bytes: derBytes}) return string(certificate), nil } func getGitLabComTLSChain() (string, error) { if gitLabComChainFetched.Load() { return gitLabComChain, nil } resp, err := http.Head("https://gitlab.com/users/sign_in") if err != nil { return "", err } defer func() { _ = resp.Body.Close() }() var buff strings.Builder for _, certs := range resp.TLS.VerifiedChains { for _, cert := range certs { err = pem.Encode(&buff, &pem.Block{Type: "CERTIFICATE", Bytes: cert.Raw}) if err != nil { return "", err } } } gitLabComChain = buff.String() gitLabComChainFetched.Store(true) return gitLabComChain, nil } ================================================ FILE: common/test.go ================================================ package common import "testing" func Int64Ptr(v int64) *int64 { return &v } type TestRunnerConfig struct { RunnerConfig *RunnerConfig } func NewTestRunnerConfig() *TestRunnerConfig { return &TestRunnerConfig{ RunnerConfig: &RunnerConfig{}, } } func (c *TestRunnerConfig) WithAutoscalerConfig(ac *AutoscalerConfig) *TestRunnerConfig { c.RunnerConfig.Autoscaler = ac return c } func (c *TestRunnerConfig) WithToken(token string) *TestRunnerConfig { c.RunnerConfig.RunnerCredentials.Token = 
token return c } type TestAutoscalerConfig struct { AutoscalerConfig *AutoscalerConfig } func NewTestAutoscalerConfig() *TestAutoscalerConfig { return &TestAutoscalerConfig{ AutoscalerConfig: &AutoscalerConfig{}, } } func (c *TestAutoscalerConfig) WithPolicies(policies ...AutoscalerPolicyConfig) *TestAutoscalerConfig { c.AutoscalerConfig.Policy = policies return c } // mockLightJobTrace is wrapper around common.MockJobTrace. // The only difference is the Write method which does // nothing but return the length of data it receives. // // This is done as mockery generated mocks maintain // and internal state to make assertion but for this // particular test it leads to excessive use of memory // sometimes more than 50GB as the build test generates // a lot of logs and processes them. // // This leads to OOM kills with Kubernetes runners. // // Note: When using mockLightJobTrace assert on Write method // will not work. type mockLightJobTrace struct { *MockJobTrace } func NewMockLightJobTrace(t *testing.T) *mockLightJobTrace { return &mockLightJobTrace{ MockJobTrace: NewMockJobTrace(t), } } func (l *mockLightJobTrace) Write(p []byte) (int, error) { return len(p), nil } ================================================ FILE: common/trace.go ================================================ package common import ( "context" "io" "os" "sync" "gitlab.com/gitlab-org/gitlab-runner/common/spec" ) type Trace struct { Writer io.Writer cancelFunc context.CancelFunc abortFunc context.CancelFunc mutex sync.Mutex } const ExitCodeUnsupportedOptions = 3 type JobFailureData struct { Reason spec.JobFailureReason ExitCode int Mode JobExecutionMode } func (s *Trace) Write(p []byte) (n int, err error) { s.mutex.Lock() defer s.mutex.Unlock() if s.Writer == nil { return 0, os.ErrInvalid } return s.Writer.Write(p) } func (s *Trace) SetDebugModeEnabled(_ bool) { } func (s *Trace) Success() error { return nil } func (s *Trace) Fail(err error, failureData JobFailureData) error { return nil } func 
(s *Trace) Finish() { } func (s *Trace) SetCancelFunc(cancelFunc context.CancelFunc) { s.mutex.Lock() defer s.mutex.Unlock() s.cancelFunc = cancelFunc } func (s *Trace) Cancel() bool { s.mutex.Lock() defer s.mutex.Unlock() if s.cancelFunc == nil { return false } s.cancelFunc() return true } func (s *Trace) SetAbortFunc(abortFunc context.CancelFunc) { s.mutex.Lock() defer s.mutex.Unlock() s.abortFunc = abortFunc } func (s *Trace) Abort() bool { s.mutex.Lock() defer s.mutex.Unlock() if s.abortFunc == nil { return false } // Abort always have much higher importance than Cancel // as abort interrupts the execution s.cancelFunc = nil s.abortFunc() return true } func (s *Trace) SetFailuresCollector(fc FailuresCollector) {} func (s *Trace) SetSupportedFailureReasonMapper(f SupportedFailureReasonMapper) {} func (s *Trace) IsStdout() bool { return true } ================================================ FILE: common/usage_log.go ================================================ package common import ( "gitlab.com/gitlab-org/gitlab-runner/helpers/usage_log" ) func UsageLogRecordFrom(runner *RunnerConfig, build *Build) usage_log.Record { record := usage_log.Record{ Runner: usage_log.Runner{ ID: runner.ShortDescription(), Name: runner.Name, SystemID: runner.GetSystemID(), Executor: runner.Executor, }, Job: usage_log.Job{ URL: build.JobURL(), DurationSeconds: build.FinalDuration().Seconds(), Status: build.CurrentState().String(), FailureReason: build.FailureReason().String(), StartedAt: build.StartedAt().UTC(), FinishedAt: build.FinishedAt().UTC(), PipelineID: build.JobInfo.PipelineID, Project: usage_log.Project{ ID: build.JobInfo.ProjectID, Name: build.JobInfo.ProjectName, FullPath: build.JobInfo.ProjectFullPath, }, Namespace: usage_log.Namespace{ ID: build.JobInfo.NamespaceID, }, RootNamespace: usage_log.Namespace{ ID: build.JobInfo.RootNamespaceID, }, Organization: usage_log.Organization{ ID: build.JobInfo.OrganizationID, }, Instance: usage_log.Instance{ ID: 
build.JobInfo.InstanceID, UniqueID: build.JobInfo.InstanceUUID, }, User: usage_log.User{ ID: build.JobInfo.UserID, }, }, Labels: runner.ComputedLabels(), } if build.JobInfo.ScopedUserID != nil { record.Job.ScopedUser.ID = *build.JobInfo.ScopedUserID } return record } ================================================ FILE: common/version.go ================================================ package common import ( "fmt" "runtime" "runtime/debug" "github.com/prometheus/client_golang/prometheus" "github.com/urfave/cli" "gitlab.com/gitlab-org/gitlab-runner/common/spec" ) var ( NAME = "gitlab-runner" VERSION = "" REVISION = "" BRANCH = "HEAD" BUILT = "" ) var AppVersion = AppVersionInfo{ Name: NAME, Version: VERSION, Revision: REVISION, Branch: BRANCH, GOVersion: runtime.Version(), BuiltAt: BUILT, OS: runtime.GOOS, Architecture: runtime.GOARCH, } type AppVersionInfo struct { Name string `json:"name"` Version string `json:"version"` Revision string `json:"revision"` Branch string `json:"branch"` GOVersion string `json:"go_version"` BuiltAt string `json:"built_at"` OS string `json:"os"` Architecture string `json:"architecture"` } func (v *AppVersionInfo) Printer(c *cli.Context) { fmt.Print(v.Extended()) } func (v *AppVersionInfo) Line() string { return fmt.Sprintf("%s %s (%s)", v.Name, v.Version, v.Revision) } func (v *AppVersionInfo) ShortLine() string { return fmt.Sprintf("%s (%s)", v.Version, v.Revision) } func (v *AppVersionInfo) UserAgent() string { return fmt.Sprintf("%s %s (%s; %s; %s/%s)", v.Name, v.Version, v.Branch, v.GOVersion, v.OS, v.Architecture) } func (v *AppVersionInfo) Variables() spec.Variables { return spec.Variables{ {Key: "CI_RUNNER_VERSION", Value: v.Version, Public: true, Internal: true, File: false}, {Key: "CI_RUNNER_REVISION", Value: v.Revision, Public: true, Internal: true, File: false}, { Key: "CI_RUNNER_EXECUTABLE_ARCH", Value: fmt.Sprintf("%s/%s", v.OS, v.Architecture), Public: true, Internal: true, File: false, }, } } func (v *AppVersionInfo) 
Extended() string { version := fmt.Sprintf("Version: %s\n", v.Version) version += fmt.Sprintf("Git revision: %s\n", v.Revision) version += fmt.Sprintf("Git branch: %s\n", v.Branch) version += fmt.Sprintf("GO version: %s\n", v.GOVersion) version += fmt.Sprintf("Built: %s\n", v.BuiltAt) version += fmt.Sprintf("OS/Arch: %s/%s\n", v.OS, v.Architecture) return version } // NewMetricsCollector returns a prometheus.Collector which represents current build information. func (v *AppVersionInfo) NewMetricsCollector() *prometheus.GaugeVec { labels := map[string]string{ "name": v.Name, "version": v.Version, "revision": v.Revision, "branch": v.Branch, "go_version": v.GOVersion, "built_at": v.BuiltAt, "os": v.OS, "architecture": v.Architecture, } labelNames := make([]string, 0, len(labels)) for n := range labels { labelNames = append(labelNames, n) } buildInfo := prometheus.NewGaugeVec( prometheus.GaugeOpts{ Name: "gitlab_runner_version_info", Help: "A metric with a constant '1' value labeled by different build stats fields.", }, labelNames, ) buildInfo.With(labels).Set(1) return buildInfo } func init() { info, ok := debug.ReadBuildInfo() if ok { if AppVersion.Version == "" { AppVersion.Version = info.Main.Version } for _, setting := range info.Settings { switch { case setting.Key == "vcs.revision" && AppVersion.Revision == "" && len(setting.Value) >= 8: AppVersion.Revision = setting.Value[:8] case setting.Key == "vcs.time" && AppVersion.BuiltAt == "": AppVersion.BuiltAt = setting.Value } } } if AppVersion.Version == "" || AppVersion.Version == "(devel)" { AppVersion.Version = "development version" } if AppVersion.Revision == "" { AppVersion.Revision = "HEAD" } } ================================================ FILE: config.toml.example ================================================ concurrent = 4 [[runners]] name = "shell" url = "https://gitlab.com/" token = "TOKEN" limit = 2 executor = "shell" builds_dir = "" shell = "bash" [[runners]] name = "ruby-3.1-docker" url = 
"https://gitlab.com/" token = "TOKEN" limit = 0 executor = "docker" builds_dir = "" [runners.docker] host = "" image = "ruby:3.1" privileged = false disable_cache = false cache_dir = "" [[runners]] name = "production-server" url = "https://gitlab.com/" token = "TOKEN" limit = 0 executor = "ssh" builds_dir = "" [runners.ssh] host = "my-production-server" port = "22" user = "root" password = "production-server-password" ================================================ FILE: dockerfiles/runner/Dockerfile ================================================ ARG BASE_IMAGE FROM $BASE_IMAGE ARG TARGETOS ARG TARGETARCH ARG SRC_SUFFIX="" COPY --from=binary_dir gitlab-runner-${TARGETOS}-${TARGETARCH}${SRC_SUFFIX} /usr/bin/gitlab-runner COPY --from=packaging_dir clear-docker-cache /usr/share/gitlab-runner/ ================================================ FILE: dockerfiles/runner/docker-bake.hcl ================================================ variable "RUNNER_IMAGES_REGISTRY" { default = "registry.gitlab.com/gitlab-org/ci-cd/runner-tools/base-images" } variable "RUNNER_IMAGES_VERSION" { default = "0.0.0" } variable "LOCAL_ARCH" { default = "amd64" } variable "LOCAL_FLAVOR" { default = "alpine-latest" } common-platforms = [ "linux/amd64", "linux/arm64", "linux/s390x", "linux/ppc64le", "linux/riscv64" ] alpine-platforms = { "3.21" : common-platforms, "latest" : common-platforms, } target "base" { contexts = { binary_dir = "../../out/binaries/" packaging_dir = "../../packaging/root/usr/share/gitlab-runner/" } platforms = common-platforms } target "ubuntu" { inherits = ["base"] args = { BASE_IMAGE = "${RUNNER_IMAGES_REGISTRY}/runner:${RUNNER_IMAGES_VERSION}-ubuntu" } output = ["type=oci,dest=./../../out/runner-images/ubuntu.tar,tar=true"] } target "ubi-fips" { inherits = ["base"] args = { BASE_IMAGE = "${RUNNER_IMAGES_REGISTRY}/runner:${RUNNER_IMAGES_VERSION}-ubi-fips" SRC_SUFFIX = "-fips" } platforms = ["linux/amd64"] output = 
["type=oci,dest=./../../out/runner-images/ubi-fips.tar,tar=true"] } target "alpine" { inherits = ["base"] name = "alpine-${replace(version, ".", "-")}" matrix = { version = keys(alpine-platforms) } args = { BASE_IMAGE = "${RUNNER_IMAGES_REGISTRY}/runner:${RUNNER_IMAGES_VERSION}-alpine-${version}" } platforms = alpine-platforms[version] output = ["type=oci,dest=./../../out/runner-images/alpine-${version}.tar,tar=true"] } # Used for local testing, creates the gitlab-runner:local image in the user's current docker context target "local-image" { inherits = ["base"] args = { BASE_IMAGE = "${RUNNER_IMAGES_REGISTRY}/runner:${RUNNER_IMAGES_VERSION}-${LOCAL_FLAVOR}" } platforms = ["linux/${LOCAL_ARCH}"] output = ["type=docker"] tags = ["gitlab-runner:local"] } group "all" { targets = [ "ubuntu", "alpine", "ubi-fips", ] } ================================================ FILE: dockerfiles/runner-helper/Dockerfile ================================================ ARG BASE_IMAGE FROM $BASE_IMAGE ARG TARGETOS ARG TARGETARCH ARG SRC_SUFFIX="" ARG DST_SUFFIX="" ARG DST_DIR="/usr/bin" COPY --from=binary_dir gitlab-runner-helper.${TARGETOS}-${TARGETARCH}${SRC_SUFFIX} ${DST_DIR}/gitlab-runner-helper${DST_SUFFIX} ================================================ FILE: dockerfiles/runner-helper/Dockerfile.concrete ================================================ ARG BASE_IMAGE FROM $BASE_IMAGE ARG TARGETOS ARG TARGETARCH ARG SRC_SUFFIX="" COPY --from=binary_dir gitlab-runner-helper.${TARGETOS}-${TARGETARCH}${SRC_SUFFIX} /usr/bin/gitlab-runner-helper CMD ["/usr/bin/gitlab-runner-helper"] ================================================ FILE: dockerfiles/runner-helper/docker-bake.hcl ================================================ variable "RUNNER_IMAGES_REGISTRY" { default = "registry.gitlab.com/gitlab-org/ci-cd/runner-tools/base-images" } variable "RUNNER_IMAGES_VERSION" { default = "0.0.0" } variable "LOCAL_ARCH" { default = "amd64" } variable "LOCAL_FLAVOR" { default = "alpine-latest" 
} common-platforms = [ "linux/amd64", "linux/arm", "linux/arm64", "linux/s390x", "linux/ppc64le", "linux/riscv64" ] alpine-platforms = { "3.21" : common-platforms, "latest" : common-platforms, "edge" : common-platforms, } target "base" { contexts = { binary_dir = "../../out/binaries/gitlab-runner-helper" } } target "alpine" { inherits = ["base"] name = "alpine-${replace(v.version, ".", "-")}-${v.arch}" matrix = { v = flatten([ for key, values in alpine-platforms : [ for plat in values : { version : key, arch : split("/", plat)[1] } ] ]) } args = { BASE_IMAGE = "${RUNNER_IMAGES_REGISTRY}/runner-helper:${RUNNER_IMAGES_VERSION}-alpine-${v.version}" } platforms = ["linux/${v.arch}"] output = ["type=oci,dest=./../../out/helper-images/alpine${v.version == "latest" || v.version == "edge" ? "-${v.version}" : v.version}-${v.arch == "amd64" ? "x86_64" : v.arch}.tar"] } target "alpine-pwsh" { inherits = ["base"] name = "alpine-${replace(version, ".", "-")}-pwsh" matrix = { version = keys(alpine-platforms) } platforms = ["linux/amd64"] args = { BASE_IMAGE = "${RUNNER_IMAGES_REGISTRY}/runner-helper:${RUNNER_IMAGES_VERSION}-alpine-${version}-pwsh" } output = ["type=oci,dest=./../../out/helper-images/alpine${version == "latest" || version == "edge" ? "-${version}" : version}-x86_64-pwsh.tar,tar=true"] } target "ubuntu" { inherits = ["base"] name = "ubuntu-${replace(platform, "/", "-")}" matrix = { platform = common-platforms } platforms = [platform] args = { BASE_IMAGE = "${RUNNER_IMAGES_REGISTRY}/runner-helper:${RUNNER_IMAGES_VERSION}-ubuntu" } output = ["type=oci,dest=./../../out/helper-images/ubuntu-${split("/", platform)[1] == "amd64" ? 
"x86_64" : split("/", platform)[1]}.tar,tar=true"] } target "ubuntu-pwsh" { inherits = ["base"] name = "ubuntu-${replace(platform, "/", "-")}-pwsh" matrix = { platform = ["linux/amd64", "linux/arm64"] } platforms = [platform] args = { BASE_IMAGE = "${RUNNER_IMAGES_REGISTRY}/runner-helper:${RUNNER_IMAGES_VERSION}-ubuntu-pwsh" } output = ["type=oci,dest=./../../out/helper-images/ubuntu-${split("/", platform)[1] == "amd64" ? "x86_64" : split("/", platform)[1]}-pwsh.tar,tar=true"] } target "ubi-fips" { inherits = ["base"] platforms = ["linux/amd64"] args = { BASE_IMAGE = "${RUNNER_IMAGES_REGISTRY}/runner-helper:${RUNNER_IMAGES_VERSION}-ubi-fips" SRC_SUFFIX = "-fips" } output = ["type=oci,dest=./../../out/helper-images/ubi-fips-x86_64.tar,tar=true"] } target "concrete" { inherits = ["base"] name = "concrete-${replace(platform, "/", "-")}" matrix = { platform = common-platforms } platforms = [platform] args = { BASE_IMAGE = "${RUNNER_IMAGES_REGISTRY}/runner-helper:${RUNNER_IMAGES_VERSION}-concrete" } dockerfile = "Dockerfile.concrete" output = ["type=oci,dest=./../../out/helper-images/concrete-${split("/", platform)[1] == "amd64" ? 
"x86_64" : split("/", platform)[1]}.tar,tar=true"] } target "windows" { inherits = ["base"] name = "windows-${replace(item.version, ":", "-")}" matrix = { item = [ { version = "nanoserver:ltsc2019", arch = "amd64" }, { version = "nanoserver:ltsc2022", arch = "amd64" }, { version = "servercore:ltsc2019", arch = "amd64" }, { version = "servercore:ltsc2022", arch = "amd64" }, { version = "servercore:ltsc2025", arch = "amd64" }, { version = "servercore:ltsc2025-arm64", arch = "arm64" } ] } platforms = ["windows/${item.arch}"] args = { BASE_IMAGE = "${RUNNER_IMAGES_REGISTRY}/runner-helper:${RUNNER_IMAGES_VERSION}-${replace(item.version, ":", "-")}" SRC_SUFFIX = ".exe" DST_SUFFIX = ".exe" TARGETARCH = "amd64" # Force override of TARGETARCH because arm64 runner-helper is not yet available; amd64 version of runner-helper works on arm64 Windows via emulation. DST_DIR = "/Program Files/gitlab-runner-helper" } # Note: "arm64" is already in the name of version so "arm64" is not appended to the name of the tar output = ["type=oci,dest=./../../out/helper-images/windows-${replace(item.version, ":", "-")}${item.arch == "amd64" ? 
"-x86_64" : ""}.tar,tar=true"] } # Used for local testing, creates the gitlab-runner-helper:local image in the user's current docker context target "local-image" { inherits = ["base"] args = { BASE_IMAGE = "${RUNNER_IMAGES_REGISTRY}/runner-helper:${RUNNER_IMAGES_VERSION}-${LOCAL_FLAVOR}" } platforms = ["linux/${LOCAL_ARCH}"] output = ["type=docker"] tags = ["gitlab-runner-helper:local"] } target "local-image-concrete" { inherits = ["base"] args = { BASE_IMAGE = "${RUNNER_IMAGES_REGISTRY}/runner-helper:${RUNNER_IMAGES_VERSION}-concrete" } platforms = ["linux/${LOCAL_ARCH}"] output = ["type=docker"] tags = ["gitlab-runner-helper:concrete"] dockerfile = "Dockerfile.concrete" } group "all" { targets = [ "alpine", "alpine-pwsh", "ubuntu", "ubuntu-pwsh", "ubi-fips", "windows", "concrete" ] } ================================================ FILE: docs/.markdownlint/.markdownlint-cli2.yaml ================================================ --- # Extended Markdown configuration to enforce no-trailing-spaces rule # To use this configuration, in the docs directory, run: # # markdownlint-cli2 --config .markdownlint/.markdownlint-cli2.yaml '**/*.md' config: default: false no-trailing-spaces: true noInlineConfig: true fix: true ================================================ FILE: docs/.markdownlint/rules/unnecessary_traversal.js ================================================ const path = require('path'); module.exports = { names: ['Custom rule/unnecessary-traversal'], description: 'Links should not traverse out and back into the same directory', tags: ['gitlab-docs', 'links'], function: (params, onError) => { // Get the current file directory name const { name: filePath = '', lines = [] } = params; const dirName = path.basename(path.dirname(filePath)); if (!filePath) return; // Process each line lines.forEach((line, i) => { // Skip lines that don't contain markdown links with relative paths if (!line.includes('](../')) return; // Regular expression to find markdown links with 
potential traversal issues const linkRegex = /\[([^\]]+)\]\((\.\.\/([^/]+)\/)(.*?)(?:\s+"[^"]*")?\)/g; let match; while ((match = linkRegex.exec(line)) !== null) { /* Destructure regex match into: - fullMatch: the entire link - linkText: the link text - traversalPart: the '../dir/' part - traversalDir: just the 'dir' part - targetPath: the rest of the path */ const [fullMatch, linkText, traversalPart, traversalDir, targetPath] = match; // Check if traversal directory matches current directory if (traversalDir === dirName) { // Calculate positions for precise highlighting const linkStart = match.index; const traversalStart = fullMatch.indexOf(traversalPart); onError({ lineNumber: i + 1, range: [linkStart + traversalStart, traversalPart.length], detail: `Link path does not need: '../${traversalDir}/'. Shorten link path to '[${linkText}](${targetPath})'`, fixInfo: { editColumn: linkStart + 1, deleteCount: fullMatch.length, insertText: `[${linkText}](${targetPath})`, }, }); } } }); }, }; ================================================ FILE: docs/.vale/gitlab_base/Ability.yml ================================================ --- name: gitlab_base.Ability description: | Focus on the feature, not the user's capabilities. extends: existence message: "Try to replace ('%s') with more precise language, unless this content is about security. See the word list for details." ignorecase: true vocab: false level: suggestion link: https://docs.gitlab.com/development/documentation/styleguide/word_list/#ability-able tokens: - ability to - ability - able to - able ================================================ FILE: docs/.vale/gitlab_base/AlertFormat.yml ================================================ --- name: gitlab_base.AlertFormat description: | Makes sure alerts use Markdown alerts, not hugo shortcodes. extends: existence message: "Use markdown alert box syntax for notes, warnings, and feature flag or disclaimer notes." 
link: https://docs.gitlab.com/development/documentation/styleguide/#alert-boxes vocab: false ignorecase: true level: error nonword: true scope: raw tokens: - '\{\{< alert type="(note|warning|flag|disclaimer)" >\}\}' - '^ *\*?\*?note\*?\*?:(?! ")' ================================================ FILE: docs/.vale/gitlab_base/BadPlurals.yml ================================================ --- name: gitlab_base.BadPlurals description: | Don't write plural words with the '(s)' construction. 'HTTP(S)' is acceptable. extends: existence message: "Rewrite '%s' to be plural without parentheses." link: https://docs.gitlab.com/development/documentation/styleguide/word_list/#s vocab: false level: warning ignorecase: true nonword: true tokens: - '(?>>>>>> .+)\n' ================================================ FILE: docs/.vale/gitlab_base/MultiLineLinks.yml ================================================ --- name: gitlab_base.MultiLineLinks description: | Checks that links are all on a single line. extends: existence message: "Put the full link on one line, even if the link is very long." link: https://docs.gitlab.com/development/documentation/styleguide/#links vocab: false level: error scope: raw raw: - '\[[^\[\]]*?\n[^\[\]]*?\]\([^\)]*?\)|' - '\[[^\[\]]*?\]\([^\)]*?\n[^\)]*\)' ================================================ FILE: docs/.vale/gitlab_base/NonStandardHyphens.yml ================================================ --- name: gitlab_base.NonStandardHyphens description: | Do not use non-standard dashes or hyphens. Use standard hyphen ("minus"), separate sentences, or commas instead: - U+2010: HYPHEN - U+2011: NON-BREAKING HYPHEN - U+2013: EN DASH - U+2014: EM DASH extends: existence message: "Do not use non-standard dashes or hyphens. 
Use standard hyphen ('minus'), separate sentences, or commas instead" vocab: false nonword: true level: warning link: https://docs.gitlab.com/development/documentation/styleguide/#punctuation scope: text raw: - '[\u2010\u2011\u2013\u2014]' ================================================ FILE: docs/.vale/gitlab_base/NonStandardListDashes.yml ================================================ --- name: gitlab_base.NonStandardListDashes description: | Use only standard dashes (hyphens). Do not use: - U+2013: EN DASH - U+2014: EM DASH extends: existence message: "Do not use EN or EM dashes for list items." vocab: false level: error ignorecase: true nonword: true link: https://docs.gitlab.com/development/documentation/styleguide/#punctuation scope: raw tokens: - '^ *?[\u2013\u2014]' ================================================ FILE: docs/.vale/gitlab_base/NonStandardQuotes.yml ================================================ --- name: gitlab_base.NonStandardQuotes description: | Use only standard single and double quotes, not left or right quotes. extends: existence message: "Use standard single quotes or double quotes only. Do not use left or right quotes." vocab: false level: warning ignorecase: true link: https://docs.gitlab.com/development/documentation/styleguide/#punctuation scope: raw raw: - '[‘’“”]' ================================================ FILE: docs/.vale/gitlab_base/NonStandardSpaces.yml ================================================ --- name: gitlab_base.NonStandardSpaces description: | Use only standard spaces. Do not use: - U+202F: NARROW NO-BREAK SPACE [NNBSP] - U+00A0: NO-BREAK SPACE [NBSP] - U+200B: ZERO WIDTH SPACE [ZWSP] extends: existence message: "Use standard spaces only. Do not use no-break or zero width spaces." 
vocab: false level: error ignorecase: true link: https://docs.gitlab.com/development/documentation/styleguide/#punctuation scope: raw raw: - '[\u202F\u00A0\u200B]' ================================================ FILE: docs/.vale/gitlab_base/Offerings.yml ================================================ --- name: gitlab_base.Offerings description: | Tests the offering information in the tier badges that appear below topic titles. For a list of all options, see https://docs.gitlab.com/development/documentation/styleguide/availability_details/#available-options extends: substitution message: "The offerings are 'GitLab Self-Managed' and 'GitLab Dedicated', with that exact capitalization." link: https://docs.gitlab.com/development/documentation/styleguide/availability_details/#available-options vocab: false level: warning action: name: replace ignorecase: false swap: - 'GitLab [Ss]elf-managed': GitLab Self-Managed - '(? 1" ================================================ FILE: docs/.vale/gitlab_base/Repetition.yml ================================================ --- name: gitlab_base.Repetition description: | Checks for duplicate words, like `the the` or `and and`. extends: repetition message: "Remove this duplicate word: '%s'." vocab: false level: error alpha: true tokens: - '[^\s]+' ================================================ FILE: docs/.vale/gitlab_base/SelfReferential.yml ================================================ --- name: gitlab_base.SelfReferential description: | Checks for wordy, self-referential phrases. extends: existence message: "Rewrite '%s'. Talk directly about the feature or purpose instead." 
ignorecase: true nonword: true vocab: false level: warning link: https://docs.gitlab.com/development/documentation/styleguide/#self-referential-writing tokens: - This (page|guide) (builds|contains|covers|describes|documents|explains|guides|lists|offers|provides|shows) ================================================ FILE: docs/.vale/gitlab_base/SentenceLength.yml ================================================ --- name: gitlab_base.SentenceLength description: | Counts words in a sentence and alerts if a sentence exceeds 25 words. extends: occurrence message: "Improve readability by using fewer than 25 words in this sentence." scope: sentence link: https://docs.gitlab.com/development/documentation/styleguide/#language level: suggestion max: 25 token: \b(\w+)\b ================================================ FILE: docs/.vale/gitlab_base/SentenceSpacing.yml ================================================ --- name: gitlab_base.SentenceSpacing description: | Checks for incorrect spacing (no spaces, or more than one space) around punctuation. extends: existence message: "Use exactly one space with punctuation. Check '%s' for spacing problems." link: https://docs.gitlab.com/development/documentation/styleguide/#punctuation vocab: false level: error nonword: true tokens: - '[a-z][.?!,][A-Z]' - '[\w.?!,\(\)\-":] {2,}[\w.?!,\(\)\-":]' - '[a-z] +[.?!,:] +' ================================================ FILE: docs/.vale/gitlab_base/Simplicity.yml ================================================ --- name: gitlab_base.Simplicity description: | Checks for words implying ease of use, to avoid cognitive dissonance for frustrated users. extends: existence message: "Remove '%s'. Be precise instead of subjective." 
vocab: false level: warning ignorecase: true link: https://docs.gitlab.com/development/documentation/styleguide/word_list/ tokens: - easy - easily - handy - simple - simply - useful ================================================ FILE: docs/.vale/gitlab_base/Spelling.yml ================================================ --- name: gitlab_base.Spelling description: | Checks for possible spelling mistakes in content, not code. Results from links using angle brackets () should be corrected. If a word is flagged as a spelling mistake incorrectly, such as a product name, you can submit an MR to update `spelling-exceptions.txt` with the missing word. Commands, like `git clone` must use backticks, and must not be added to the exceptions. extends: spelling message: "Check the spelling of '%s'. If the spelling is correct, ask a Technical Writer to add this word to the spelling exception list." vocab: false level: warning ignore: - gitlab_base/spelling-exceptions.txt ================================================ FILE: docs/.vale/gitlab_base/SubstitutionWarning.yml ================================================ --- name: gitlab_base.SubstitutionWarning description: | Checks for misused terms or common shorthand that should not be used at GitLab, but can't be flagged as errors. Substitutions.yml also exists. extends: substitution message: "Use '%s' instead of '%s'." link: https://docs.gitlab.com/development/documentation/styleguide/word_list/ vocab: false level: warning action: name: replace ignorecase: true swap: active user: "billable user" active users: "billable users" agnostic: "platform-independent' or 'vendor-neutral" air(?:-| )?gapped: "offline environment" bullet: "list item" (?\n\{\{<' ================================================ FILE: docs/.vale/gitlab_docs/HistoryItems.yml ================================================ --- name: gitlab_docs.HistoryItems description: | Ensures history items are properly formatted. 
extends: existence message: "History items must always use Hugo shortcodes and be a list with each line starting with '-', one item per line, even if there is only one item." link: https://docs.gitlab.com/development/documentation/feature_flags/#add-history-text vocab: false level: error nonword: true scope: raw tokens: - '\{\{< history >\}\}\n\n[^-]' - '^##.*?\n\n- \[?(Introduced|Changed|Renamed|Updated|Improved|Generally)' ================================================ FILE: docs/.vale/gitlab_docs/HistoryItemsOrder.yml ================================================ --- name: gitlab_docs.HistoryItemsOrder description: | Ensures history items come before the Details block. extends: existence message: "History items must follow the tier, offering, or status details." link: https://docs.gitlab.com/development/documentation/feature_flags/#add-history-text vocab: false level: error nonword: true scope: raw tokens: - '\{\{< /history >\}\}\n\n?\{\{< details >\}\}' ================================================ FILE: docs/.vale/gitlab_docs/ImagesOld.yml ================================================ --- name: gitlab_docs.ImagesOld description: | Checks for images that are not from supported versions of GitLab. extends: existence message: "Review this image. It might be out of date." link: https://docs.gitlab.com/development/documentation/styleguide/#anchor-links vocab: false level: suggestion scope: raw raw: - '!\[[^\]]*\]\([^\)]*_v(1[0-4]|[3-9])[^\)]*\)' ================================================ FILE: docs/.vale/gitlab_docs/InternalLinkCase.yml ================================================ --- name: gitlab_docs.InternalLinkCase description: | Checks that anchor fragments on internal links are in lower-case. extends: existence message: "Use lowercase for the anchor link." link: https://docs.gitlab.com/development/documentation/styleguide/#anchor-links vocab: false level: error scope: raw raw: - '(?}}' in Hugo shortcodes. 
For example {{< Yes >}}, not {{}}." link: https://docs.gitlab.com/development/documentation/styleguide/#gitlab-svg-icons vocab: false ignorecase: true level: warning nonword: true scope: raw tokens: - '\{\{<[^ /]|[^ /]>\}\}' ================================================ FILE: docs/.vale/gitlab_docs/TabsLinks.yml ================================================ --- name: gitlab_docs.TabsLinks description: | Checks for the presence of links to individual GitLab UI tabs. extends: existence message: "Do not include tabs query parameters in links." link: https://docs.gitlab.com/development/documentation/styleguide/#tabs vocab: false level: error scope: raw raw: - '\[[^\]]+\]\(.*?\.md\?tab=.*?\)' ================================================ FILE: docs/.vale/gitlab_docs/UIText.yml ================================================ --- name: gitlab_docs.UIText description: | Checks that bold around UI text is formatted properly in navigation descriptions. extends: existence message: "The '>' should not be in the bold emphasis in navigation descriptions. Make every step bold separately." link: https://docs.gitlab.com/development/documentation/styleguide/#how-to-write-navigation-task-steps vocab: false level: warning scope: raw raw: - '\*\*.*? 
> [^\*].*?\*\*' ================================================ FILE: docs/.vale/vale-json.tmpl ================================================ {{- /* Modify Vale's output https://vale.sh/manual/output/ */ -}} {{- $fileIndexes := len .Files -}} {{- $fileIndexes = sub $fileIndexes 1 -}} [ {{- /* Range over the linted files */ -}} {{- range $idx1, $a := .Files -}} {{- $path := .Path -}} {{/* Range over the file's alerts */}} {{- range $idx2, $b := .Alerts -}} {{- $error := "info" -}} {{- if eq .Severity "error" -}} {{- $error = "blocker" -}} {{- else if eq .Severity "warning" -}} {{- $error = "major" -}} {{- end}} {{- /* Variables setup */ -}} {{- $loc := printf "%d" .Line -}} {{- $message := printf "%s" .Message -}} {{- $moreinfo := "" -}} {{- if .Link -}} {{- $moreinfo = printf " See %s" .Link -}} {{- end -}} {{- if $idx2 -}},{{- end -}} {{/* Output */}} { "description": "{{ $message }}{{ $moreinfo }}", "fingerprint": "{{ $path }}-{{ $loc }}", "severity": "{{ $error }}", "location": { "path": "{{ $path }}", "lines": { "begin": {{ $loc }} } } } {{- end}}{{- if (lt $idx1 $fileIndexes) -}},{{- end -}} {{- end}} ] ================================================ FILE: docs/.vale/vale.tmpl ================================================ {{- /* Modify Vale's output https://docs.errata.ai/vale/cli#--output */ -}} {{- /* Keep track of our various counts */ -}} {{- $e := 0 -}} {{- $w := 0 -}} {{- $s := 0 -}} {{- /* Range over the linted files */ -}} {{- range .Files}} {{- $path := .Path | underline -}} {{- /* Range over the file's alerts */ -}} {{- range .Alerts -}} {{- $error := "" -}} {{- if eq .Severity "error" -}} {{- $error = .Severity | red -}} {{- $e = add1 $e -}} {{- else if eq .Severity "warning" -}} {{- $error = .Severity | yellow -}} {{- $w = add1 $w -}} {{- else -}} {{- $error = .Severity | blue -}} {{- $s = add1 $s -}} {{- end}} {{- /* Variables setup */ -}} {{- $path = $path -}} {{- $loc := printf "Line %d, position %d" .Line (index .Span 0) -}} {{- $check 
:= printf "%s" .Check -}} {{- $message := printf "%s" .Message -}} {{- $link := printf "%s" .Link -}} {{- /* Output */ -}} {{ $path }}: {{ $loc }} (rule {{ $check }}) {{ $error }}: {{ $message }} More information: {{ $link }} {{end -}} {{end -}} {{- $e}} {{"errors" | red}}, {{$w}} {{"warnings" | yellow}}, and {{$s}} {{"suggestions" | blue}} found in {{.LintedTotal}} {{.LintedTotal | int | plural "file" "files"}}. ================================================ FILE: docs/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: GitLab Runner --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} GitLab Runner is an application that works with GitLab CI/CD to run jobs in a pipeline. When developers push code to GitLab, they can define automated tasks in a `.gitlab-ci.yml` file. These tasks might include running tests, building applications, or deploying code. GitLab Runner is the application that executes these tasks on computing infrastructure. As an administrator, you are responsible for providing and managing the infrastructure where these CI/CD jobs run. This involves installing GitLab Runner applications, configuring them, and ensuring they have adequate capacity to handle your organization's CI/CD workload. ## What GitLab Runner does GitLab Runner connects to your GitLab instance and waits for CI/CD jobs. When a pipeline runs, GitLab sends jobs to available runners. The runner executes the job and reports the results back to GitLab. GitLab Runner has the following features. - Run multiple jobs concurrently. - Use multiple tokens with multiple servers (even per-project). - Limit the number of concurrent jobs per-token. - Jobs can be run: - Locally. - Using Docker containers. - Using Docker containers and executing job over SSH. 
- Using Docker containers with autoscaling on different clouds and virtualization hypervisors. - Connecting to a remote SSH server. - Is written in Go and distributed as single binary without any other requirements. - Supports Bash, PowerShell Core, and Windows PowerShell. - Works on GNU/Linux, macOS, and Windows (pretty much anywhere you can run Docker). - Allows customization of the job running environment. - Automatic configuration reload without restart. - Seamless setup with support for Docker, Docker-SSH, Parallels, or SSH running environments. - Enables caching of Docker containers. - Seamless installation as a service for GNU/Linux, macOS, and Windows. - Embedded Prometheus metrics HTTP server. - Referee workers to monitor and pass Prometheus metrics and other job-specific data to GitLab. ## Runner execution flow This diagram shows how runners are registered and how jobs are requested and handled. It also shows which actions use [registration and authentication tokens](https://docs.gitlab.com/api/runners/#registration-and-authentication-tokens), and [job tokens](https://docs.gitlab.com/ci/jobs/ci_job_token/). ```mermaid sequenceDiagram participant GitLab participant GitLabRunner participant Executor opt registration GitLabRunner ->>+ GitLab: POST /api/v4/runners with registration_token GitLab -->>- GitLabRunner: Registered with runner_token end loop job requesting and handling GitLabRunner ->>+ GitLab: POST /api/v4/jobs/request with runner_token GitLab -->>+ GitLabRunner: job payload with job_token GitLabRunner ->>+ Executor: Job payload Executor ->>+ GitLab: clone sources with job_token Executor ->>+ GitLab: download artifacts with job_token Executor -->>- GitLabRunner: return job output and status GitLabRunner -->>- GitLab: updating job output and status with job_token end ``` ## Runner deployment options ### GitLab-hosted runners [GitLab-hosted runners](https://docs.gitlab.com/ci/runners/) are managed by GitLab and available on GitLab.com. 
You don't need to install or maintain these runners - GitLab provides them as a service. However, you have limited control over the execution environment and cannot customize the infrastructure. ### Self-managed runners Self-managed runners are GitLab Runner instances that you install, configure, and manage in your own infrastructure. You can [install](install/_index.md) and register self-managed runners on all GitLab installations. As an administrator, you typically work with self-managed runners. Unlike GitLab-hosted runners, which are hosted and managed by GitLab, you have complete control over self-managed runners. ## GitLab Runner versions For compatibility reasons, the GitLab Runner [major.minor](https://en.wikipedia.org/wiki/Software_versioning) version should stay in sync with the GitLab major and minor version. Older runners may still work with newer GitLab versions, and vice versa. However, features may not be available or work properly if a version difference exists. Backward compatibility is guaranteed between minor version updates. However, sometimes minor version updates of GitLab can introduce new features that require GitLab Runner to be on the same minor version. If you host your own runners but host your repositories on GitLab.com, keep GitLab Runner [updated](install/_index.md) to the latest version, as GitLab.com is [updated continuously](https://gitlab.com/gitlab-org/release/tasks/-/issues). ## Troubleshooting Learn how to [troubleshoot](faq/_index.md) common issues. ## Glossary - **GitLab Runner**: The application that executes CI/CD jobs from GitLab pipelines on a target computing platform. - **Runner**: A configured instance of GitLab Runner that can execute jobs. Depending on the type of executor, this machine could be local to the runner manager (`shell` or `docker` executor) or a remote machine created by an autoscaler (`docker-autoscaler` or `kubernetes`). 
- **Runner configuration**: A single `[[runners]]` entry in the `config.toml` that displays as a **runner** in the UI. - **Runner manager**: The process that reads the `config.toml` file and runs all the runner configurations and job executions concurrently. - **Machine**: A virtual machine (VM) or pod that the runner operates in. GitLab Runner automatically generates a unique, persistent machine ID so that when multiple machines are given the same runner configuration, jobs can be routed separately but the runner configurations are grouped in the UI. - **Executor**: The method GitLab Runner uses to execute jobs (Docker, Shell, Kubernetes, etc.). - **Pipeline**: A collection of jobs that run automatically when code is pushed to GitLab. - **Job**: A single task in a pipeline, such as running tests or building an application. - **Runner token**: A unique identifier that allows a runner to authenticate with GitLab. - **Tags**: Labels assigned to runners that determine which jobs they can execute. - **Concurrent jobs**: The number of jobs a runner can execute simultaneously. - **Self-managed runner**: A runner installed and managed on your own infrastructure. - **GitLab-hosted runner**: A runner provided and managed by GitLab. For more information, see the official [GitLab Word List](https://docs.gitlab.com/development/documentation/styleguide/word_list/#gitlab-runner) and the GitLab Architecture entry for [GitLab Runner](https://docs.gitlab.com/development/architecture/#gitlab-runner). ## Contributing Contributions are welcome. See [`CONTRIBUTING.md`](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/CONTRIBUTING.md) and the [development documentation](development/_index.md) for details. If you're a reviewer of the GitLab Runner project, take a moment to read the [Reviewing GitLab Runner](development/reviewing-gitlab-runner.md) document. 
You can also review [the release process for the GitLab Runner project](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/PROCESS.md). ## Changelog See the [CHANGELOG](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/CHANGELOG.md) to view recent changes. ## License This code is distributed under the MIT license. View the [LICENSE](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/LICENSE) file. ================================================ FILE: docs/commands/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: GitLab Runner commands --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} GitLab Runner contains a set of commands you use to register, manage, and run your builds. You can check the list of commands by executing: ```shell gitlab-runner --help ``` Append `--help` after a command to see its specific help page: ```shell gitlab-runner --help ``` ## Using environment variables Most of the commands support environment variables as a method to pass the configuration to the command. You can see the name of the environment variable when invoking `--help` for a specific command. For example, you can see below the help message for the `run` command: ```shell gitlab-runner run --help ``` The output is similar to: ```plaintext NAME: gitlab-runner run - run multi runner service USAGE: gitlab-runner run [command options] [arguments...] OPTIONS: -c, --config "/Users/ayufan/.gitlab-runner/config.toml" Config file [$CONFIG_FILE] ``` ## Running in debug mode When you're looking for the cause of an undefined behavior or error, use debug mode. 
To run a command in debug mode, prepend the command with `--debug`: ```shell gitlab-runner --debug <command> ```
| | `run`, `run-single` | `SIGINT`, `SIGTERM` | Abort all running builds and exit as soon as possible. Use twice to exit now (**forceful shutdown**). | | `run`, `run-single` | `SIGQUIT` | Stop accepting new builds. Exit as soon as the running builds finish (**graceful shutdown**). | | `run` | `SIGHUP` | Force to reload configuration file. | For example, to force a reload of a runner's configuration file, run: ```shell sudo kill -SIGHUP ``` For [graceful shutdowns](#gitlab-runner-stop-doesnt-shut-down-gracefully): ```shell sudo kill -SIGQUIT ``` > [!warning] > Do **not** use `killall` or `pkill` for graceful shutdowns if you are using `shell` > or `docker` executors. This can cause improper handling of the signals due to sub-processes > being killed as well. Use it only on the main process handling the jobs. Some operating systems are configured to automatically restart services when they fail (which is the default on some platforms). If your operating system has this configuration, it might automatically restart the runner if it is shut down by the signals above. ## Commands overview You see the following if you run `gitlab-runner` without any arguments: ```plaintext NAME: gitlab-runner - a GitLab Runner USAGE: gitlab-runner [global options] command [command options] [arguments...] VERSION: 17.10.1 (ef334dcc) AUTHOR: GitLab Inc. 
COMMANDS: list List all configured runners run run multi runner service register register a new runner reset-token reset a runner's token install install service uninstall uninstall service start start service stop stop service restart restart service status get status of a service run-single start single runner unregister unregister specific runner verify verify all registered runners wrapper start multi runner service wrapped with gRPC manager server fleeting manage fleeting plugins artifacts-downloader download and extract build artifacts (internal) artifacts-uploader create and upload build artifacts (internal) cache-archiver create and upload cache artifacts (internal) cache-extractor download and extract cache artifacts (internal) cache-init changed permissions for cache paths (internal) health-check check health for a specific address proxy-exec execute internal commands (internal) read-logs reads job logs from a file, used by kubernetes executor (internal) help, h Shows a list of commands or help for one command GLOBAL OPTIONS: --cpuprofile value write cpu profile to file [$CPU_PROFILE] --debug debug mode [$RUNNER_DEBUG] --log-format value Choose log format (options: runner, text, json) [$LOG_FORMAT] --log-level value, -l value Log level (options: debug, info, warn, error, fatal, panic) [$LOG_LEVEL] --help, -h show help --version, -v print the version ``` Below we explain what each command does in detail. ## Registration-related commands Use the following commands to register a new runner, or list and verify them if they are still registered. 
- [`gitlab-runner register`](#gitlab-runner-register) - [Interactive registration](#interactive-registration) - [Non-interactive registration](#non-interactive-registration) - [`gitlab-runner list`](#gitlab-runner-list) - [`gitlab-runner verify`](#gitlab-runner-verify) - [`gitlab-runner unregister`](#gitlab-runner-unregister) These commands support the following arguments: | Parameter | Default | Description | |------------|-----------------------------------------------------------|-------------| | `--config` | See the [configuration file section](#configuration-file) | Specify a custom configuration file to be used | ### `gitlab-runner register` This command registers your runner in GitLab by using the GitLab [Runners API](https://docs.gitlab.com/api/runners/). The registered runner is added to the [configuration file](#configuration-file). You can use multiple configurations in a single installation of GitLab Runner. Executing `gitlab-runner register` adds a new configuration entry. It doesn't remove the previous ones. You can register a runner: - interactively. - non-interactively. > [!note] > Runners can be registered directly by using the GitLab [Runners API](https://docs.gitlab.com/api/runners/) but > configuration is not generated automatically. #### Interactive registration This command is usually used in interactive mode (**default**). You are asked multiple questions during a runner's registration. 
These questions can be pre-filled
> [!warning] > This operation cannot be undone. It updates the configuration file, so > make sure to have a backup of `config.toml` before executing it. ```shell gitlab-runner verify --delete ``` ### `gitlab-runner unregister` This command unregisters registered runners by using the GitLab [Runners API](https://docs.gitlab.com/api/runners/#delete-a-runner). It expects either: - A full URL and the runner's token. - The runner's name. With the `--all-runners` option, it unregisters all the attached runners. > [!note] > Runners can be unregistered with the GitLab [Runners API](https://docs.gitlab.com/api/runners/#delete-a-runner) but the > configuration is not modified for the user. - If the runner was created with a runner registration token, `gitlab-runner unregister` with the runner authentication token deletes the runner. - If the runner was created in the GitLab UI or with the Runners API, `gitlab-runner unregister` with the runner authentication token deletes the runner manager, but not the runner. To completely remove the runner, [delete the runner in the runners administration page](https://docs.gitlab.com/ci/runners/runners_scope/#delete-instance-runners) or use the [`DELETE /runners`](https://docs.gitlab.com/api/runners/#delete-a-runner) REST API endpoint. To unregister a single runner, first get the runner's details by executing `gitlab-runner list`: ```plaintext test-runner Executor=shell Token=t0k3n URL=http://gitlab.example.com ``` Then use this information to unregister it, using one of the following commands. > [!warning] > This operation cannot be undone. It updates the configuration file, so > make sure to have a backup of `config.toml` before executing it. #### By URL and token ```shell gitlab-runner unregister --url "http://gitlab.example.com/" --token t0k3n ``` #### By name ```shell gitlab-runner unregister --name test-runner ``` > [!note] > If there is more than one runner with the given name, only the first one is removed. 
#### All runners ```shell gitlab-runner unregister --all-runners ``` ### `gitlab-runner reset-token` This command resets a runner's token by using the GitLab Runners API, with either the [runner ID](https://docs.gitlab.com/api/runners/#reset-runners-authentication-token-by-using-the-runner-id) or the [current token](https://docs.gitlab.com/api/runners/#reset-runners-authentication-token-by-using-the-current-token). It expects the runner's name (or URL and ID), and an optional PAT if resetting by runner ID. The PAT and runner ID are intended to be used if the token has already expired. With the `--all-runners` option, it resets all the attached runners' tokens. #### With runner's current token ```shell gitlab-runner reset-token --name test-runner ``` #### With PAT and runner name ```shell gitlab-runner reset-token --name test-runner --pat PaT ``` #### With PAT, GitLab URL, and runner ID ```shell gitlab-runner reset-token --url "https://gitlab.example.com/" --id 12345 --pat PaT ``` #### All runners ```shell gitlab-runners reset-token --all-runners ``` ## Service-related commands The following commands allow you to manage the runner as a system or user service. Use them to install, uninstall, start, and stop the runner service. 
- [`gitlab-runner install`](#gitlab-runner-install) - [`gitlab-runner uninstall`](#gitlab-runner-uninstall) - [`gitlab-runner start`](#gitlab-runner-start) - [`gitlab-runner stop`](#gitlab-runner-stop) - [`gitlab-runner restart`](#gitlab-runner-restart) - [`gitlab-runner status`](#gitlab-runner-status) - [Multiple services](#multiple-services) - [**Access Denied** when running the service-related commands](#access-denied-when-running-the-service-related-commands) All service related commands accept these arguments: | Parameter | Default | Description | |------------------|---------------------------------------------------|-------------| | `--service` | `gitlab-runner` | Specify custom service name | | `--config` | See the [configuration file](#configuration-file) | Specify a custom configuration file to use | | `--user-service` | See [user service](#user-service) | Configure GitLab Runner to run as a user service (systemd) | ### `gitlab-runner install` This command installs GitLab Runner as a service. It accepts different sets of arguments depending on which system it's run on. When run on **Windows** or as super-user, it accepts the `--user` flag which allows you to drop privileges of builds run with the **shell** executor. 
| Parameter | Default | Description | |-----------------------|---------------------------------------------------|-------------| | `--service` | `gitlab-runner` | Specify service name to use | | `--config` | See the [configuration file](#configuration-file) | Specify a custom configuration file to use | | `--syslog` | `true` (for non systemd systems) | Specify if the service should integrate with system logging service | | `--working-directory` | the current directory | Specify the root directory where all data is stored when builds are run with the **shell** executor | | `--user` | `root` | Specify the user that executes the builds | | `--password` | none | Specify the password for the user that executes the builds | ### `gitlab-runner uninstall` This command stops and uninstalls GitLab Runner from being run as an service. ### `gitlab-runner start` This command starts the GitLab Runner service. ### `gitlab-runner stop` This command stops the GitLab Runner service. ### `gitlab-runner restart` This command stops and then starts the GitLab Runner service. ### `gitlab-runner status` This command prints the status of the GitLab Runner service. The exit code is zero when the service is running and non-zero when the service is not running. ### Multiple services By specifying the `--service` flag, it is possible to have multiple GitLab Runner services installed, with multiple separate configurations. ### User service You can use some init systems (like `systemd`) to manage services as [user services](https://wiki.archlinux.org/title/Systemd/User). If your init system provides this feature and you want to manage the `gitlab-runner` service as a user service, specify the `--user-service` flag when you run service-related commands. ## Run-related commands This command allows to fetch and process builds from GitLab. ### `gitlab-runner run` The `gitlab-runner run` command is the main command that is executed when GitLab Runner is started as a service. 
It reads all defined runners from `config.toml` and tries to run all of them. The command is executed and works until it [receives a signal](#signals). It accepts the following parameters. | Parameter | Default | Description | |-----------------------|-----------------------------------------------|-------------| | `--config` | See [configuration-file](#configuration-file) | Specify a custom configuration file to be used | | `--working-directory` | the current directory | Specify the root directory where all data is stored when builds run with the **shell** executor | | `--user` | the current user | Specify the user that executes builds | | `--syslog` | `false` | Send all logs to SysLog (Unix) or EventLog (Windows) | | `--listen-address` | empty | Address (`:`) on which the Prometheus metrics HTTP server should be listening | ### `gitlab-runner run-single` {{< history >}} - Ability to use a configuration file [introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/37670) in GitLab Runner 17.1. {{< /history >}} Use this supplementary command to run a single build from a single GitLab instance. It can: - Take all options either as CLI parameters or environment variables, including the GitLab URL and Runner token. For example, a single job with all parameters specified explicitly: ```shell gitlab-runner run-single -u http://gitlab.example.com -t my-runner-token --executor docker --docker-image ruby:3.3 ``` - Read from a configuration file to use a specific runner's configuration. For example, a single job with a configuration file: ```shell gitlab-runner run-single -c ~/.gitlab-runner/config.toml -r runner-name ``` You can see all possible configuration options by using the `--help` flag: ```shell gitlab-runner run-single --help ``` You can use the `--max-builds` option to control how many builds the runner executes before exiting. The default of `0` means that the runner has no build limit and jobs run forever. 
You can also use the `--wait-timeout` option to control how long the runner waits for a job before exiting. The default of `0` means that the runner has no timeout and waits forever between jobs. ## Internal commands GitLab Runner is distributed as a single binary and contains a few internal commands that are used during builds. ### `gitlab-runner artifacts-downloader` Download the artifacts archive from GitLab. ### `gitlab-runner artifacts-uploader` Upload the artifacts archive to GitLab. ### `gitlab-runner cache-archiver` Create a cache archive, store it locally or upload it to an external server. ### `gitlab-runner cache-extractor` Restore the cache archive from a locally or externally stored file. ## Troubleshooting Below are some common pitfalls. ### **Access Denied** when running the service-related commands Usually the [service related commands](#service-related-commands) require administrator privileges: - On Unix (Linux, macOS, FreeBSD) systems, prefix `gitlab-runner` with `sudo` - On Windows systems use the elevated command prompt. Run an `Administrator` command prompt. To write `Command Prompt` in the Windows search field, right-click and select `Run as administrator`. Confirm that you want to execute the elevated command prompt. ## `gitlab-runner stop` doesn't shut down gracefully When GitLab Runner is installed on a host and runs local executors, it starts additional processes for operations like downloading or uploading artifacts, or handling cache. These processes are executed as `gitlab-runner` commands, which means that you can use `pkill -QUIT gitlab-runner` or `killall QUIT gitlab-runner` to kill them. When you kill them, the operations they are responsible for fail. Here are two ways to prevent this: - Register the runner as a local service (like `systemd`) with `SIGQUIT` as the kill signal, and use `gitlab-runner stop` or `systemctl stop gitlab-runner.service`. 
Here is an example configuration to enable this behavior: ```ini ; /etc/systemd/system/gitlab-runner.service.d/kill.conf [Service] KillSignal=SIGQUIT TimeoutStopSec=infinity ``` - To apply the configuration change, after you create this file, reload `systemd` with `systemctl daemon-reload`. - Manually kill the process with `kill -SIGQUIT `. You have to find the `pid` of the main `gitlab-runner` process. You can find this by looking at logs, as it's displayed on startup: ```shell $ gitlab-runner run Runtime platform arch=arm64 os=linux pid=8 revision=853330f9 version=16.5.0 ``` ### Saving system ID state file: access denied GitLab Runner 15.7 and 15.8 might not start if it lacks write permissions for the directory that contains the `config.toml` file. When GitLab Runner starts, it searches for the `.runner_system_id` file in the directory that contains the `config.toml`. If it cannot find the `.runner_system_id` file, it creates a new one. If GitLab Runner doesn't have write permissions, it fails to start. To resolve this issue, temporarily allow file write permissions, then run `gitlab-runner run`. After the `.runner_system_id` file is created, you can reset the permissions to read-only. ================================================ FILE: docs/configuration/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Configuration, certificates, autoscaling, proxy setup. title: Configure GitLab Runner --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} Learn how to configure GitLab Runner. - [Advanced configuration options](advanced-configuration.md): Use the [`config.toml`](https://github.com/toml-lang/toml) configuration file to edit runner settings. 
- [Use self-signed certificates](tls-self-signed.md): Configure certificates that verify TLS peers when connecting to the GitLab server. - [Autoscale with Docker Machine](autoscale.md): Execute jobs on machines created automatically by Docker Machine. - [Autoscale GitLab Runner on AWS EC2](runner_autoscale_aws/_index.md): Execute jobs on auto-scaled AWS EC2 instances. - [Autoscale GitLab CI on AWS Fargate](runner_autoscale_aws_fargate/_index.md): Use the AWS Fargate driver with the GitLab custom executor to run jobs in AWS ECS. - [Graphical Processing Units](gpus.md): Use GPUs to execute jobs. - [The init system](init.md): GitLab Runner installs its init service files based on your operating system. - [Supported shells](../shells/_index.md): Execute builds on different systems by using shell script generators. - [Security considerations](../security/_index.md): Be aware of potential security implications when running your jobs with GitLab Runner. - [Runner monitoring](../monitoring/_index.md): Monitor the behavior of your runners. - [Clean up Docker cache automatically](../executors/docker.md#clear-the-docker-cache): If you are running low on disk space, use a cron job to clean old containers and volumes. - [Configure GitLab Runner to run behind a proxy](proxy.md): Set up a Linux proxy and configure GitLab Runner. This setup works well with the Docker executor. - [Configure GitLab Runner for Oracle Cloud Infrastructure (OCI)](oracle_cloud_performance.md): Optimize your GitLab Runner performance in OCI. - [Handling rate limited requests](proxy.md#handling-rate-limited-requests). - [Configure GitLab Runner Operator](configuring_runner_operator.md). 
================================================ FILE: docs/configuration/advanced-configuration.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Advanced configuration --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} To change the behavior of GitLab Runner and individual registered runners, modify the `config.toml` file. You can find the `config.toml` file in: - `/etc/gitlab-runner/` on \*nix systems when GitLab Runner is executed as root. This directory is also the path for service configuration. - `~/.gitlab-runner/` on \*nix systems when GitLab Runner is executed as non-root. - `./` on other systems. GitLab Runner does not require a restart when you change most options. This includes parameters in the `[[runners]]` section and most parameters in the global section, except for `listen_address`. If a runner was already registered, you don't need to register it again. GitLab Runner checks for configuration modifications every 3 seconds and reloads if necessary. GitLab Runner also reloads the configuration in response to the `SIGHUP` signal. ## Configuration validation {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/3924) in GitLab Runner 15.10 {{< /history >}} Configuration validation is a process that checks the structure of the `config.toml` file. The output from the configuration validator provides only `info` level messages. The configuration validation process is for informational purposes only. You can use the output to identify potential issues with your runner configuration. The configuration validation might not catch all possible problems, and the absence of messages does not guarantee that the `config.toml` file is flawless. ## The global section These settings are global. 
They apply to all runners. | Setting | Description | |----------------------|-------------| | `concurrent` | Limits how many jobs can run concurrently, across all registered runners. Each `[[runners]]` section can define its own limit, but this value sets a maximum for all of those values combined. For example, a value of `10` means no more than 10 jobs can run concurrently. `0` is forbidden. If you use this value, the runner process exits with a critical error. View how this setting works with the [Docker Machine executor](autoscale.md#limit-the-number-of-vms-created-by-the-docker-machine-executor), [Instance executor](../executors/instance.md), [Docker Autoscaler executor](../executors/docker_autoscaler.md#example-aws-autoscaling-for-1-job-per-instance), and [`runners.custom_build_dir` configuration](#the-runnerscustom_build_dir-section). | | `log_level` | Defines the log level. Options are `debug`, `info`, `warn`, `error`, `fatal`, and `panic`. This setting has lower priority than the level set by the command-line arguments `--debug`, `-l`, or `--log-level`. | | `log_format` | Specifies the log format. Options are `runner`, `text`, and `json`. This setting has lower priority than the format set by command-line argument `--log-format`. The default value is `runner`, which contains ANSI escape codes for coloring. | | `check_interval` | Defines the interval length, in seconds, between the runner checking for new jobs. The default value is `3`. If set to `0` or lower, the default value is used. | | `sentry_dsn` | Enables tracking of all system level errors to Sentry. | | `connection_max_age` | The maximum duration a TLS keepalive connection to the GitLab server should remain open before reconnecting. The default value is `15m` for 15 minutes. If set to `0` or lower, the connection persists as long as possible. | | `listen_address` | Defines an address (`:`) the Prometheus metrics HTTP server should listen on. 
| | `shutdown_timeout` | Number of seconds until the [forceful shutdown operation](../commands/_index.md#signals) times out and exits the process. The default value is `30`. If set to `0` or lower, the default value is used. | ### Configuration warnings #### Long polling issues GitLab Runner can experience long polling issues in several configuration scenarios when GitLab long polling is turned on through GitLab Workhorse. These range from performance bottlenecks to severe processing delays, depending on the configuration. GitLab Runner workers can get stuck in long polling requests for extended periods (matches the GitLab Workhorse configuration `-apiCiLongPollingDuration`, which defaults to 50 seconds), preventing other jobs from being processed promptly. This issue is related to GitLab CI/CD long polling feature, which is controlled by the GitLab Workhorse `-apiCiLongPollingDuration` setting. When turned on, job requests can block for up to the configured duration while they wait for jobs to become available. The default GitLab Workhorse long polling configuration value is 50 seconds (turned on by default in recent GitLab versions). 
The following are some configuration examples: - Omnibus: `gitlab_workhorse['api_ci_long_polling_duration'] = "50s"` in `/etc/gitlab/gitlab.rb` - Helm chart: Use the `gitlab.webservice.workhorse.extraArgs` setting - CLI: `gitlab-workhorse -apiCiLongPollingDuration 50s` For more information, see: - [Long polling for runners](https://docs.gitlab.com/ci/runners/long_polling/) - [Workhorse configuration](https://docs.gitlab.com/development/workhorse/configuration/) Symptoms: - Jobs from some projects experience delays before starting (duration matches your GitLab instance long polling timeout) - Jobs from other projects run immediately - Warning message in runner logs: `CONFIGURATION: Long polling issues detected` Common problematic scenarios: - Worker starvation bottleneck: The `concurrent` setting is less than the number of runners (severe bottleneck) - Request bottleneck: Runners with `request_concurrency=1` cause job delays during long polling - Build limit bottleneck: Runners with low `limit` settings (≤2) combined with `request_concurrency=1` GitLab Runner automatically detects the problem scenarios and provides tailored solutions in the warning messages. Common solutions include: - Increase the `concurrent` setting to exceed the number of runners. - Set the `request_concurrency` value for high-volume runners to a value higher than 1 (default is 1). Consider turning on [runner monitoring](../monitoring/_index.md) to understand the state of your system and find the best value for the setting. Consider using the `FF_USE_ADAPTIVE_REQUEST_CONCURRENCY` feature flag to automatically adjust `request_concurrency` based on workload. For information about adaptive concurrency, see the [feature flags documentation](feature-flags.md). - Balance `limit` settings with expected job volume. 
##### Example problematic configurations Scenario 1: Worker starvation bottleneck: ```toml concurrent = 2 # Only 2 concurrent workers [[runners]] name = "runner-1" [[runners]] name = "runner-2" [[runners]] name = "runner-3" # 3 runners, only 2 workers - severe bottleneck ``` Scenario 2: Request bottleneck: ```toml concurrent = 4 # 4 workers available [[runners]] name = "high-volume-runner" request_concurrency = 1 # Default: only 1 request at a time limit = 10 # Can handle 10 jobs, but only 1 request slot ``` Scenario 3: Build limit bottleneck: ```toml concurrent = 4 [[runners]] name = "limited-runner" limit = 2 # Only 2 builds allowed request_concurrency = 1 # Only 1 request at a time # Creates severe bottleneck: builds at capacity + request slot blocked by long polling ``` ##### Example corrected configuration ```toml concurrent = 4 # Adequate worker capacity [[runners]] name = "high-volume-runner" request_concurrency = 3 # Allow multiple simultaneous requests limit = 10 [[runners]] name = "balanced-runner" request_concurrency = 2 limit = 5 ``` Here's a configuration example: ```toml # Example `config.toml` file concurrent = 100 # A global setting for job concurrency that applies to all runner sections defined in this `config.toml` file log_level = "warning" log_format = "text" check_interval = 3 # Value in seconds [[runners]] name = "first" url = "Your Gitlab instance URL (for example, `https://gitlab.com`)" executor = "shell" (...) [[runners]] name = "second" url = "Your Gitlab instance URL (for example, `https://gitlab.com`)" executor = "docker" (...) [[runners]] name = "third" url = "Your Gitlab instance URL (for example, `https://gitlab.com`)" executor = "docker-autoscaler" (...) ``` ### `log_format` examples (truncated) #### `runner` ```shell Runtime platform arch=amd64 os=darwin pid=37300 revision=HEAD version=development version Starting multi-runner from /etc/gitlab-runner/config.toml... builds=0 WARNING: Running in user-mode. 
WARNING: Use sudo for system-mode: WARNING: $ sudo gitlab-runner... Configuration loaded builds=0 listen_address not defined, metrics & debug endpoints disabled builds=0 [session_server].listen_address not defined, session endpoints disabled builds=0 ``` #### `text` ```shell INFO[0000] Runtime platform arch=amd64 os=darwin pid=37773 revision=HEAD version="development version" INFO[0000] Starting multi-runner from /etc/gitlab-runner/config.toml... builds=0 WARN[0000] Running in user-mode. WARN[0000] Use sudo for system-mode: WARN[0000] $ sudo gitlab-runner... INFO[0000] INFO[0000] Configuration loaded builds=0 INFO[0000] listen_address not defined, metrics & debug endpoints disabled builds=0 INFO[0000] [session_server].listen_address not defined, session endpoints disabled builds=0 ``` #### `json` ```shell {"arch":"amd64","level":"info","msg":"Runtime platform","os":"darwin","pid":38229,"revision":"HEAD","time":"2025-06-05T15:57:35+02:00","version":"development version"} {"builds":0,"level":"info","msg":"Starting multi-runner from /etc/gitlab-runner/config.toml...","time":"2025-06-05T15:57:35+02:00"} {"level":"warning","msg":"Running in user-mode.","time":"2025-06-05T15:57:35+02:00"} {"level":"warning","msg":"Use sudo for system-mode:","time":"2025-06-05T15:57:35+02:00"} {"level":"warning","msg":"$ sudo gitlab-runner...","time":"2025-06-05T15:57:35+02:00"} {"level":"info","msg":"","time":"2025-06-05T15:57:35+02:00"} {"builds":0,"level":"info","msg":"Configuration loaded","time":"2025-06-05T15:57:35+02:00"} {"builds":0,"level":"info","msg":"listen_address not defined, metrics \u0026 debug endpoints disabled","time":"2025-06-05T15:57:35+02:00"} {"builds":0,"level":"info","msg":"[session_server].listen_address not defined, session endpoints disabled","time":"2025-06-05T15:57:35+02:00"} ``` ### How `check_interval` works If `config.toml` has more than one `[[runners]]` section, GitLab Runner contains a loop that constantly schedules job requests to the GitLab instance 
where GitLab Runner is configured. The following example has `check_interval` of 10 seconds and two `[[runners]]` sections (`runner-1` and `runner-2`). GitLab Runner sends a request every 10 seconds and sleeps for five seconds: 1. Get `check_interval` value (`10s`). 1. Get list of runners (`runner-1`, `runner-2`). 1. Calculate the sleep interval (`10s / 2 = 5s`). 1. Start an infinite loop: 1. Request a job for `runner-1`. 1. Sleep for `5s`. 1. Request a job for `runner-2`. 1. Sleep for `5s`. By default, when a runner receives a job, it immediately re-polls for more jobs until no jobs are available or the number of running jobs reaches `concurrent` or `limit`. To change this behavior, set `strict_check_interval` to `true`. When enabled, the runner strictly respects the check interval and sends one request every `check_interval` seconds (5 seconds in this example), regardless of whether a job was received. Turn on this setting to improve job distribution across a fleet of runners and prevent one runner from handling most jobs while others remain idle. However, jobs might wait longer in the queue. Here's a `check_interval` configuration example: ```toml # Example `config.toml` file concurrent = 100 # A global setting for job concurrency that applies to all runner sections defined in this `config.toml` file. log_level = "warning" log_format = "json" check_interval = 10 # Value in seconds [[runners]] name = "runner-1" url = "Your Gitlab instance URL (for example, `https://gitlab.com`)" executor = "shell" (...) [[runners]] name = "runner-2" url = "Your Gitlab instance URL (for example, `https://gitlab.com`)" executor = "docker" (...) ``` In this example, a job request from the runner's process is made every five seconds. If `runner-1` and `runner-2` are connected to the same GitLab instance, this GitLab instance also receives a new request from this runner every five seconds. Two sleep periods occur between the first and second requests for `runner-1`. 
Each period takes five seconds, so it's approximately 10 seconds between subsequent requests for `runner-1`. The same applies for `runner-2`. If you define more runners, the sleep interval is smaller. However, a request for a runner is repeated after all requests for the other runners and their sleep periods are called. ## The `[machine]` section {{< history >}} - Introduced in GitLab Runner 18.10. {{< /history >}} The `[machine]` section configures global settings for the `docker+machine` executor provider. These settings apply to all runners that use the `docker+machine` executor. ### The `[machine.shutdown_drain]` section When the runner process shuts down, idle machines in the pool are typically left to run. You must clean them up externally (for example, through a `systemd` post-stop hook). The `shutdown_drain` section configures the runner to automatically remove idle machines during shutdown. | Parameter | Type | Description | |-----------------|----------|-------------| | `enabled` | boolean | Turn on automatic removal of idle machines on shutdown. Default: `false`. | | `concurrency` | integer | Number of machines to remove concurrently. Default: `3`. | | `max_retries` | integer | Maximum retry attempts per machine. Default: `3`. | | `retry_backoff` | duration | Base backoff duration between retries (multiplied by attempt number). Default: `5s`. | > [!note] > The drain operation uses the global [`shutdown_timeout`](#the-global-section) setting. > The default timeout of 30 seconds is usually too short for draining machines. > When you turn on shutdown drain, increase `shutdown_timeout` to allow enough time > for all machines to be removed. A minimum of 5 minutes is recommended, but larger > pools may require longer timeouts. The runner logs a warning if the timeout is > too short. 
Example: ```toml concurrent = 10 check_interval = 0 shutdown_timeout = 600 # 10 minutes - required for draining machines [machine] [machine.shutdown_drain] enabled = true concurrency = 5 max_retries = 3 retry_backoff = "5s" [[runners]] name = "my-runner" url = "https://gitlab.example.com/" token = "xxx" executor = "docker+machine" [runners.machine] IdleCount = 5 IdleTime = 600 MachineName = "auto-scale-%s" MachineDriver = "google" MachineOptions = ["google-project=my-project", "google-zone=us-central1-a"] ``` ## The `[session_server]` section To interact with jobs, specify the `[session_server]` section at the root level, outside the `[[runners]]` section. Configure this section once for all runners, not for each individual runner. ```toml # Example `config.toml` file with session server configured concurrent = 100 # A global setting for job concurrency that applies to all runner sections defined in this `config.toml` file log_level = "warning" log_format = "runner" check_interval = 3 # Value in seconds [session_server] listen_address = "[::]:8093" # Listen on all available interfaces on port `8093` advertise_address = "runner-host-name.tld:8093" session_timeout = 1800 ``` When you configure the `[session_server]` section: - For `listen_address` and `advertise_address`, use the format `host:port`, where `host` is the IP address (`127.0.0.1:8093`) or domain (`my-runner.example.com:8093`). The runner uses this information to create a TLS certificate for a secure connection. - Ensure that GitLab can connect to the IP address and port defined in `listen_address` or `advertise_address`. - Ensure that `advertise_address` is a public IP address, unless you have enabled the application setting, [`allow_local_requests_from_web_hooks_and_services`](https://docs.gitlab.com/api/settings/#available-settings). | Setting | Description | |---------------------|-------------| | `listen_address` | An internal URL for the session server. 
| | `advertise_address` | The URL to access the session server. GitLab Runner exposes it to GitLab. If not defined, `listen_address` is used. | | `session_timeout` | Number of seconds the session can stay active after the job completes. The timeout blocks the job from finishing. Default is `1800` (30 minutes). | To disable the session server and terminal support, delete the `[session_server]` section. > [!note] > When your runner instance is already running, you might need to execute `gitlab-runner restart` for the changes in the `[session_server]` section to take effect. If you are using the GitLab Runner Docker image, you must expose port `8093` by adding `-p 8093:8093` to your [`docker run` command](../install/docker.md). ## The `[[runners]]` section Each `[[runners]]` section defines one runner. | Setting | Description | | ------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | `name` | The runner's description. Informational only. | | `url` | GitLab instance URL. Supports environment variable expansion (for example, `$GITLAB_URL` or `${GITLAB_URL}`). | | `token` | The runner's authentication token, which is obtained during runner registration. [Not the same as the registration token](https://docs.gitlab.com/api/runners/#registration-and-authentication-tokens). Supports environment variable expansion (for example, `$RUNNER_TOKEN` or `${RUNNER_TOKEN}`). | | `tls-ca-file` | When using HTTPS, file that contains the certificates to verify the peer. See [Self-signed certificates or custom Certification Authorities documentation](tls-self-signed.md). 
| | `tls-cert-file` | When using HTTPS, file that contains the certificate to authenticate with the peer. | | `tls-key-file` | When using HTTPS, file that contains the private key to authenticate with the peer. | | `limit` | Limit how many jobs can be handled concurrently by this registered runner. `0` (default) means do not limit. View how this setting works with the [Docker Machine](autoscale.md#limit-the-number-of-vms-created-by-the-docker-machine-executor), [Instance](../executors/instance.md), and [Docker Autoscaler](../executors/docker_autoscaler.md#example-aws-autoscaling-for-1-job-per-instance) executors. | | `executor` | The environment or command processor on the host operating system that the runner uses to run a CI/CD job. For more information, see [executors](../executors/_index.md). | | `shell` | Name of shell to generate the script. Default value is [platform dependent](../shells/_index.md). | | `builds_dir` | Absolute path to a directory where builds are stored in the context of the selected executor. For example, locally, Docker, or SSH. | | `cache_dir` | Absolute path to a directory where build caches are stored in context of selected executor. For example, locally, Docker, or SSH. If the `docker` executor is used, this directory needs to be included in its `volumes` parameter. | | `environment` | Append or overwrite environment variables. | | `request_concurrency` | Limit number of concurrent requests for new jobs from GitLab. Default is `1`. For more information about how `concurrency` , `limit`, and `request_concurrency` interact to control job flow, see the [KB article on GitLab Runner concurrency tuning](https://support.gitlab.com/hc/en-us/articles/21324350882076-GitLab-Runner-Concurrency-Tuning-Understanding-request-concurrency). 
| | `strict_check_interval` | Under normal operation, when a runner polls for jobs and receives a job, it immediately re-polls for jobs until the number of jobs being processed matches `concurrent` or `limit`, or until no jobs are available. When you turn on `strict_check_interval`, the runner disables this faster-than-`check_interval` re-polling loop and strictly respects `check_interval`. Default is `false`. | | `output_limit` | Maximum build log size in kilobytes. Default is `4096` (4 MB). | | `pre_get_sources_script` | Commands to be executed on the runner before updating the Git repository and updating submodules. Use it to adjust the Git client configuration first, for example. To insert multiple commands, use a (triple-quoted) multi-line string or `\n` character. | | `post_get_sources_script` | Commands to be executed on the runner after updating the Git repository and updating submodules. To insert multiple commands, use a (triple-quoted) multi-line string or `\n` character. | | `pre_build_script` | Commands to be executed on the runner before executing the job. Runs in the same shell context as `before_script`, `script`, and `post_build_script`. If `pre_build_script` fails, the remaining commands in that context are skipped, but `after_script` still runs. To insert multiple commands, use a (triple-quoted) multi-line string or `\n` character. | | `post_build_script` | Commands to be executed on the runner after executing the job. Runs in the same shell context as `pre_build_script`, `before_script`, and `script`. If any of those fail, `post_build_script` is skipped. `after_script` runs in a separate shell context and is not affected by `post_build_script`. To insert multiple commands, use a (triple-quoted) multi-line string or `\n` character. | | `clone_url` | Overwrite the URL for the GitLab instance. Used only if the runner can't connect to the GitLab URL. 
| | `debug_trace_disabled` | Disables [debug tracing](https://docs.gitlab.com/ci/variables/#enable-debug-logging). When set to `true`, the debug log (trace) remains disabled even if `CI_DEBUG_TRACE` is set to `true`. | | `clean_git_config` | Cleans the Git configuration. For more information, see [Cleaning Git configuration](#cleaning-git-configuration). | | `referees` | Extra job monitoring workers that pass their results as job artifacts to GitLab. | | `unhealthy_requests_limit` | The number of `unhealthy` responses to new job requests after which a runner worker is disabled. | | `unhealthy_interval` | Duration that a runner worker is disabled for after it exceeds the unhealthy requests limit. Supports syntax like `3600 s`, `1 h 30 min`, and similar. | | `job_status_final_update_retry_limit` | The maximum number of times GitLab Runner can retry to push the final job status to the GitLab instance. | Example: ```toml [[runners]] name = "example-runner" url = "http://gitlab.example.com/" token = "TOKEN" limit = 0 executor = "docker" builds_dir = "" shell = "" environment = ["ENV=value", "LC_ALL=en_US.UTF-8"] clone_url = "http://gitlab.example.local" ``` ### Use environment variables for sensitive values You can use environment variables in the `token` and `url` fields to avoid storing sensitive values directly in the configuration file. Both `$VAR` and `${VAR}` syntax are supported. ```toml [[runners]] name = "runner-1" url = "$GITLAB_URL" token = "${RUNNER_TOKEN_1}" executor = "docker" [[runners]] name = "runner-2" url = "$GITLAB_URL" token = "${RUNNER_TOKEN_2}" executor = "docker" ``` This is useful for: - Kubernetes deployments where tokens are mounted from secrets - Docker deployments where tokens are passed as environment variables - Avoiding secrets in version-controlled configuration files ### Legacy `/ci` URL suffix {{< history >}} - Deprecated in [GitLab Runner 1.0.0](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/289). 
- Warning added in GitLab Runner 18.7.0. {{< /history >}} In versions of GitLab Runner before 1.0.0, the runner URL was configured with a `/ci` suffix, such as `url = "https://gitlab.example.com/ci"`. This suffix is no longer required and should be removed from your configuration. If your `config.toml` contains a URL with the `/ci` suffix, GitLab Runner automatically strips it when processing the configuration. However, you should update your configuration file to remove the suffix to avoid potential issues. #### Known issues - Git submodule authentication failures: When `GIT_SUBMODULE_FORCE_HTTPS=true` is set, submodules might fail to clone with authentication errors like `fatal: could not read Username for 'https://gitlab.example.com': terminal prompts disabled`. This issue occurs because the `/ci` suffix interferes with Git URL rewriting rules. For more details, see [issue 581678](https://gitlab.com/gitlab-org/gitlab/-/work_items/581678#note_2934077238). **Problematic configuration**: ```toml [[runners]] name = "legacy-runner" url = "https://gitlab.example.com/ci" # Remove the /ci suffix token = "TOKEN" executor = "docker" ``` **Corrected configuration**: ```toml [[runners]] name = "legacy-runner" url = "https://gitlab.example.com" # /ci suffix removed token = "TOKEN" executor = "docker" ``` When GitLab Runner starts with a URL containing the `/ci` suffix, it logs a warning message: ```plaintext WARNING: The runner URL contains a legacy '/ci' suffix. This suffix is deprecated and should be removed from the configuration. Git submodules may fail to clone with authentication errors if this suffix is present. Please update the 'url' field in your config.toml to remove the '/ci' suffix. See https://docs.gitlab.com/runner/configuration/advanced-configuration/#legacy-ci-url-suffix for more information. ``` To resolve this warning, edit your `config.toml` file and remove the `/ci` suffix from the `url` field. 
### How `clone_url` works When the GitLab instance is available at a URL that the runner can't use, you can configure a `clone_url`. For example, a firewall might prevent the runner from reaching the URL. If the runner can reach the node on `192.168.1.23`, set the `clone_url` to `http://192.168.1.23`. If the `clone_url` is set, the runner constructs a clone URL in the form of `http://gitlab-ci-token:s3cr3tt0k3n@192.168.1.23/namespace/project.git`. > [!note] > `clone_url` does not affect Git LFS endpoints or artifact uploads or downloads. #### Modify Git LFS endpoints To modify [Git LFS](https://docs.gitlab.com/topics/git/lfs/) endpoints, set `pre_get_sources_script` in one of the following files: - `config.toml`: ```toml pre_get_sources_script = "mkdir -p $RUNNER_TEMP_PROJECT_DIR/git-template; git config -f $RUNNER_TEMP_PROJECT_DIR/git-template/config lfs.url https://" ``` - `.gitlab-ci.yml`: ```yaml default: hooks: pre_get_sources_script: - mkdir -p $RUNNER_TEMP_PROJECT_DIR/git-template - git config -f $RUNNER_TEMP_PROJECT_DIR/git-template/config lfs.url https://localhost ``` ### How `unhealthy_requests_limit` and `unhealthy_interval` work When a GitLab instance is unavailable for a long time (for example, during a version upgrade), its runners become idle. The runners do not resume job processing for 30-60 minutes after the GitLab instance is available again. To increase or decrease the duration that runners are idle, change the `unhealthy_interval` setting. To change the number of connection attempts the runner makes to the GitLab server before it is marked unhealthy and becomes idle, change the `unhealthy_requests_limit` setting. For more information, see [How `check_interval` works](advanced-configuration.md#how-check_interval-works). ## The executors The following executors are available. 
| Executor | Required configuration | Where jobs run | |---------------------|-------------------------------------------------------------------------|----------------| | `shell` | | Local shell. The default executor. | | `docker` | `[runners.docker]` and [Docker Engine](https://docs.docker.com/engine/) | A Docker container. | | `docker-windows` | `[runners.docker]` and [Docker Engine](https://docs.docker.com/engine/) | A Windows Docker container. | | `ssh` | `[runners.ssh]` | SSH, remotely. | | `parallels` | `[runners.parallels]` and `[runners.ssh]` | Parallels VM, but connect with SSH. | | `virtualbox` | `[runners.virtualbox]` and `[runners.ssh]` | VirtualBox VM, but connect with SSH. | | `docker+machine` | `[runners.docker]` and `[runners.machine]` | Like `docker`, but use [auto-scaled Docker machines](autoscale.md). | | `kubernetes` | `[runners.kubernetes]` | Kubernetes pods. | | `docker-autoscaler` | `[docker-autoscaler]` and `[runners.autoscaler]` | Like `docker`, but uses autoscaled instances to run CI/CD jobs in containers. | | `instance` | `[docker-autoscaler]` and `[runners.autoscaler]` | Like `shell`, but uses autoscaled instances to run CI/CD jobs directly on the host instance. | ## The shells CI/CD jobs run locally on the host machine when configured to use the shell executor. The supported operating system shells are: | Shell | Description | |--------------|-------------| | `bash` | Generate Bash (Bourne-shell) script. All commands executed in Bash context. Default for all Unix systems. | | `sh` | Generate Sh (Bourne-shell) script. All commands executed in Sh context. The fallback for `bash` for all Unix systems. | | `powershell` | Generate PowerShell script. All commands are executed in PowerShell Desktop context. Default shell for jobs on Windows with the `kubernetes` and `docker-windows` executors. | | `pwsh` | Generate PowerShell script. All commands are executed in PowerShell Core context. 
Default shell for new runner registration on Windows, and for jobs with the `shell` executor. | When the `shell` option is set to `bash` or `sh`, Bash's [ANSI-C quoting](https://www.gnu.org/software/bash/manual/html_node/ANSI_002dC-Quoting.html) is used to shell escape job scripts. ### Use a POSIX-compliant shell In GitLab Runner 14.9 and later, [enable the feature flag](feature-flags.md) named `FF_POSIXLY_CORRECT_ESCAPES` to use a POSIX-compliant shell (like `dash`). When enabled, ["Double Quotes"](https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_02), which is POSIX-compliant shell escaping mechanism, is used. ## The `[runners.docker]` section The following settings define the Docker container parameters. These settings are applicable when the runner is configured to use the Docker executor. [Docker-in-Docker](https://docs.gitlab.com/ci/docker/using_docker_build/#use-docker-in-docker) as a service, or any container runtime configured inside a job, does not inherit these parameters. | Parameter | Example | Description | |------------------------------------|--------------------------------------------------|-------------| | `allowed_images` | `["ruby:*", "python:*", "php:*"]` | Wildcard list of images that can be specified in the `.gitlab-ci.yml` file. If not present, all images are allowed (equivalent to `["*/*:*"]`). Use with the [Docker](../executors/docker.md#restrict-docker-images-and-services) or [Kubernetes](../executors/kubernetes/_index.md#restrict-docker-images-and-services) executors. | | `allowed_privileged_images` | | Wildcard subset of `allowed_images` that runs in privileged mode when `privileged` is enabled. If not present, all images are allowed (equivalent to `["*/*:*"]`). Use with the [Docker](../executors/docker.md#restrict-docker-images-and-services) executors. | | `allowed_pull_policies` | | List of pull policies that can be specified in the `.gitlab-ci.yml` file or the `config.toml` file. 
If not specified, only the pull policies specified in `pull-policy` are allowed. Use with the [Docker](../executors/docker.md#allow-docker-pull-policies) executor. | | `allowed_services` | `["postgres:9", "redis:*", "mysql:*"]` | Wildcard list of services that can be specified in the `.gitlab-ci.yml` file. If not present, all images are allowed (equivalent to `["*/*:*"]`). Use with the [Docker](../executors/docker.md#restrict-docker-images-and-services) or [Kubernetes](../executors/kubernetes/_index.md#restrict-docker-images-and-services) executors. | | `allowed_privileged_services` | | Wildcard subset of `allowed_services` that is allowed to run in privileged mode, when `privileged` or `services_privileged` is enabled. If not present, all images are allowed (equivalent to `["*/*:*"]`). Use with the [Docker](../executors/docker.md#restrict-docker-images-and-services) executors. | | `cache_dir` | | Directory where Docker caches should be stored. This path can be absolute or relative to current working directory. See `disable_cache` for more information. | | `cap_add` | `["NET_ADMIN"]` | Add additional Linux capabilities to the container. | | `cap_drop` | `["DAC_OVERRIDE"]` | Drop additional Linux capabilities from the container. | | `cpuset_cpus` | `"0,1"` | The control group's `CpusetCpus`. A string. | | `cpuset_mems` | `"0,1"` | The control group's `CpusetMems`. A string. | | `cpu_shares` | | Number of CPU shares used to set relative CPU usage. Default is `1024`. | | `cpus` | `"2"` | Number of CPUs (available in Docker 1.13 or later). A string. | | `devices` | `["/dev/net/tun"]` | Share additional host devices with the container. | | `device_cgroup_rules` | | Custom device `cgroup` rules (available in Docker 1.28 or later). | | `disable_cache` | | The Docker executor has two levels of caching: a global one (like any other executor) and a local cache based on Docker volumes. 
This configuration flag acts only on the local one which disables the use of automatically created (not mapped to a host directory) cache volumes. In other words, it only prevents creating a container that holds temporary files of builds, it does not disable the cache if the runner is configured in [distributed cache mode](autoscale.md#distributed-runners-caching). | | `disable_entrypoint_overwrite` | | Disable the image entrypoint overwriting. | | `dns` | `["8.8.8.8"]` | A list of DNS servers for the container to use. | | `dns_search` | | A list of DNS search domains. | | `extra_hosts` | `["other-host:127.0.0.1"]` | Hosts that should be defined in container environment. | | `gpus` | | GPU devices for Docker container. Uses the same format as the `docker` CLI. View details in the [Docker documentation](https://docs.docker.com/engine/containers/resource_constraints/#gpu). Requires [configuration to enable GPUs](gpus.md#docker-executor). | | `group_add` | `["docker"]` | Add additional groups for the container process to run. | | `helper_image` | | (Advanced) [The default helper image](#helper-image) used to clone repositories and upload artifacts. | | `helper_image_flavor` | | Sets the helper image flavor (`alpine`, `alpine3.21`, `alpine-latest`, `ubi-fips` or `ubuntu`). Defaults to `alpine`. The `alpine` flavor uses the same version as `alpine-latest`. | | `helper_image_autoset_arch_and_os` | | Uses the underlying OS to set the Helper Image architecture and OS. | | `host` | | Custom Docker endpoint. Default is `DOCKER_HOST` environment or `unix:///var/run/docker.sock`. | | `hostname` | | Custom hostname for the Docker container. | | `image` | `"ruby:3.3"` | The image to run jobs with. | | `links` | `["mysql_container:mysql"]` | Containers that should be linked with container that runs the job. 
| | `log_options` | `{"env": "GITLAB_CI_JOB_ID,GITLAB_CI_JOB_NAME", "labels": "com.gitlab.gitlab-runner.type"}` | Log driver options for Docker containers that use the `json-file` log driver. Only `env` and `labels` options are allowed. For more information, see [Docker log options](#docker-log-options). | | `memory` | `"128m"` | The memory limit. A string. | | `memory_swap` | `"256m"` | The total memory limit. A string. | | `memory_reservation` | `"64m"` | The memory soft limit. A string. | | `network_mode` | | Add container to a custom network. | | `mac_address` | `92:d0:c6:0a:29:33` | Container MAC address | | `oom_kill_disable` | | If an out-of-memory (`OOM`) error occurs, do not terminate processes in a container. | | `oom_score_adjust` | | `OOM` score adjustment. Positive means terminate the processes earlier. | | `privileged` | `false` | Make the container run in privileged mode. Insecure. | | `services_privileged` | | Allow services to run in privileged mode. If unset (default) `privileged` value is used instead. Use with the [Docker](../executors/docker.md#allow-docker-pull-policies) executor. Insecure. | | `pull_policy` | | The image pull policy: `never`, `if-not-present` or `always` (default). View details in the [pull policies documentation](../executors/docker.md#configure-how-runners-pull-images). You can also add [multiple pull policies](../executors/docker.md#set-multiple-pull-policies), [retry a failed pull](../executors/docker.md#retry-a-failed-pull), or [restrict pull policies](../executors/docker.md#allow-docker-pull-policies). | | `runtime` | | The runtime for the Docker container. | | `isolation` | | Container isolation technology (`default`, `hyperv` and `process`). Windows only. | | `security_opt` | | Security options (--security-opt in `docker run`). Takes a list of `:` separated key/values. `systempaths` specification is not supported. For more information, see [issue 36810](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/36810). 
| | `shm_size` | `300000` | Shared memory size for images (in bytes). | | `sysctls` | | The `sysctl` options. | | `tls_cert_path` | On macOS `/Users/<username>/.boot2docker/certs`. | A directory where `ca.pem`, `cert.pem` or `key.pem` are stored and used to make a secure TLS connection to Docker. Use this setting with `boot2docker`. | | `tls_verify` | | Enable or disable TLS verification of connections to the Docker daemon. Disabled by default. By default, GitLab Runner connects to the Docker Unix socket over SSH. The Unix socket does not support TLS and communicates over HTTP with SSH to provide encryption and authentication. Enabling `tls_verify` is not typically needed and requires additional configuration. To enable `tls_verify`, the daemon must listen on a port (rather than the default Unix socket) and the GitLab Runner Docker host must use the address the daemon is listening on. | | `user` | | Run all commands in the container as the specified user. | | `userns_mode` | | The user namespace mode for the container and Docker services when user namespace remapping option is enabled. Available in Docker 1.10 or later. For details, see [Docker documentation](https://docs.docker.com/engine/security/userns-remap/#disable-namespace-remapping-for-a-container). | | `ulimit` | | Ulimit values that are passed to the container. Uses the same syntax as the Docker `--ulimit` flag. | | `volume_keep` | | When `true`, Docker volumes are not deleted when the runner cleans up a container after a job. Volumes accumulate on disk. The operator is responsible for periodic cleanup (for example, `docker volume prune` in a cron job). Use this setting in high-concurrency environments where volume removal blocks the Docker daemon. Default is `false`. | | `volumes` | `["/data", "/home/project/cache"]` | Additional volumes that should be mounted. Same syntax as the Docker `-v` flag. 
| | `volumes_from` | `["storage_container:ro"]` | A list of volumes to inherit from another container in the form `<container name>[:<access_level>]`. Access level defaults to read-write, but can be manually set to `ro` (read-only) or `rw` (read-write). | | `volume_driver` | | The volume driver to use for the container. | | `wait_for_services_timeout` | `30` | How long to wait for Docker services. Set to `-1` to disable. Default is `30`. | | `container_labels` | | A set of labels to add to each container created by the runner. The label value can include environment variables for expansion. | | `services_limit` | | Set the maximum allowed services per job. `-1` (default) means there is no limit. | | `service_cpuset_cpus` | | String value containing the `cgroups CpusetCpus` to use for a service. | | `service_cpu_shares` | | Number of CPU shares used to set a service's relative CPU usage (default: [`1024`](https://docs.docker.com/engine/containers/resource_constraints/#cpu)). | | `service_cpus` | | String value of the number of CPUs for a service. Available in Docker 1.13 or later. | | `service_gpus` | | GPU devices for Docker container. Uses the same format as the `docker` CLI. View details in the [Docker documentation](https://docs.docker.com/engine/containers/resource_constraints/#gpu). Requires [configuration to enable GPUs](gpus.md#docker-executor). | | `service_memory` | | String value of the memory limit for a service. | | `service_memory_swap` | | String value of the total memory limit for a service. | | `service_memory_reservation` | | String value of the memory soft limit for a service. | ### The `[[runners.docker.services]]` section Specify additional [services](https://docs.gitlab.com/ci/services/) to run with the job. For a list of available images, see the [Docker Registry](https://hub.docker.com). Each service runs in a separate container and is linked to the job. 
| Parameter | Example | Description | |---------------|------------------------------------|-------------| | `name` | `"registry.example.com/svc1"` | The name of the image to be run as a service. | | `alias` | `"svc1"` | Additional [alias name](https://docs.gitlab.com/ci/services/#available-settings-for-services) that can be used to access the service. | | `entrypoint` | `["entrypoint.sh"]` | Command or script that should be executed as the container's entrypoint. The syntax is similar to the [Dockerfile ENTRYPOINT](https://docs.docker.com/reference/dockerfile/#entrypoint) directive, where each shell token is a separate string in the array. Introduced in [GitLab Runner 13.6](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27173). | | `command` | `["executable","param1","param2"]` | Command or script that should be used as the container's command. The syntax is similar to the [Dockerfile CMD](https://docs.docker.com/reference/dockerfile/#cmd) directive, where each shell token is a separate string in the array. Introduced in [GitLab Runner 13.6](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27173). | | `environment` | `["ENV1=value1", "ENV2=value2"]` | Append or overwrite environment variables for the service container. 
| Example: ```toml [runners.docker] host = "" hostname = "" tls_cert_path = "/Users/ayufan/.boot2docker/certs" image = "ruby:3.3" memory = "128m" memory_swap = "256m" memory_reservation = "64m" oom_kill_disable = false cpuset_cpus = "0,1" cpuset_mems = "0,1" cpus = "2" dns = ["8.8.8.8"] dns_search = [""] service_memory = "128m" service_memory_swap = "256m" service_memory_reservation = "64m" service_cpuset_cpus = "0,1" service_cpus = "2" services_limit = 5 privileged = false group_add = ["docker"] cap_add = ["NET_ADMIN"] cap_drop = ["DAC_OVERRIDE"] devices = ["/dev/net/tun"] disable_cache = false wait_for_services_timeout = 30 cache_dir = "" volumes = ["/data", "/home/project/cache"] extra_hosts = ["other-host:127.0.0.1"] shm_size = 300000 volumes_from = ["storage_container:ro"] links = ["mysql_container:mysql"] allowed_images = ["ruby:*", "python:*", "php:*"] allowed_services = ["postgres:9", "redis:*", "mysql:*"] log_options = { env = "GITLAB_CI_JOB_ID,GITLAB_CI_JOB_NAME", labels = "com.gitlab.gitlab-runner.type" } [runners.docker.ulimit] "rtprio" = "99" [[runners.docker.services]] name = "registry.example.com/svc1" alias = "svc1" entrypoint = ["entrypoint.sh"] command = ["executable","param1","param2"] environment = ["ENV1=value1", "ENV2=value2"] [[runners.docker.services]] name = "redis:2.8" alias = "cache" [[runners.docker.services]] name = "postgres:9" alias = "postgres-db" [runners.docker.sysctls] "net.ipv4.ip_forward" = "1" ``` ### Volumes in the `[runners.docker]` section For more information about volumes, see the [Docker documentation](https://docs.docker.com/engine/storage/volumes/). The following examples show how to specify volumes in the `[runners.docker]` section. #### Example 1: Add a data volume A data volume is a specially-designated directory in one or more containers that bypasses the Union File System. Data volumes are designed to persist data, independent of the container's lifecycle. 
```toml [runners.docker] host = "" hostname = "" tls_cert_path = "/Users/ayufan/.boot2docker/certs" image = "ruby:3.3" privileged = false disable_cache = true volumes = ["/path/to/volume/in/container"] ``` This example creates a new volume in the container at `/path/to/volume/in/container`. #### Example 2: Mount a host directory as a data volume When you want to store directories outside the container, you can mount a directory from your Docker daemon's host into a container: ```toml [runners.docker] host = "" hostname = "" tls_cert_path = "/Users/ayufan/.boot2docker/certs" image = "ruby:3.3" privileged = false disable_cache = true volumes = ["/path/to/bind/from/host:/path/to/bind/in/container:rw"] ``` This example uses `/path/to/bind/from/host` of the CI/CD host in the container at `/path/to/bind/in/container`. GitLab Runner 11.11 and later [mount the host directory](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/1261) for the defined [services](https://docs.gitlab.com/ci/services/) as well. ### Docker log options The `log_options` parameter allows you to configure Docker container log options for the `json-file` log driver. For security and compatibility reasons, only the `env` and `labels` options are supported. 
#### Supported log options - `env`: Comma-separated list of environment variable names to include in log entries - `labels`: Comma-separated list of container label names to include in log entries #### Configuration examples The following are some configuration examples: ```toml [[runners]] [runners.docker] # Include specific environment variables in logs log_options = { env = "GITLAB_CI_JOB_ID,GITLAB_CI_JOB_NAME,CI_PIPELINE_ID" } ``` ```toml [[runners]] [runners.docker] # Include container labels in logs log_options = { labels = "com.gitlab.gitlab-runner.type" } ``` ```toml [[runners]] [runners.docker] # Include both environment variables and labels log_options = { env = "GITLAB_CI_JOB_ID,GITLAB_CI_JOB_NAME", labels = "com.gitlab.gitlab-runner.type" } ``` #### Validation and error handling GitLab Runner validates log options during executor preparation. If you specify unsupported options such as `max-size`, `max-file`, or `compress`, the job fails immediately with a configuration error. The log options apply to the main job container and any service containers defined in your CI/CD configuration. For more information about Docker logging, see the [Docker `json-file` log driver documentation](https://docs.docker.com/config/containers/logging/json-file/). ### Use a private container registry To use private registries as a source of images for your jobs, configure authorization with the [CI/CD variable](https://docs.gitlab.com/ci/variables/) `DOCKER_AUTH_CONFIG`. You can set the variable in one of the following: - The CI/CD settings of the project as the [`file` type](https://docs.gitlab.com/ci/variables/#use-file-type-cicd-variables) - The `config.toml` file Using private registries with the `if-not-present` pull policy may introduce [security implications](../security/_index.md#usage-of-private-docker-images-with-if-not-present-pull-policy). 
For more information about how pull policies work, see [Configure how runners pull images](../executors/docker.md#configure-how-runners-pull-images). For more information about using private container registries, see: - [Access an image from a private container registry](https://docs.gitlab.com/ci/docker/using_docker_images/#access-an-image-from-a-private-container-registry) - [`.gitlab-ci.yml` keyword reference](https://docs.gitlab.com/ci/yaml/#image) The steps performed by the runner can be summed up as: 1. The registry name is found from the image name. 1. If the value is not empty, the executor searches for the authentication configuration for this registry. 1. Finally, if an authentication corresponding to the specified registry is found, subsequent pulls make use of it. #### Support for GitLab integrated registry GitLab sends credentials for its integrated registry along with the job's data. These credentials are automatically added to the registry's authorization parameters list. After this step, authorization against the registry proceeds similarly to configuration added with the `DOCKER_AUTH_CONFIG` variable. In your jobs, you can use any image from your GitLab integrated registry, even if the image is private or protected. For information on the images jobs have access to, read the [CI/CD job token documentation](https://docs.gitlab.com/ci/jobs/ci_job_token/). #### Precedence of Docker authorization resolving As described earlier, GitLab Runner can authorize Docker against a registry by using credentials sent in different ways. To find a proper registry, the following precedence is taken into account: 1. Credentials configured with `DOCKER_AUTH_CONFIG`. 1. Credentials configured locally on the GitLab Runner host with `~/.docker/config.json` or `~/.dockercfg` files (for example, by running `docker login` on the host). 1. Credentials sent by default with a job's payload (for example, credentials for the integrated registry described earlier). 
The first credentials found for the registry are used. So for example, if you add credentials for the integrated registry with the `DOCKER_AUTH_CONFIG` variable, then the default credentials are overridden. ## The `[runners.parallels]` section The following parameters are for Parallels. | Parameter | Description | |---------------------|-------------| | `base_name` | Name of Parallels VM that is cloned. | | `template_name` | Custom name of Parallels VM linked template. Optional. | | `disable_snapshots` | If disabled, the VMs are destroyed when the jobs are done. | | `allowed_images` | List of allowed `image`/`base_name` values, represented as regular expressions. See the [Overriding the base VM image](#overriding-the-base-vm-image) section for more details. | Example: ```toml [runners.parallels] base_name = "my-parallels-image" template_name = "" disable_snapshots = false ``` ## The `[runners.virtualbox]` section The following parameters are for VirtualBox. This executor relies on the `vboxmanage` executable to control VirtualBox machines, so you have to adjust your `PATH` environment variable on Windows hosts: `PATH=%PATH%;C:\Program Files\Oracle\VirtualBox`. | Parameter | Explanation | |---------------------|-------------| | `base_name` | Name of the VirtualBox VM that is cloned. | | `base_snapshot` | Name or UUID of a specific snapshot of the VM to create a linked clone from. If this value is empty or omitted, the current snapshot is used. If no current snapshot exists, one is created. Unless `disable_snapshots` is true, in which case a full clone of the base VM is made. | | `base_folder` | Folder to save the new VM in. If this value is empty or omitted, the default VM folder is used. | | `disable_snapshots` | If disabled, the VMs are destroyed when the jobs are done. | | `allowed_images` | List of allowed `image`/`base_name` values, represented as regular expressions. 
File path to SSH private key (`id_rsa`, `id_dsa`, or `id_ecdsa`).
| | `disable_strict_host_key_checking` | This value determines if the runner should use strict host key checking. Default is `true`. In GitLab 15.0, the default value, or the value if it's not specified, is `false`. | Example: ```toml [runners.ssh] host = "my-production-server" port = "22" user = "root" password = "production-server-password" identity_file = "" ``` ## The `[runners.machine]` section The following parameters define the Docker Machine-based autoscaling feature. For more information, see [Docker Machine Executor autoscale configuration](autoscale.md). | Parameter | Description | |-----------------------------------|-------------| | `MaxGrowthRate` | The maximum number of machines that can be added to the runner in parallel. Default is `0` (no limit). | | `IdleCount` | Number of machines that need to be created and waiting in _Idle_ state. | | `IdleScaleFactor` | The number of _Idle_ machines as a factor of the number of machines in use. Must be in float number format. See [the autoscale documentation](autoscale.md#the-idlescalefactor-strategy) for more details. Defaults to `0.0`. | | `IdleCountMin` | Minimal number of machines that need to be created and waiting in _Idle_ state when the `IdleScaleFactor` is in use. Default is 1. | | `IdleTime` | Time (in seconds) for machine to be in _Idle_ state before it is removed. | | `[[runners.machine.autoscaling]]` | Multiple sections, each containing overrides for autoscaling configuration. The last section with an expression that matches the current time is selected. | | `OffPeakPeriods` | Deprecated: Time periods when the scheduler is in the OffPeak mode. An array of cron-style patterns (described [below](#periods-syntax)). | | `OffPeakTimezone` | Deprecated: Time zone for the times given in OffPeakPeriods. A time zone string like `Europe/Berlin`. Defaults to the locale system setting of the host if omitted or empty. 
GitLab Runner attempts to locate the time zone database in the directory or uncompressed zip file named by the `ZONEINFO` environment variable, then looks in known installation locations on Unix systems, and finally looks in `$GOROOT/lib/time/zoneinfo.zip`. | | `OffPeakIdleCount` | Deprecated: Like `IdleCount`, but for _Off Peak_ time periods. | | `OffPeakIdleTime` | Deprecated: Like `IdleTime`, but for _Off Peak_ time periods. | | `MaxBuilds` | Maximum job (build) count before machine is removed. | | `MachineName` | Name of the machine. It **must** contain `%s`, which is replaced with a unique machine identifier. | | `MachineDriver` | Docker Machine `driver`. View details in the [Cloud Providers Section in the Docker Machine configuration](autoscale.md#supported-cloud-providers). | | `MachineOptions` | Docker Machine options for the MachineDriver. For more information, see [Supported Cloud Providers](autoscale.md#supported-cloud-providers). For more information about all options for AWS, see the [AWS](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md) and [GCP](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/gce.md) projects in the Docker Machine repository. | ### The `[[runners.machine.autoscaling]]` sections The following parameters define the configuration available when using the [Instance](../executors/instance.md) or [Docker Autoscaler](../executors/docker_autoscaler.md#example-aws-autoscaling-for-1-job-per-instance) executor. | Parameter | Description | |-------------------|-------------| | `Periods` | Time periods during which this schedule is active. An array of cron-style patterns (described [below](#periods-syntax)). | | `IdleCount` | Number of machines that need to be created and waiting in _Idle_ state. | | `IdleScaleFactor` | (Experiment) The number of _Idle_ machines as a factor of the number of machines in use. Must be in float number format. 
See [the autoscale documentation](autoscale.md#the-idlescalefactor-strategy) for more details. Defaults to `0.0`. | | `IdleCountMin` | Minimal number of machines that need to be created and waiting in _Idle_ state when the `IdleScaleFactor` is in use. Default is 1. | | `IdleTime` | Time (in seconds) for a machine to be in _Idle_ state before it is removed. | | `Timezone` | Time zone for the times given in `Periods`. A time zone string like `Europe/Berlin`. Defaults to the locale system setting of the host if omitted or empty. GitLab Runner attempts to locate the time zone database in the directory or uncompressed zip file named by the `ZONEINFO` environment variable, then looks in known installation locations on Unix systems, and finally looks in `$GOROOT/lib/time/zoneinfo.zip`. | Example: ```toml [runners.machine] IdleCount = 5 IdleTime = 600 MaxBuilds = 100 MachineName = "auto-scale-%s" MachineDriver = "google" # Refer to Docker Machine docs on how to authenticate: https://docs.docker.com/machine/drivers/gce/#credentials MachineOptions = [ # Additional machine options can be added using the Google Compute Engine driver. # If you experience problems with an unreachable host (ex. "Waiting for SSH"), # you should remove optional parameters to help with debugging. # https://docs.docker.com/machine/drivers/gce/ "google-project=GOOGLE-PROJECT-ID", "google-zone=GOOGLE-ZONE", # e.g. 
The line contains the following fields:
For example, when an instance has been provisioned and the runner is waiting for it to transition from `pending` to `running`.
Introduced in [GitLab Runner 18.4](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5777). | | `shutdown_deletion_interval`| The interval used by the fleeting plugin between removing instances and checking their status during shutdown. Default: `10s` (10 seconds). Introduced in [GitLab Runner 18.4](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5777). | | `shutdown_deletion_retries` | The maximum number of attempts made by the fleeting plugin to ensure that the instances finish deletion before shutdown. Default: `3`. Introduced in [GitLab Runner 18.4](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5777). | | `failure_threshold` | The maximum number of consecutive health failures before the fleeting plugin replaces an instance. See also the heartbeat feature. Default: `3`. Introduced in [GitLab Runner 18.4](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5777). | | `log_internal_ip` | Specifies whether the CI/CD output logs the internal IP address of the VM. Default: `false`. Introduced in [GitLab Runner 18.1](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5519). | | `log_external_ip` | Specifies whether the CI/CD output logs the external IP address of the VM. Default: `false`. Introduced in [GitLab Runner 18.1](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5519). | If the `instance_ready_command` frequently fails with idle scale rules, instances might be removed and created faster than the runner accepts jobs. To support scale throttling, an exponential backoff was added in [GitLab 17.0](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/37497). > [!note] > Autoscaler configuration options don't reload with configuration changes. However, in > GitLab 17.5.0 or later, `[[runners.autoscaler.policy]]` entries reload when configurations change. ## The `[runners.autoscaler.plugin_config]` section This hash table is re-encoded to JSON and passed directly to the configured plugin. 
The rate limit of new instances per second that can be provisioned.
| `use_static_credentials` | Disables automatic credential provisioning. Default: `false`. |
- If an instance is restored when it is actively running a job, GitLab Runner removes it by default. This behavior ensures safety, as GitLab Runner cannot resume jobs. To keep the instance, set `keep_instance_with_acquisitions` to `true`. Setting `keep_instance_with_acquisitions` to `true` helps when you're not concerned about ongoing jobs on the instance. You can also use the `instance_ready_command` configuration option to clean the environment to keep the instance. This might involve stopping all executing commands or forcefully removing Docker containers. | Parameter | Description | |-----------------------------------|-------------| | `enabled` | Whether state storage is enabled. Default: `false`. | | `dir` | The state store directory. Each runner configuration entry has a subdirectory here. Default: `.taskscaler` in the GitLab Runner configuration file directory. | | `keep_instance_with_acquisitions` | Whether instances with active jobs are removed. Default: `false`. | ## The `[[runners.autoscaler.policy]]` sections **Note** - `idle_count` in this context refers to the number of jobs, not the number of autoscaled machines as in the legacy autoscaling method. | Parameter | Description | |----------------------|-------------| | `periods` | An array of unix-cron formatted strings to denote the period this policy is enabled for. Default: `* * * * *` | | `timezone` | The time zone used when evaluating the unix-cron period. Default: The system's local time zone. | | `idle_count` | The target idle capacity we want to be immediately available for jobs. | | `idle_time` | The amount of time that an instance can be idle before it is terminated. | | `scale_factor` | The target idle capacity we want to be immediately available for jobs, on top of the `idle_count`, as a factor of the current in use capacity. Defaults to `0.0`. | | `scale_factor_limit` | The maximum capacity the `scale_factor` calculation can yield. 
When `scale_factor` is set, `idle_count` becomes the minimum `idle` capacity and the `scale_factor_limit` the maximum `idle` capacity.
For example, `0-12/2` for the hour field would activate the period every 2 hours between the hours of 00:00 and 12:00. - `,` can be used to separate a list of valid numbers or ranges for the field. For example, `1,2,6-9`. It's worth keeping in mind that this cron job represents a range in time. For example: | Period | Effect | |----------------------|--------| | `1 * * * *` | Rule enabled for the period of 1 minute every hour (unlikely to be very effective) |
These values override the ones set in the [`[[runners]]`](#the-runners-section) section. [The custom executor documentation](../executors/custom.md#config) has the full list. | | `config_args` | string array | First set of arguments passed to the `config_exec` executable. | | `config_exec_timeout` | integer | Timeout, in seconds, for `config_exec` to finish execution. Default is 3600 seconds (1 hour). | | `prepare_exec` | string | Path to an executable to prepare the environment. | | `prepare_args` | string array | First set of arguments passed to the `prepare_exec` executable. | | `prepare_exec_timeout` | integer | Timeout, in seconds, for `prepare_exec` to finish execution. Default is 3600 seconds (1 hour). | | `run_exec` | string | **Required**. Path to an executable to run scripts in the environments. For example, the clone and build script. | | `run_args` | string array | First set of arguments passed to the `run_exec` executable. | | `cleanup_exec` | string | Path to an executable to clean up the environment. | | `cleanup_args` | string array | First set of arguments passed to the `cleanup_exec` executable. | | `cleanup_exec_timeout` | integer | Timeout, in seconds, for `cleanup_exec` to finish execution. Default is 3600 seconds (1 hour). | | `graceful_kill_timeout` | integer | Time to wait, in seconds, for `prepare_exec` and `cleanup_exec` if they are terminated (for example, during job cancellation). After this timeout, the process is killed. Default is 600 seconds (10 minutes). | | `force_kill_timeout` | integer | Time to wait, in seconds, after the kill signal is sent to the script. Default is 600 seconds (10 minutes). | ## The `[runners.cache]` section The following parameters define the distributed cache feature. View details in the [runner autoscale documentation](autoscale.md#distributed-runners-caching). | Parameter | Type | Description | |--------------------------|---------|-------------| | `Type` | string | One of: `s3`, `gcs`, `azure`. 
| | `Path` | string | Name of the path to prepend to the cache URL. | | `Shared` | boolean | Enables cache sharing between runners. Default is `false`. | | `MaxUploadedArchiveSize` | int64 | Limit, in bytes, of the cache archive being uploaded to cloud storage. A malicious actor can work around this limit so the GCS adapter enforces it through the X-Goog-Content-Length-Range header in the signed URL. You should also set the limit on your cloud storage provider. | You can use the following environment variables to configure cache compression: | Variable | Description | Default | Values | |----------------------------|---------------------------------------|-----------|-------------------------------------------------| | `CACHE_COMPRESSION_FORMAT` | Compression format for cache archives | `zip` | `zip`, `tarzstd` | | `CACHE_COMPRESSION_LEVEL` | Compression level for cache archives | `default` | `fastest`, `fast`, `default`, `slow`, `slowest` | The `tarzstd` format uses TAR with Zstandard compression, which provides better compression ratios than `zip`. The compression levels range from `fastest` (minimal compression for maximum speed) to `slowest` (maximum compression for smallest file size). The `default` level provides a balanced trade-off between compression ratio and speed. Example: ```yaml job: variables: CACHE_COMPRESSION_FORMAT: tarzstd CACHE_COMPRESSION_LEVEL: fast ``` ### Parallel cache object storage transfers By default, cache downloads use a single HTTP GET or GoCloud read stream, and cache uploads that use the GoCloud path (for example S3 with `RoleARN`) use one concurrent multipart part at a time. You can enable higher throughput on fast links to object storage with the `FF_USE_PARALLEL_CACHE_TRANSFER` [feature flag](feature-flags.md). 
When it is enabled: - **Downloads** may use multiple concurrent range GETs (presigned URL; a small initial Range request is used instead of HEAD, which often fails for GET-only presigned URLs such as S3) or concurrent GoCloud range reads, when the backend supports ranges and the cache object is larger than one chunk. - **Uploads** on the GoCloud path use multipart uploads with concurrent parts. When the feature flag is off, behavior is unchanged regardless of the variables below. You can tune parallelism with these job environment variables (they are read by the `cache-extractor` and `cache-archiver` helpers): | Variable | Description | Default | |------------------------------|-----------------------------------------------------------------------------|---------| | `CACHE_CHUNK_SIZE` | Chunk size in bytes for parallel range downloads and multipart part size for GoCloud uploads | `16777216` (16 MiB) | | `CACHE_CONCURRENCY` | Number of concurrent range downloads or concurrent upload parts (GoCloud). Use `0` or `1` for sequential downloads. | `16` | | `CACHE_TRANSFER_BUFFER_SIZE` | Buffer size in bytes when streaming to or from the archive file | `4194304` (4 MiB) | Example: ```yaml job: variables: FF_USE_PARALLEL_CACHE_TRANSFER: "true" CACHE_CONCURRENCY: "8" CACHE_CHUNK_SIZE: "16777216" ``` ### Parallel artifact downloads (direct download) By default, when [`direct_download`](https://docs.gitlab.com/ci/jobs/job_artifacts/#download-artifacts-from-a-job) returns a redirect to object storage, the runner downloads artifacts with a single HTTP GET stream. Enable the `FF_USE_PARALLEL_ARTIFACT_TRANSFER` [feature flag](feature-flags.md) to allow parallel HTTP Range GETs when the object storage backend supports `206 Partial Content` with a `Content-Range` total. Chunk size and concurrency are fixed in the runner (not `CACHE_*` variables). This flag is independent of `FF_USE_PARALLEL_CACHE_TRANSFER`. 
Example: ```yaml job: variables: FF_USE_PARALLEL_ARTIFACT_TRANSFER: "true" ``` The cache mechanism uses pre-signed URLs to upload and download cache. URLs are signed by GitLab Runner on its own instance. It does not matter if the job's script (including the cache upload/download script) are executed on local or external machines. For example, `shell` or `docker` executors run their scripts on the same machine where the GitLab Runner process is running. At the same time, `virtualbox` or `docker+machine` connects to a separate VM to execute the script. This process is for security reasons: minimizing the possibility of leaking the cache adapter's credentials. If the [S3 cache adapter](#the-runnerscaches3-section) is configured to use an IAM instance profile, the adapter uses the profile attached to the GitLab Runner machine. Similarly for [GCS cache adapter](#the-runnerscachegcs-section), if configured to use the `CredentialsFile`. The file needs to be present on the GitLab Runner machine. This table lists `config.toml`, CLI options, and environment variables for `register`. When you define these environment variables, the values are saved in `config.toml` after you register a new GitLab Runner. If you want to omit S3 credentials from `config.toml` and load static credentials from the environment, you can define `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`. For more information, see [AWS SDK default credential chain section](#aws-sdk-default-credential-chain). 
| Setting | TOML field | CLI option for `register` | Environment variable for `register` | |--------------------------------|---------------------------------------------------|--------------------------------------------|-------------------------------------| | `Type` | `[runners.cache] -> Type` | `--cache-type` | `$CACHE_TYPE` | | `Path` | `[runners.cache] -> Path` | `--cache-path` | `$CACHE_PATH` | | `Shared` | `[runners.cache] -> Shared` | `--cache-shared` | `$CACHE_SHARED` | | `S3.ServerAddress` | `[runners.cache.s3] -> ServerAddress` | `--cache-s3-server-address` | `$CACHE_S3_SERVER_ADDRESS` | | `S3.AccessKey` | `[runners.cache.s3] -> AccessKey` | `--cache-s3-access-key` | `$CACHE_S3_ACCESS_KEY` | | `S3.SecretKey` | `[runners.cache.s3] -> SecretKey` | `--cache-s3-secret-key` | `$CACHE_S3_SECRET_KEY` | | `S3.SessionToken` | `[runners.cache.s3] -> SessionToken` | `--cache-s3-session-token` | `$CACHE_S3_SESSION_TOKEN` | | `S3.BucketName` | `[runners.cache.s3] -> BucketName` | `--cache-s3-bucket-name` | `$CACHE_S3_BUCKET_NAME` | | `S3.BucketLocation` | `[runners.cache.s3] -> BucketLocation` | `--cache-s3-bucket-location` | `$CACHE_S3_BUCKET_LOCATION` | | `S3.Insecure` | `[runners.cache.s3] -> Insecure` | `--cache-s3-insecure` | `$CACHE_S3_INSECURE` | | `S3.AuthenticationType` | `[runners.cache.s3] -> AuthenticationType` | `--cache-s3-authentication_type` | `$CACHE_S3_AUTHENTICATION_TYPE` | | `S3.ServerSideEncryption` | `[runners.cache.s3] -> ServerSideEncryption` | `--cache-s3-server-side-encryption` | `$CACHE_S3_SERVER_SIDE_ENCRYPTION` | | `S3.ServerSideEncryptionKeyID` | `[runners.cache.s3] -> ServerSideEncryptionKeyID` | `--cache-s3-server-side-encryption-key-id` | `$CACHE_S3_SERVER_SIDE_ENCRYPTION_KEY_ID` | | `S3.DualStack` | `[runners.cache.s3] -> DualStack` | `--cache-s3-dual-stack` | `$CACHE_S3_DUAL_STACK` | | `S3.Accelerate` | `[runners.cache.s3] -> Accelerate` | `--cache-s3-accelerate` | `$CACHE_S3_ACCELERATE` | | `S3.PathStyle` | `[runners.cache.s3] -> 
PathStyle` | `--cache-s3-path-style` | `$CACHE_S3_PATH_STYLE` | | `S3.RoleARN` | `[runners.cache.s3] -> RoleARN` | `--cache-s3-role-arn` | `$CACHE_S3_ROLE_ARN` | | `S3.UploadRoleARN` | `[runners.cache.s3] -> UploadRoleARN` | `--cache-s3-upload-role-arn` | `$CACHE_S3_UPLOAD_ROLE_ARN` | | `S3.AssumeRoleMaxConcurrency` | `[runners.cache.s3] -> AssumeRoleMaxConcurrency` | `--cache-s3-assume-role-max-concurrency` | `$CACHE_S3_ASSUME_ROLE_MAX_CONCURRENCY` | | `GCS.AccessID` | `[runners.cache.gcs] -> AccessID` | `--cache-gcs-access-id` | `$CACHE_GCS_ACCESS_ID` | | `GCS.PrivateKey` | `[runners.cache.gcs] -> PrivateKey` | `--cache-gcs-private-key` | `$CACHE_GCS_PRIVATE_KEY` | | `GCS.CredentialsFile` | `[runners.cache.gcs] -> CredentialsFile` | `--cache-gcs-credentials-file` | `$GOOGLE_APPLICATION_CREDENTIALS` | | `GCS.BucketName` | `[runners.cache.gcs] -> BucketName` | `--cache-gcs-bucket-name` | `$CACHE_GCS_BUCKET_NAME` | | `Azure.AccountName` | `[runners.cache.azure] -> AccountName` | `--cache-azure-account-name` | `$CACHE_AZURE_ACCOUNT_NAME` | | `Azure.AccountKey` | `[runners.cache.azure] -> AccountKey` | `--cache-azure-account-key` | `$CACHE_AZURE_ACCOUNT_KEY` | | `Azure.ContainerName` | `[runners.cache.azure] -> ContainerName` | `--cache-azure-container-name` | `$CACHE_AZURE_CONTAINER_NAME` | | `Azure.StorageDomain` | `[runners.cache.azure] -> StorageDomain` | `--cache-azure-storage-domain` | `$CACHE_AZURE_STORAGE_DOMAIN` | ### Cache key handling {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5751) in GitLab Runner 18.4.0. - Object path in distributed caches [changed](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6628) in GitLab Runner 19.0 to include a shard prefix when `FF_HASH_CACHE_KEYS` is enabled. {{< /history >}} In GitLab Runner 18.4.0 and later, you can hash cache keys with the `FF_HASH_CACHE_KEYS` [feature flag](feature-flags.md). 
When `FF_HASH_CACHE_KEYS` is turned off (default), GitLab Runner sanitizes the cache key before using it to build the path for both the local cache file and the object in the storage bucket. If the sanitization changes the cache key, GitLab Runner logs this change. If GitLab Runner cannot sanitize the cache key, it also logs this, and does not use this specific cache. When you turn on this feature flag, GitLab Runner hashes the cache key (SHA-256) before using it to build the path for the local cache artifact and the object in the remote storage bucket. GitLab Runner does not sanitize the cache key. To help you understand which cache key created a specific cache artifact, GitLab Runner attaches metadata to it: - For local cache artifacts, GitLab Runner places a `metadata.json` file next to the cache artifact `cache.zip`, with the following content: ```json {"cachekey": "the human readable cache key"} ``` - For cache artifacts on distributed caches, GitLab Runner attaches the metadata directly to the storage object blob, with the key `cachekey`. You can query it using the cloud provider's mechanisms. For an example, see the [user-defined object metadata](https://docs.aws.amazon.com/AmazonS3/latest/userguide/UsingMetadata.html#UserMetadata) for AWS S3. 
#### Distributed cache object path with `FF_HASH_CACHE_KEYS` In GitLab Runner 19.0 and later, when `FF_HASH_CACHE_KEYS` is enabled, GitLab Runner inserts the first two hexadecimal characters of the SHA-256 hash as a shard prefix in the distributed cache object path: ```plaintext [path/][runner//]project////cache.zip ``` For example: ```plaintext runner/abc123/project/42/d0/d03a852ba491ba611e907b1ef60ad5c4516a05b8f3aae6abb77f42bc60325aed/cache.zip ``` This distributes cache objects across 256 distinct object prefixes per project, which prevents [Amazon S3 503 (Slow Down) responses](https://docs.aws.amazon.com/AmazonS3/latest/userguide/optimizing-performance.html) when many parallel jobs access the cache at high request rates. > [!warning] > Upgrading to GitLab Runner 19.0 is a breaking change if you use `FF_HASH_CACHE_KEYS`. > If you already have `FF_HASH_CACHE_KEYS` enabled and upgrade to GitLab Runner 19.0 > or later, the shard prefix changes the object path for all cache artifacts in > distributed storage. Existing objects stored at the old path > (`...//cache.zip`) become unreachable. Expect cache misses and cache > artifacts rebuild on the first job run after upgrade. #### Cache key handling behavior summary When you change `FF_HASH_CACHE_KEYS`, GitLab Runner ignores existing cache artifacts because hashing the cache key changes the cache artifact's name and location. This change applies in both directions, from `FF_HASH_CACHE_KEYS=true` to `FF_HASH_CACHE_KEYS=false` and vice versa. If you run multiple runners that share a distributed cache but have different settings for `FF_HASH_CACHE_KEYS`, they do not share cache artifacts. Therefore, best practice is: - Keep `FF_HASH_CACHE_KEYS` in sync across runners which share distributed caches. - Expect cache misses, cache artifacts rebuild, and longer first job runs after you change `FF_HASH_CACHE_KEYS`. 
> [!warning] > If you turn on `FF_HASH_CACHE_KEYS` but run an older version of the helper binary > (for example, because you pinned the helper image to an older version), hashing the > cache key and uploading or downloading caches still works. However, GitLab Runner > does not maintain the metadata of cache artifacts. ### The `[runners.cache.s3]` section The following parameters define S3 storage for cache. | Parameter | Type | Description | |-----------------------------|---------|-------------| | `ServerAddress` | string | A `host:port` for the S3-compatible server. If you are using a server other than AWS, consult the storage product documentation to determine the correct address. For DigitalOcean, the address must be in the format `spacename.region.digitaloceanspaces.com`. | | `AccessKey` | string | The access key specified for your S3 instance. | | `SecretKey` | string | The secret key specified for your S3 instance. | | `SessionToken` | string | The session token specified for your S3 instance when temporary credentials are used. | | `BucketName` | string | Name of the storage bucket where cache is stored. | | `BucketLocation` | string | Name of S3 region. | | `Insecure` | boolean | Set to `true` if the S3 service is available by `HTTP`. Default is `false`. | | `AuthenticationType` | string | Set to `iam` or `access-key`. Default is `access-key` if `ServerAddress`, `AccessKey`, and `SecretKey` are all provided. Defaults to `iam` if `ServerAddress`, `AccessKey`, or `SecretKey` are missing. | | `ServerSideEncryption` | string | The server-side encryption type to use with S3. In GitLab 15.3 and later, available types are `S3`, or `KMS`. In GitLab 17.5 and later, [`DSSE-KMS`](https://docs.aws.amazon.com/AmazonS3/latest/userguide/UsingDSSEncryption.html) is supported. | | `ServerSideEncryptionKeyID` | string | The alias, ID, or ARN of a KMS key used for encryption when you use KMS. If you use an alias, prefix it with `alias/`. 
Use ARN format for cross-account scenarios. Available in GitLab 15.3 and later. | | `DualStack` | boolean | Enables IPv4 and IPv6 endpoints. Default is `true`. Disable this setting if you are using AWS S3 Express. GitLab ignores this setting if you set `ServerAddress`. Available in GitLab 17.5 and later. | | `Accelerate` | boolean | Enables AWS S3 Transfer Acceleration. GitLab sets this to `true` automatically if `ServerAddress` is configured as an Accelerated endpoint. Available in GitLab 17.5 and later. | | `PathStyle` | boolean | Enables path-style access. By default, GitLab automatically detects this setting based on the `ServerAddress` value. Available in GitLab 17.5 and later. | | `UploadRoleARN` | string | Deprecated. Use `RoleARN` instead. Specifies an AWS role ARN that can be used with `AssumeRole` to generate time-limited `PutObject` S3 requests. Enables S3 multipart uploads. Available in GitLab 17.5 and later. | | `RoleARN` | string | Specifies an AWS role ARN that can be used with `AssumeRole` to generate time-limited `GetObject` and `PutObject` S3 requests. Enables S3 multipart transfers. Available in GitLab 17.8 and later. | | `AssumeRoleMaxConcurrency` | integer | Maximum concurrent `AssumeRole` requests to AWS STS when `RoleARN` is set. Defaults to `5`. Set to `-1` to remove the limit. | Example: ```toml [runners.cache] Type = "s3" Path = "path/to/prefix" Shared = false [runners.cache.s3] ServerAddress = "s3.amazonaws.com" AccessKey = "AWS_S3_ACCESS_KEY" SecretKey = "AWS_S3_SECRET_KEY" BucketName = "runners-cache" BucketLocation = "eu-west-1" Insecure = false ServerSideEncryption = "KMS" ServerSideEncryptionKeyID = "alias/my-key" ``` ## Authentication GitLab Runner uses different authentication methods for S3 based on your configuration. ### Static credentials The runner uses static access key authentication when: - `ServerAddress`, `AccessKey`, and `SecretKey` parameters are specified but `AuthenticationType` is not provided. 
- `AuthenticationType = "access-key"` is explicitly set. ### AWS SDK default credential chain The runner uses the [AWS SDK default credential chain](https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials) when: - Any of `ServerAddress`, `AccessKey`, or `SecretKey` are omitted and `AuthenticationType` is not provided. - `AuthenticationType = "iam"` is explicitly set. The credential chain attempts authentication in the following order: 1. Environment variables (`AWS_ACCESS_KEY_ID`, `AWS_SECRET_ACCESS_KEY`) 1. Shared credentials file (`~/.aws/credentials`) 1. IAM instance profile (for EC2 instances) 1. Other AWS credential sources supported by the SDK If `RoleARN` is not specified, the default credential chain is executed by the runner manager, which is often not necessarily on the same machine where the build runs. For example, in an [autoscale](autoscale.md) configuration, the job runs on a different machine. Similarly, with the Kubernetes executor, the build pod can also run on a different node than the runner manager. This behavior makes it possible to grant bucket-level access only to the runner manager. If `RoleARN` is specified, the credentials are resolved within the execution context of the helper image. For more information, see [RoleARN](#enable-multipart-transfers-with-rolearn). When you use Helm charts to install GitLab Runner, and `rbac.create` is set to `true` in the `values.yaml` file, a service account is created. The service account's annotations are retrieved from the `rbac.serviceAccountAnnotations` section. For runners on Amazon EKS, you can specify an IAM role to assign to the service account. The specific annotation needed is: `eks.amazonaws.com/role-arn: arn:aws:iam:::role/`. 
The IAM policy for this role must have permissions to do the following actions for the specified bucket: - `s3:PutObject` - `s3:GetObjectVersion` - `s3:GetObject` - `s3:DeleteObject` - `s3:ListBucket` If you use `ServerSideEncryption` of type `KMS`, this role must also have permission to do the following actions for the specified AWS KMS Key: - `kms:Encrypt` - `kms:Decrypt` - `kms:ReEncrypt*` - `kms:GenerateDataKey*` - `kms:DescribeKey` `ServerSideEncryption` of type `SSE-C` is not supported. `SSE-C` requires that the headers, which contain the user-supplied key, are provided for the download request, in addition to the pre-signed URL. This would mean passing the key material to the job, where the key can't be kept safe. This does have the potential to leak the decryption key. A discussion about this issue is in [this merge request](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/3295). > [!note] > The maximum size of a single file that can be uploaded to AWS S3 cache is 5 GB. > A discussion about potential workarounds for this behavior is in [this issue](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26921). #### Use KMS key encryption in S3 bucket for runner cache The `GenerateDataKey` API uses the KMS symmetric key to create a data key for client-side encryption (). 
KMS key configuration must be as follows: | Attribute | Description | |-----------|-------------| | Key Type | Symmetric | | Origin | `AWS_KMS` | | Key Spec | `SYMMETRIC_DEFAULT` | | Key Usage | Encrypt and decrypt | The IAM policy for the role assigned to the ServiceAccount defined in `rbac.serviceAccountName` must have permissions to do the following actions for the KMS Key: - `kms:GetPublicKey` - `kms:Decrypt` - `kms:Encrypt` - `kms:DescribeKey` - `kms:GenerateDataKey` #### Enable multipart transfers with `RoleARN` To limit access to the cache, the runner manager generates timed-limited, [pre-signed URLs](https://docs.aws.amazon.com/AmazonS3/latest/userguide/using-presigned-url.html) for jobs to download from and upload to the cache. However, AWS S3 limits a [single PUT request to 5 GB](https://docs.aws.amazon.com/AmazonS3/latest/userguide/upload-objects.html). For files larger than 5 GB, you must use the multipart upload API. Multipart transfers are only supported with AWS S3 and not for other S3 providers. Because the runner manager handles jobs for different projects, the runner manager cannot pass around S3 credentials that have bucket-wide permissions. Instead, the runner manger uses time-limited pre-signed URLs and narrowly-scoped credentials to restrict access to one specific object. To use S3 multipart transfers with AWS, specify an IAM role in `RoleARN` in the `arn:aws:iam::::` format. This role generates time-limited AWS credentials that are narrowly scoped to write to a specific blob in the bucket. Ensure that your original S3 credentials can access `AssumeRole` for the specified `RoleARN`. The IAM role specified in `RoleARN` must have the following permissions: - `s3:GetObject` access to the bucket specified in `BucketName`. - `s3:PutObject` access to the bucket specified in `BucketName`. - `s3:ListBucket` access to the bucket specified in `BucketName`. 
- `kms:Decrypt` and `kms:GenerateDataKey` if server side encryption with KMS or DSSE-KMS is enabled. For example, suppose you have an IAM role called `my-instance-role` attached to an EC2 instance with the ARN `arn:aws:iam::1234567890123:role/my-instance-role`. You can create a new role `arn:aws:iam::1234567890123:role/my-upload-role` that only has `s3:PutObject` permissions for `BucketName`. In the AWS settings for `my-instance-role`, the `Trust relationships` might look similar to this: ```json { "Version": "2012-10-17", "Statement": [ { "Effect": "Allow", "Principal": { "AWS": "arn:aws:iam::1234567890123:role/my-upload-role" }, "Action": "sts:AssumeRole" } ] } ``` You can also reuse `my-instance-role` as the `RoleARN` and avoid creating a new role. Make sure that `my-instance-role` has the `AssumeRole` permission. For example, an IAM profile associated with an EC2 instance might have the following `Trust relationships`: ```json { "Version": "2012-10-17", "Statement": [ { "Effect": "Allow", "Principal": { "Service": "ec2.amazonaws.com", "AWS": "arn:aws:iam::1234567890123:role/my-instance-role" }, "Action": "sts:AssumeRole" } ] } ``` You can use the AWS command-line interface to verify that your instance has the `AssumeRole` permission. For example: ```shell aws sts assume-role --role-arn arn:aws:iam::1234567890123:role/my-upload-role --role-session-name gitlab-runner-test1 ``` ##### How uploads work with `RoleARN` If `RoleARN` is present, every time the runner uploads to the cache: 1. The runner manager retrieves the original S3 credentials (specified through `AuthenticationType`, `AccessKey`, and `SecretKey`). 1. With the S3 credentials, the runner manager sends a request to the Amazon Security Token Service (STS) for [`AssumeRole`](https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html) with `RoleARN`. 
The policy request looks similar to this: ```json { "Version": "2012-10-17", "Statement": [ { "Effect": "Allow", "Action": ["s3:PutObject"], "Resource": "arn:aws:s3:::/" } ] } ``` 1. If the request is successful, the runner manager obtains temporary AWS credentials with a restricted session. 1. The runner manager passes these credentials and URL in the `s3:///` format to the cache archiver, which then uploads the file. ##### AssumeRole Prometheus metrics When `RoleARN` is set, GitLab Runner exposes the following Prometheus metrics for monitoring STS request behavior: | Metric | Type | Description | |--------|------|-------------| | `gitlab_runner_cache_s3_assume_role_requests_in_flight` | Gauge | Number of `AssumeRole` requests to AWS STS in progress. | | `gitlab_runner_cache_s3_assume_role_wait_seconds` | Histogram | Wait time to acquire a concurrency slot before issuing an `AssumeRole` request. | | `gitlab_runner_cache_s3_assume_role_duration_seconds` | Histogram | Duration of `AssumeRole` API calls to AWS STS. | | `gitlab_runner_cache_s3_assume_role_cache_hits_total` | Counter | Number of `AssumeRole` credential cache hits (STS call avoided). | | `gitlab_runner_cache_s3_assume_role_cache_misses_total` | Counter | Number of `AssumeRole` credential cache misses (STS call made). | | `gitlab_runner_cache_s3_assume_role_cached_credentials` | Gauge | Number of `AssumeRole` credentials held in the in-memory LRU cache. | | `gitlab_runner_cache_s3_assume_role_failures_total` | Counter | Number of failed `AssumeRole` requests. | #### Enable IAM roles for Kubernetes ServiceAccount resources To use IAM roles for service accounts, an IAM OIDC provider [must exist for your cluster](https://docs.aws.amazon.com/eks/latest/userguide/enable-iam-roles-for-service-accounts.html). After an IAM OIDC provider is associated with your cluster, you can create an IAM role to associate to the service account of the runner. 1. 
On the **Create Role** window, under **Select type of trusted entity**, select **Web Identity**. 1. On the **Trusted Relationships tab** of the role: - The **Trusted entities** section must have the format: `arn:aws:iam:::oidc-provider/oidc.eks..amazonaws.com/id/`. The **OIDC ID** can be found on EKS cluster's **Configuration** tab. - The **Condition** section must have the GitLab Runner service account defined in `rbac.serviceAccountName` or the default service account created if `rbac.create` is set to `true`: | Condition | Key | Value | |----------------|--------------------------------------------------------|-------| | `StringEquals` | `oidc.eks..amazonaws.com/id/:sub` | `system:serviceaccount::` | #### Use S3 Express One Zone buckets {{< history >}} - Introduced in GitLab Runner 17.5.0. {{< /history >}} > [!note] > [S3 Express One Zone directory buckets do not work with `RoleARN`](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/38484#note_2313111840) because the runner manager cannot restrict access to one specific object. 1. Set up an S3 Express One Zone bucket by following the [Amazon tutorial](https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-express-getting-started.html). 1. Configure `config.toml` with `BucketName` and `BucketLocation`. 1. Set `DualStack` to `false` as S3 Express does not support dual-stack endpoints. Example `config.toml`: ```toml [runners.cache] Type = "s3" [runners.cache.s3] BucketName = "example-express--usw2-az1--x-s3" BucketLocation = "us-west-2" DualStack = false ``` ### The `[runners.cache.gcs]` section The following parameters define native support for Google Cloud Storage. For more information about these values, see the [Google Cloud Storage (GCS) authentication documentation](https://docs.cloud.google.com/storage/docs/authentication#service_accounts). | Parameter | Type | Description | |-------------------|--------|-------------| | `CredentialsFile` | string | Path to the Google JSON key file. 
Only the `service_account` type is supported. If configured, this value takes precedence over the `AccessID` and `PrivateKey` configured directly in `config.toml`. | | `AccessID` | string | ID of GCP Service Account used to access the storage. | | `PrivateKey` | string | Private key used to sign GCS requests. | | `BucketName` | string | Name of the storage bucket where cache is stored. | | `UniverseDomain` | string | Universe domain for GCS requests (optional). For public Google Cloud, use `googleapis.com`. For Google Cloud Dedicated or other custom universe domains, specify the appropriate domain (for example, `custom.universe.com`). If you don't specify a domain, the default is `googleapis.com`. | Examples: **Credentials configured directly in `config.toml` file**: ```toml [runners.cache] Type = "gcs" Path = "path/to/prefix" Shared = false [runners.cache.gcs] AccessID = "cache-access-account@test-project-123456.iam.gserviceaccount.com" PrivateKey = "-----BEGIN PRIVATE KEY-----\nXXXXXX\n-----END PRIVATE KEY-----\n" BucketName = "runners-cache" UniverseDomain = "googleapis.com" # Optional ``` **Credentials in JSON file downloaded from GCP**: ```toml [runners.cache] Type = "gcs" Path = "path/to/prefix" Shared = false [runners.cache.gcs] CredentialsFile = "/etc/gitlab-runner/service-account.json" BucketName = "runners-cache" UniverseDomain = "googleapis.com" # Optional ``` **Application Default Credentials (ADC) from the metadata server in GCP**: When you use GitLab Runner with Google Cloud ADC, you typically use the default service account. Then you don't need to supply credentials for the instance: ```toml [runners.cache] Type = "gcs" Path = "path/to/prefix" Shared = false [runners.cache.gcs] BucketName = "runners-cache" UniverseDomain = "googleapis.com" # Optional ``` If you use ADC, be sure that the service account that you use has the `iam.serviceAccounts.signBlob` permission. 
Typically this is done by granting the [Service Account Token Creator role](https://docs.cloud.google.com/iam/docs/service-account-permissions#token-creator-role) to the service account. #### Workload Identity Federation for GKE Workload Identity Federation for GKE is supported with application default credentials (ADC). If you have issues getting workload identities to work: - Check the runner pod logs (not the build log) for the message `ERROR: generating signed URL`. This error might indicate a permission issue, such as: ```plaintext IAM returned 403 Forbidden: Permission 'iam.serviceAccounts.getAccessToken' denied on resource (or it may not exist). ``` - Try the following `curl` commands from within the runner pod: ```shell curl -H "Metadata-Flavor: Google" http://169.254.169.254/computeMetadata/v1/instance/service-accounts/default/email ``` This command should return the correct Kubernetes service account. Next, try to obtain an access token: ```shell curl -H "Metadata-Flavor: Google" http://169.254.169.254/computeMetadata/v1/instance/service-accounts/default/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform ``` If the command succeeds, the result returns a JSON payload with an access token. If it fails, check the service account permissions. ### The `[runners.cache.azure]` section The following parameters define native support for Azure Blob Storage. To learn more, view the [Azure Blob Storage documentation](https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction). While S3 and GCS use the word `bucket` for a collection of objects, Azure uses the word `container` to denote a collection of blobs. | Parameter | Type | Description | |-----------------|--------|-------------| | `AccountName` | string | Name of the Azure Blob Storage account used to access the storage. | | `AccountKey` | string | Storage account access key used to access the container. 
To omit `AccountKey` from the configuration, use [Azure workload or managed identities](#azure-workload-and-managed-identities). | | `ContainerName` | string | Name of the [storage container](https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction#containers) to save cache data in. | | `StorageDomain` | string | Domain name [used to service Azure storage endpoints](https://learn.microsoft.com/en-us/azure/china/resources-developer-guide#check-endpoints-in-azure) (optional). Default is `blob.core.windows.net`. | Example: ```toml [runners.cache] Type = "azure" Path = "path/to/prefix" Shared = false [runners.cache.azure] AccountName = "" AccountKey = "" ContainerName = "runners-cache" StorageDomain = "blob.core.windows.net" ``` #### Azure workload and managed identities {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27303) in GitLab Runner v17.5.0. {{< /history >}} To use Azure workload or managed identities, omit `AccountKey` from the configuration. When `AccountKey` is blank, the runner attempts to: 1. Obtain temporary credentials by using [`DefaultAzureCredential`](https://github.com/Azure/azure-sdk-for-go/blob/main/sdk/azidentity/README.md#defaultazurecredential). 1. Get a [User Delegation Key](https://learn.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key). 1. Generate a SAS token with that key to access a Storage Account blob. Ensure that the instance has the `Storage Blob Data Contributor` role assigned to it. If the instance does not have access to perform the actions above, GitLab Runner reports an `AuthorizationPermissionMismatch` error. To use Azure workload identities, add the `service_account` associated with the identity and the pod label `azure.workload.identity/use` in the `runner.kubernetes` section. 
For example, if `service_account` is `gitlab-runner`: ```toml [runners.kubernetes] service_account = "gitlab-runner" [runners.kubernetes.pod_labels] "azure.workload.identity/use" = "true" ``` Ensure that the `service_account` has the `azure.workload.identity/client-id` annotation associated with it: ```yaml serviceAccount: annotations: azure.workload.identity/client-id: ``` For GitLab 17.7 and later, this configuration is sufficient to set up workload identities. However, for GitLab Runner 17.5 and 17.6, you must also configure the runner manager with: - The `azure.workload.identity/use` pod label - A service account to use with the workload identity For example, with the GitLab Runner Helm chart: ```yaml serviceAccount: name: "gitlab-runner" podLabels: azure.workload.identity/use: "true" ``` The label is needed because the credentials are retrieved from different sources. For cache downloads, the credentials are retrieved from the runner manager. For cache uploads, credentials are retrieved from the pod that runs the [helper image](#helper-image). For more details, see [issue 38330](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/38330). ## The `[runners.kubernetes]` section The following table lists configuration parameters available for the Kubernetes executor. For more parameters, see the [documentation for the Kubernetes executor](../executors/kubernetes/_index.md). | Parameter | Type | Description | |------------------------------|---------|-------------| | `host` | string | Optional. Kubernetes host URL. If not specified, the runner attempts to auto-discover it. | | `cert_file` | string | Optional. Kubernetes auth certificate. | | `key_file` | string | Optional. Kubernetes auth private key. | | `ca_file` | string | Optional. Kubernetes auth ca certificate. | | `image` | string | Default container image to use for jobs when none is specified. | | `allowed_images` | array | Wildcard list of container images that are allowed in `.gitlab-ci.yml`. 
If not present all images are allowed (equivalent to `["*/*:*"]`). Use with the [Docker](../executors/docker.md#restrict-docker-images-and-services) or [Kubernetes](../executors/kubernetes/_index.md#restrict-docker-images-and-services) executors. | | `allowed_services` | array | Wildcard list of services that are allowed in `.gitlab-ci.yml`. If not present all images are allowed (equivalent to `["*/*:*"]`). Use with the [Docker](../executors/docker.md#restrict-docker-images-and-services) or [Kubernetes](../executors/kubernetes/_index.md#restrict-docker-images-and-services) executors. | | `namespace` | string | Namespace to run Kubernetes jobs in. | | `privileged` | boolean | Run all containers with the privileged flag enabled. | | `allow_privilege_escalation` | boolean | Optional. Runs all containers with the `allowPrivilegeEscalation` flag enabled. | | `node_selector` | table | A `table` of `key=value` pairs of `string=string`. Limits the creation of pods to Kubernetes nodes that match all the `key=value` pairs. | | `image_pull_secrets` | array | An array of items containing the Kubernetes `docker-registry` secret names used to authenticate container images pulling from private registries. | | `logs_base_dir` | string | Base directory to be prepended to the generated path to store build logs. [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/37760) in GitLab Runner 17.2. | | `scripts_base_dir` | string | Base directory to be prepended to the generated path to store build scripts. [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/37760) in GitLab Runner 17.2. | | `service_account` | string | Default service account that job/executor pods use to communicate with the Kubernetes API. 
| Example: ```toml [runners.kubernetes] host = "https://45.67.34.123:4892" cert_file = "/etc/ssl/kubernetes/api.crt" key_file = "/etc/ssl/kubernetes/api.key" ca_file = "/etc/ssl/kubernetes/ca.crt" image = "golang:1.8" privileged = true allow_privilege_escalation = true image_pull_secrets = ["docker-registry-credentials", "optional-additional-credentials"] allowed_images = ["ruby:*", "python:*", "php:*"] allowed_services = ["postgres:9.4", "postgres:latest"] logs_base_dir = "/tmp" scripts_base_dir = "/tmp" [runners.kubernetes.node_selector] gitlab = "true" ``` ## Helper image When you use `docker`, `docker+machine`, or `kubernetes` executors, GitLab Runner uses a specific container to handle Git, artifacts, and cache operations. This container is created from an image named `helper image`. The helper image is available for amd64, arm, arm64, s390x, ppc64le, and riscv64 architectures. It contains a `gitlab-runner-helper` binary, which is a special compilation of GitLab Runner binary. It contains only a subset of available commands, and Git, Git LFS, and SSL certificates store. The helper image has a few flavors: `alpine`, `alpine3.21`, `alpine-latest`, `ubi-fips` and `ubuntu`. The `alpine` image is the default due to its small footprint. Using `helper_image_flavor = "ubuntu"` selects the `ubuntu` flavor of the helper image. In GitLab Runner 16.1 to 17.1, the `alpine` flavor is an alias for `alpine3.18`. In GitLab Runner 17.2 to 17.6, it's an alias for `alpine3.19`. In GitLab Runner 17.7 and later, it's an alias for `alpine3.21`. In GitLab Runner 18.4 and later, it's an alias for `alpine-latest`. The `alpine-latest` flavor uses `alpine:latest` as its base image, and will naturally increment versions as new upstream versions are released. When GitLab Runner is installed from the `DEB` or `RPM` packages, images for the supported architectures are installed on the host. 
If Docker Engine can't find the specified image version, the runner automatically downloads it before running the job. Both the `docker` and `docker+machine` executors work this way. For the `alpine` flavors, only the default `alpine` flavor image is included in the package. All other flavors are downloaded from the registry. The `kubernetes` executor and manual installations of GitLab Runner work differently. - For manual installations, the `gitlab-runner-helper` binary is not included. - For the `kubernetes` executor, the Kubernetes API doesn't allow the `gitlab-runner-helper` image to be loaded from a local archive. In both cases, GitLab Runner [downloads the helper image](#helper-image-registry). The GitLab Runner revision and architecture define which tag to download. ### Helper image configuration for Kubernetes on Arm By default, the correct [helper image for your architecture](../executors/kubernetes/_index.md#operating-system-architecture-and-windows-kernel-version) is selected. If you need to set a custom `helper_image` path to use the `arm64` helper image on `arm64` Kubernetes clusters, set the following values in your [configuration file](../executors/kubernetes/_index.md#configuration-settings): ```toml [runners.kubernetes] helper_image = "my.registry.local/gitlab/gitlab-runner-helper:arm64-v${CI_RUNNER_VERSION}" ``` ### Runner images that use an old version of Alpine Linux {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/3122) in GitLab Runner 14.5. {{< /history >}} Images are built with multiple versions of Alpine Linux. You can use a newer version of Alpine, but at the same time use older versions as well. For the helper image, change the `helper_image_flavor` or read the [Helper image](#helper-image) section. 
For the GitLab Runner image, follow the same logic, where `alpine`, `alpine3.19`, `alpine3.21`, or `alpine-latest` is used as a prefix in the image, before the version: ```shell docker pull gitlab/gitlab-runner:alpine3.19-v16.1.0 ``` ### Alpine `pwsh` images As of GitLab Runner 16.1 and later, all `alpine` helper images have a `pwsh` variant. The only exception is `alpine-latest` because the [`powershell` Docker images](https://learn.microsoft.com/en-us/powershell/scripting/install/powershell-in-docker?view=powershell-7.4) on which the GitLab Runner helper images are based do not support `alpine:latest`. Example: ```shell docker pull registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:alpine3.21-x86_64-v17.7.0-pwsh ``` ### Helper image registry In GitLab 15.0 and earlier, you configure helper images to use images from Docker Hub. In GitLab 15.1 and later, the helper image is pulled from the GitLab Container Registry on GitLab.com at `registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:x86_64-v${CI_RUNNER_VERSION}`. GitLab Self-Managed instances also pull the helper image from the GitLab Container Registry on GitLab.com by default. To check the status of the GitLab Container Registry on GitLab.com, see [GitLab System Status](https://status.gitlab.com/). ### Override the helper image In some cases, you might need to override the helper image for the following reasons: 1. **Speed up jobs execution**: In environments with slower internet connection, downloading the same image multiple times can increase the time it takes to execute a job. Downloading the helper image from a local registry, where the exact copy of `registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:XYZ` is stored, can speed things up. 1. **Security concerns**: You may not want to download external dependencies that were not checked before. There might be a business rule to use only dependencies that were reviewed and stored in local repositories. 1. 
**Build environments without internet access**: If you have [Kubernetes clusters installed in an offline environment](../install/operator.md#install-gitlab-runner-operator-on-kubernetes-clusters-in-offline-environments), you can use a local image registry or package repository to pull images used in CI/CD jobs. 1. **Additional software**: You may want to install some additional software to the helper image, like `openssh` to support submodules accessible with `git+ssh` instead of `git+http`. In these cases, you can configure a custom image by using the `helper_image` configuration field, which is available for the `docker`, `docker+machine`, and `kubernetes` executors: ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) helper_image = "my.registry.local/gitlab/gitlab-runner-helper:tag" ``` The version of the helper image should be considered to be strictly coupled with the version of GitLab Runner. One of the main reasons for providing these images is that GitLab Runner is using the `gitlab-runner-helper` binary. This binary is compiled from part of the GitLab Runner source. This binary uses an internal API that is expected to be the same in both binaries. By default, GitLab Runner references a `registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:XYZ` image, where `XYZ` is based on the GitLab Runner architecture and Git revision. You can define the image version by using one of the [version variables](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/common/version.go#L60-61): ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) helper_image = "my.registry.local/gitlab/gitlab-runner-helper:x86_64-v${CI_RUNNER_VERSION}" ``` With this configuration, GitLab Runner instructs the executor to use the image in version `x86_64-v${CI_RUNNER_VERSION}`, which is based on its compilation data. After updating GitLab Runner to a new version, GitLab Runner tries to download the proper image. 
The image should be uploaded to the registry before upgrading GitLab Runner, otherwise the jobs start failing with a "No such image" error. The helper image is tagged by `$CI_RUNNER_VERSION` in addition to `$CI_RUNNER_REVISION`. Both tags are valid and point to the same image. ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) helper_image = "my.registry.local/gitlab/gitlab-runner-helper:x86_64-v${CI_RUNNER_VERSION}" ``` #### When using PowerShell Core An additional version of the helper image for Linux, which contains PowerShell Core, is published with the `registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:XYZ-pwsh` tag. ## The `[runners.custom_build_dir]` section {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/1267) in GitLab Runner 11.10. {{< /history >}} This section defines [custom build directories](https://docs.gitlab.com/ci/runners/configure_runners/#custom-build-directories) parameters. This feature, if not configured explicitly, is enabled by default for `kubernetes`, `docker`, `docker+machine`, `docker autoscaler`, and `instance` executors. For all other executors, it is disabled by default. This feature requires that `GIT_CLONE_PATH` is in a path defined in `runners.builds_dir`. To use the `builds_dir`, use the `$CI_BUILDS_DIR` variable. By default, this feature is enabled only for `docker` and `kubernetes` executors, because they provide a good way to separate resources. This feature can be explicitly enabled for any executor, but use caution when you use it with executors that share `builds_dir` and have `concurrent > 1`. | Parameter | Type | Description | |-----------|---------|-------------| | `enabled` | boolean | Allow user to define a custom build directory for a job. 
| Example: ```toml [runners.custom_build_dir] enabled = true ``` ### Default Build Directory GitLab Runner clones the repository to a path that exists under a base path better known as the _Builds Directory_. The default location of this base directory depends on the executor. For: - [Kubernetes](../executors/kubernetes/_index.md), [Docker](../executors/docker.md) and [Docker Machine](../executors/docker_machine.md) executors, it is `/builds` inside of the container. - [Instance](../executors/instance.md), it is `~/builds` in the home directory of the user configured to handle the SSH or WinRM connection to the target machine. - [Docker Autoscaler](../executors/docker_autoscaler.md), it is `/builds` inside of the container. - [Shell](../executors/shell.md) executor, it is `$PWD/builds`. - [SSH](../executors/ssh.md), [VirtualBox](../executors/virtualbox.md) and [Parallels](../executors/parallels.md) executors, it is `~/builds` in the home directory of the user configured to handle the SSH connection to the target machine. - [Custom](../executors/custom.md) executors, no default is provided and it must be explicitly configured, otherwise, the job fails. The used _Builds Directory_ may be defined explicitly by the user with the [`builds_dir`](#the-runners-section) setting. > [!note] > You can also specify > [`GIT_CLONE_PATH`](https://docs.gitlab.com/ci/runners/configure_runners/#custom-build-directories) > if you want to clone to a custom directory, and the guideline below > doesn't apply. GitLab Runner uses the _Builds Directory_ for all the jobs that it runs, but nests them using a specific pattern `{builds_dir}/$RUNNER_TOKEN_KEY/$CONCURRENT_PROJECT_ID/$NAMESPACE/$PROJECT_NAME`. For example: `/builds/2mn-ncv-/0/user/playground`. GitLab Runner does not stop you from storing things inside of the _Builds Directory_. For example, you can store tools inside of `/builds/tools` that can be used during CI execution. 
We **HIGHLY** discourage this, you should never store anything inside of the _Builds Directory_. GitLab Runner should have total control over it and does not provide stability in such cases. If you have dependencies that are required for your CI, you must install them in some other place. ## Cleaning Git configuration {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5438) in GitLab Runner 17.10. {{< /history >}} At the beginning and end of every build, GitLab Runner removes the following files from the repository and its submodules: - Git lock files (`{index,shallow,HEAD,config}.lock`) - Post-checkout hooks (`hooks/post-checkout`) If you enable `clean_git_config`, the following additional files or directories are removed from the repository, its submodules, and the Git template directory: - `.git/config` file - `.git/hooks` directory This cleanup prevents custom, ephemeral, or potentially malicious Git configuration from caching between jobs. Before GitLab Runner 17.10, cleanups behaved differently: - Git lock files and Post-checkout hooks cleanup only occurred at the beginning of a job and not at the end. - Other Git configurations (now controlled by `clean_git_config`) were not removed unless `FF_ENABLE_JOB_CLEANUP` was set. When you set this flag, only the main repository's `.git/config` was deleted but not submodule configurations. The `clean_git_config` setting defaults to `true`. But, it defaults to `false` when: - [Shell executor](../executors/shell.md) is used. - [Git strategy](https://docs.gitlab.com/ci/runners/configure_runners/#git-strategy) is set to `none`. Explicit `clean_git_config` configuration takes precedence over the default setting. ## The `[runners.referees]` section Use GitLab Runner referees to pass extra job monitoring data to GitLab. Referees are workers in the runner manager that query and collect additional data related to a job. The results are uploaded to GitLab as job artifacts. 
### Use the Metrics Runner referee If the machine or container running the job exposes [Prometheus](https://prometheus.io) metrics, GitLab Runner can query the Prometheus server for the entirety of the job duration. After the metrics are received, they are uploaded as a job artifact that can be used for analysis later. Only the [`docker-machine` executor](../executors/docker_machine.md) supports the referee. ### Configure the Metrics Runner Referee for GitLab Runner Define `[runner.referees]` and `[runner.referees.metrics]` in your `config.toml` file in a `[[runner]]` section and add the following fields: | Setting | Description | |----------------------|-------------| | `prometheus_address` | The server that collects metrics from GitLab Runner instances. It must be accessible by the runner manager when the job finishes. | | `query_interval` | The frequency the Prometheus instance associated with a job is queried for time series data, defined as an interval (in seconds). | | `queries` | An array of [PromQL](https://prometheus.io/docs/prometheus/latest/querying/basics/) queries that are executed for each interval. 
| Here is a complete configuration example for `node_exporter` metrics: ```toml [[runners]] [runners.referees] [runners.referees.metrics] prometheus_address = "http://localhost:9090" query_interval = 10 queries = [ "arp_entries:rate(node_arp_entries{{selector}}[{interval}])", "context_switches:rate(node_context_switches_total{{selector}}[{interval}])", "cpu_seconds:rate(node_cpu_seconds_total{{selector}}[{interval}])", "disk_read_bytes:rate(node_disk_read_bytes_total{{selector}}[{interval}])", "disk_written_bytes:rate(node_disk_written_bytes_total{{selector}}[{interval}])", "memory_bytes:rate(node_memory_MemTotal_bytes{{selector}}[{interval}])", "memory_swap_bytes:rate(node_memory_SwapTotal_bytes{{selector}}[{interval}])", "network_tcp_active_opens:rate(node_netstat_Tcp_ActiveOpens{{selector}}[{interval}])", "network_tcp_passive_opens:rate(node_netstat_Tcp_PassiveOpens{{selector}}[{interval}])", "network_receive_bytes:rate(node_network_receive_bytes_total{{selector}}[{interval}])", "network_receive_drops:rate(node_network_receive_drop_total{{selector}}[{interval}])", "network_receive_errors:rate(node_network_receive_errs_total{{selector}}[{interval}])", "network_receive_packets:rate(node_network_receive_packets_total{{selector}}[{interval}])", "network_transmit_bytes:rate(node_network_transmit_bytes_total{{selector}}[{interval}])", "network_transmit_drops:rate(node_network_transmit_drop_total{{selector}}[{interval}])", "network_transmit_errors:rate(node_network_transmit_errs_total{{selector}}[{interval}])", "network_transmit_packets:rate(node_network_transmit_packets_total{{selector}}[{interval}])" ] ``` Metrics queries are in `canonical_name:query_string` format. The query string supports two variables that are replaced during execution: | Setting | Description | |--------------|-------------| | `{selector}` | Replaced with a `label_name=label_value` pair that selects metrics generated in Prometheus by a specific GitLab Runner instance. 
| | `{interval}` | Replaced with the `query_interval` parameter from the `[runners.referees.metrics]` configuration for this referee. | For example, a shared GitLab Runner environment that uses the `docker-machine` executor would have a `{selector}` similar to `node=shared-runner-123`. ================================================ FILE: docs/configuration/autoscale.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Docker Machine Executor autoscale configuration --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} > [!note] > The Docker Machine executor was deprecated in GitLab 17.5 and is scheduled for removal in GitLab 20.0 (May 2027). > While we continue to support the Docker Machine executor till GitLab 20.0, we do not plan to add new features. > We will address only critical bugs that could prevent CI/CD job execution or affect running costs. > If you're using the Docker Machine executor on Amazon Web Services (AWS) EC2, > Microsoft Azure Compute, or Google Compute Engine (GCE), migrate to the > [GitLab Runner Autoscaler](../runner_autoscale/_index.md). With the autoscale feature, you use resources in a more elastic and dynamic way. GitLab Runner can autoscale, so that your infrastructure contains only as many build instances as are necessary at any time. When you configure GitLab Runner to use only autoscale, the system hosting GitLab Runner acts as a bastion for all the machines it creates. This machine is referred to as a "Runner Manager." > [!note] > Docker has deprecated Docker Machine, the underlying technology used to autoscale > runners on public cloud virtual machines. 
You can read the issue discussing the > [strategy in response to the deprecation of Docker Machine](https://gitlab.com/gitlab-org/gitlab/-/issues/341856) > for more details. Docker Machine autoscaler creates one container per VM, regardless of the `limit` and `concurrent` configuration. When this feature is enabled and configured properly, jobs are executed on machines created _on demand_. Those machines, after the job is finished, can wait to run the next jobs or can be removed after the configured `IdleTime`. In case of many cloud providers, this approach reduces costs by using existing instances. Below, you can see a real life example of the GitLab Runner autoscale feature, tested on GitLab.com for the [GitLab Community Edition](https://gitlab.com/gitlab-org/gitlab-foss) project: ![Real life example of autoscaling](img/autoscale-example.png) Each machine on the chart is an independent cloud instance, running jobs inside of Docker containers. ## System requirements Before configuring autoscale, you must: - [Prepare your own environment](../executors/docker_machine.md#preparing-the-environment). - Optionally use a [forked version](../executors/docker_machine.md#forked-version-of-docker-machine) of Docker machine supplied by GitLab, which has some additional fixes. ## Supported cloud providers The autoscale mechanism is based on [Docker Machine](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/tree/main/). All supported virtualization and cloud provider parameters are available at the GitLab-managed fork of [Docker Machine](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/tree/main/). ## Runner configuration This section describes the significant autoscale parameters. For more configuration details, read the [advanced configuration](advanced-configuration.md). ### Runner global options | Parameter | Value | Description | |--------------|---------|-------------| | `concurrent` | integer | Limits how many jobs globally can be run concurrently. 
This parameter sets the maximum number of jobs that can use _all_ defined runners, both local and autoscale. Together with `limit` (from [`[[runners]]` section](#runners-options)) and `IdleCount` (from [`[runners.machine]` section](advanced-configuration.md#the-runnersmachine-section)) it affects the upper limit of created machines. | ### `[[runners]]` options | Parameter | Value | Description | |------------|---------|-------------| | `executor` | string | To use the autoscale feature, `executor` must be set to `docker+machine`. | | `limit` | integer | Limits how many jobs can be handled concurrently by this specific token. `0` means don't limit. For autoscale, it's the upper limit of machines created by this provider (in conjunction with `concurrent` and `IdleCount`). | ### `[runners.machine]` options Configuration parameter details can be found in [GitLab Runner - Advanced Configuration - The `[runners.machine]` section](advanced-configuration.md#the-runnersmachine-section). ### `[runners.cache]` options Configuration parameter details can be found in [GitLab Runner - Advanced Configuration - The `[runners.cache]` section](advanced-configuration.md#the-runnerscache-section) ### Additional configuration information There is also a special mode, when you set `IdleCount = 0`. In this mode, machines are **always** created **on-demand** before each job (if there is no available machine in idle state). After the job is finished, the autoscaling algorithm works [the same as it is described below](#autoscaling-algorithm-and-parameters). The machine is waiting for the next jobs, and if no one is executed, after the `IdleTime` period, the machine is removed. If there are no jobs, there are no machines in idle state. If the `IdleCount` is set to a value greater than `0`, then idle VMs are created in the background. The runner acquires an existing idle VM before asking for a new job. 
- If the job is assigned to the runner, then that job is sent to the previously acquired VM. - If the job is not assigned to the runner, then the lock on the idle VM is released and the VM is returned back to the pool. ## Limit the number of VMs created by the Docker Machine executor To limit the number of virtual machines (VMs) created by the Docker Machine executor, use the `limit` parameter in the `[[runners]]` section of the `config.toml` file. The `concurrent` parameter **does not** limit the number of VMs. One process can be configured to manage multiple runner workers. For more information, see [Basic configuration: one runner manager, one runner](../fleet_scaling/_index.md#basic-configuration-one-runner-manager-one-runner). This example illustrates the values set in the `config.toml` file for one runner process: ```toml concurrent = 100 [[runners]] name = "first" executor = "shell" limit = 40 (...) [[runners]] name = "second" executor = "docker+machine" limit = 30 (...) [[runners]] name = "third" executor = "ssh" limit = 10 [[runners]] name = "fourth" executor = "virtualbox" limit = 20 (...) ``` With this configuration: - One runner process can create four different runner workers using different execution environments. - The `concurrent` value is set to 100, so this one runner executes a maximum of 100 concurrent GitLab CI/CD jobs. - Only the `second` runner worker is configured to use the Docker Machine executor and therefore can automatically create VMs. - The `limit` setting of `30` means that the `second` runner worker can execute a maximum of 30 CI/CD jobs on autoscaled VMs at any point in time. - While `concurrent` defines the global concurrency limit across multiple `[[runners]]` workers, `limit` defines the maximum concurrency for a single `[[runners]]` worker. In this example, the runner process handles: - Across all `[[runners]]` workers, up to 100 concurrent jobs. 
- For the `first` worker, no more than 40 jobs, which are executed with the `shell` executor. - For the `second` worker, no more than 30 jobs, which are executed with the `docker+machine` executor. Additionally, GitLab Runner maintains VMs based on the autoscaling configuration in `[runners.machine]`, but no more than 30 VMs in all states (idle, in-use, in-creation, in-removal). - For the `third` worker, no more than 10 jobs, executed with the `ssh` executor. - For the `fourth` worker, no more than 20 jobs, executed with the `virtualbox` executor. In this second example, there are two `[[runners]]` workers configured to use the `docker+machine` executor. With this configuration, each runner worker manages a separate pool of VMs that are constrained by the value of the `limit` parameter. ```toml concurrent = 100 [[runners]] name = "first" executor = "docker+machine" limit = 80 (...) [[runners]] name = "second" executor = "docker+machine" limit = 50 (...) ``` In this example: - The runner processes no more than 100 jobs (the value of `concurrent`). - The runner process executes jobs in two `[[runners]]` workers, each of which uses the `docker+machine` executor. - The `first` runner can create a maximum of 80 VMs. Therefore this runner can execute a maximum of 80 jobs at any point in time. - The `second` runner can create a maximum of 50 VMs. Therefore this runner can execute a maximum of 50 jobs at any point in time. > [!note] > Though the sum of limit values is `130` (`80 + 50`), the runner process executes a maximum of 100 jobs concurrently because the global > `concurrent` setting is 100. ## Autoscaling algorithm and parameters The autoscaling algorithm is based on these parameters: - `IdleCount` - `IdleCountMin` - `IdleScaleFactor` - `IdleTime` - `MaxGrowthRate` - `limit` Any machine not running a job is considered to be idle. When GitLab Runner is in autoscale mode, it monitors all machines and ensures that there is always an `IdleCount` of idle machines. 
If there is an insufficient number of idle machines, GitLab Runner starts provisioning new machines, subject to the `MaxGrowthRate` limit. Requests for machines above the `MaxGrowthRate` value are put on hold until the number of machines being created falls below `MaxGrowthRate`. At the same time, GitLab Runner is checking the duration of the idle state of each machine. If the time exceeds the `IdleTime` value, the machine is automatically removed. ### Example configuration Consider a GitLab Runner configured with the following autoscale parameters: ```toml [[runners]] limit = 10 # (...) executor = "docker+machine" [runners.machine] MaxGrowthRate = 1 IdleCount = 2 IdleTime = 1800 # (...) ``` In the beginning, when no jobs are queued, GitLab Runner starts two machines (`IdleCount = 2`), and sets them in idle state. Also, the `IdleTime` is set to 30 minutes (`IdleTime = 1800`). Now, assume that five jobs are queued in GitLab CI/CD. The first two jobs are sent to the idle machines of which we have two. GitLab Runner starts new machines as it now notices that the number of idle machines is less than `IdleCount` (`0 < 2`). These machines are provisioned sequentially, to prevent exceeding the `MaxGrowthRate`. The remaining three jobs are assigned to the first machine that is ready. As an optimization, this can be a machine that was busy, but has now completed its job, or it can be a newly provisioned machine. For this example, assume that provisioning is fast and the new machines are ready before any earlier jobs complete. We now have one idle machine, so GitLab Runner starts one new machine to satisfy `IdleCount`. Because there are no new jobs in queue, those two machines stay in idle state and GitLab Runner is satisfied. **What happened**: In the example, there are two machines waiting in idle state for new jobs. After the five jobs are queued, new machines are created. So, in total there are seven machines: five running jobs and two in idle state waiting for the next jobs. 
GitLab Runner creates a new idle machine for each machine used for the job execution, until `IdleCount` is satisfied. Machines are created up to the number defined by the `limit` parameter. When GitLab Runner detects that this `limit` has been reached, it stops autoscaling. The new jobs must wait in the job queue until machines start returning to idle state. In the above example, two idle machines are always available. The `IdleTime` parameter applies only when the number exceeds `IdleCount`. At this point, GitLab Runner reduces the number of machines to match `IdleCount`. **Scaling down**: After the job finishes, the machine is set to idle state and waits for new jobs to be executed. If no new jobs appear in the queue, idle machines are removed after the time specified by `IdleTime`. In this example, all machines are removed after 30 minutes of inactivity (measured from when each machine's last job execution ended). GitLab Runner maintains an `IdleCount` of idle machines running, just like at the beginning of the example. The autoscaling algorithm works as follows: 1. GitLab Runner starts. 1. GitLab Runner creates two idle machines. 1. GitLab Runner picks one job. 1. GitLab Runner creates one more machine to maintain two idle machines. 1. The picked job finishes, resulting in three idle machines. 1. When one of the three idle machines exceeds `IdleTime` from the time after it picked the last job, it is removed. 1. GitLab Runner always maintains at least two idle machines for quick job processing. The following chart illustrates the states of machines and builds (jobs) in time: ![Autoscale state chart](img/autoscale-state-chart.png) ## How `concurrent`, `limit` and `IdleCount` generate the upper limit of running machines A magic equation doesn't exist to tell you what to set `limit` or `concurrent` to. Act according to your needs. Having `IdleCount` of idle machines is a speedup feature. You don't need to wait 10 s/20 s/30 s for the instance to be created. 
But as a user, you'd want all your machines (for which you need to pay) to be running jobs, not stay in idle state. So you should have `concurrent` and `limit` set to values that run the maximum count of machines you are willing to pay for. As for `IdleCount`, it should be set to a value that generates a minimum amount of _not used_ machines when the job queue is empty. Let's assume the following example: ```toml concurrent=20 [[runners]] limit = 40 [runners.machine] IdleCount = 10 ``` In the above scenario the total amount of machines we could have is 30. The `limit` of total machines (building and idle) can be 40. We can have 10 idle machines but the `concurrent` jobs are 20. So in total we can have 20 concurrent machines running jobs and 10 idle, summing up to 30. But what happens if the `limit` is less than the total amount of machines that could be created? The example below explains that case: ```toml concurrent=20 [[runners]] limit = 25 [runners.machine] IdleCount = 10 ``` In this example, you can have a maximum of 20 concurrent jobs and 25 machines. In the worst case scenario, you can't have 10 idle machines, but only 5, because the `limit` is 25. ## The `IdleScaleFactor` strategy The `IdleCount` parameter defines a static number of idle machines that the runner should sustain. The value you assign depends on your use case. Start by assigning a reasonably small number of machines in the idle state. Then, have them automatically adjust to a bigger number, depending on the current usage. To do that, use the experimental `IdleScaleFactor` setting. > [!warning] > `IdleScaleFactor` internally is an `float64` value and requires the float format to be used, > for example: `0.0`, `1.0`, or `1.5`. If an integer format is used (for example `IdleScaleFactor = 1`), > the runner process fails with the error: > `FATAL: Service run failed error=toml: cannot load TOML value of type int64 into a Go float`. 
When you use this setting, GitLab Runner tries to sustain a defined number of machines in the idle state. However, this number is no longer static. Instead of using `IdleCount`, GitLab Runner counts the machines in use and defines the desired idle capacity as a factor of that number. If there aren't any used machines, `IdleScaleFactor` evaluates to no idle machines to maintain. If `IdleCount` is greater than `0` (and only then the `IdleScaleFactor` is applicable), the runner doesn't ask for jobs if there are no idle machines that can handle them. Without new jobs the number of used machines would not rise, so `IdleScaleFactor` would constantly evaluate to `0`. And this would block the runner in an unusable state. Therefore, we've introduced the second setting: `IdleCountMin`. It defines the minimum number of idle machines that need to be sustained no matter what `IdleScaleFactor` evaluates to. **The setting can't be set to less than one if `IdleScaleFactor` is used. GitLab Runner automatically sets `IdleCountMin` to one**. You can also use `IdleCountMin` to define the minimum number of idle machines that should always be available. This allows new jobs entering the queue to start quickly. As with `IdleCount`, the value you assign depends on your use case. For example: ```toml concurrent=200 [[runners]] limit = 200 [runners.machine] IdleCount = 100 IdleCountMin = 10 IdleScaleFactor = 1.1 ``` In this case, when the runner approaches the decision point, it checks how many machines are in use. For example, if there are five idle machines and ten machines in use. Multiplying it by the `IdleScaleFactor`, the runner decides that it should have 11 idle machines. So 6 more are created. If you have 90 idle machines and 100 machines in use, based on the `IdleScaleFactor`, GitLab Runner sees that it should have `100 * 1.1 = 110` idle machines. So it again starts creating new ones. 
However, when it reaches the number of `100` idle machines, it stops creating more idle machines because this is the upper limit defined by `IdleCount`. If the 100 idle machines in use goes down to 20, the desired number of idle machines is `20 * 1.1 = 22`. GitLab Runner starts terminating the machines. As described above, GitLab Runner removes the machines that aren't used for `IdleTime`. Therefore, the removal of too many idle VMs are done aggressively. If the number of idle machines goes down to 0, the desired number of idle machines is `0 * 1.1 = 0`. This, however, is less than the defined `IdleCountMin` setting, so the runner starts removing the idle VMs until 10 VMs remain. After that point, scaling down stops and the runner keeps 10 machines in idle state. ## Configure autoscaling periods Autoscaling can be configured to have different values depending on the time period. Organizations might have regular times when spikes of jobs are being executed, and other times with few to no jobs. For example, most commercial companies work from Monday to Friday in fixed hours, like 10am to 6pm. On nights and weekends for the rest of the week, and on the weekends, no pipelines are started. These periods can be configured with the help of `[[runners.machine.autoscaling]]` sections. Each of them supports setting `IdleCount` and `IdleTime` based on a set of `Periods`. ### How autoscaling periods work In the `[runners.machine]` settings, you can add multiple `[[runners.machine.autoscaling]]` sections, each one with its own `IdleCount`, `IdleTime`, `Periods` and `Timezone` properties. A section should be defined for each configuration, proceeding in order from the most general scenario to the most specific scenario. All sections are parsed. The last one to match the current time is active. If none match, the values from the root of `[runners.machine]` are used. 
For example: ```toml [runners.machine] MachineName = "auto-scale-%s" MachineDriver = "google" IdleCount = 10 IdleTime = 1800 [[runners.machine.autoscaling]] Periods = ["* * 9-17 * * mon-fri *"] IdleCount = 50 IdleTime = 3600 Timezone = "UTC" [[runners.machine.autoscaling]] Periods = ["* * * * * sat,sun *"] IdleCount = 5 IdleTime = 60 Timezone = "UTC" ``` In this configuration, every weekday between 9 and 16:59 UTC, machines are over-provisioned to handle the large traffic during operating hours. On the weekend, `IdleCount` drops to 5 to account for the drop in traffic. The rest of the time, the values are taken from the defaults in the root - `IdleCount = 10` and `IdleTime = 1800`. > [!note] > The 59th second of the last > minute in any period that you specify is not considered part of the period. > For more information, see [issue #2170](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2170). You can specify the `Timezone` of a period, for example `"Australia/Sydney"`. If you don't, the system setting of the host machine of every runner is used. This default can be stated as `Timezone = "Local"` explicitly. More information about the syntax of `[[runner.machine.autoscaling]]` sections can be found in [GitLab Runner - Advanced Configuration - The `[runners.machine]` section](advanced-configuration.md#the-runnersmachine-section). ## Distributed runners caching > [!note] > Read how to [use a distributed cache](speed_up_job_execution.md#use-a-distributed-cache). To speed up your jobs, GitLab Runner provides a [cache mechanism](https://docs.gitlab.com/ci/yaml/#cache) where selected directories and/or files are saved and shared between subsequent jobs. This mechanism works fine when jobs are run on the same host. However, when you start using the GitLab Runner autoscale feature, most of your jobs run on a new (or almost new) host. This new host executes each job in a new Docker container. In that case, you can't take advantage of the cache feature. 
To overcome this issue, together with the autoscale feature, the distributed runners cache feature was introduced. This feature uses configured object storage server to share the cache between used Docker hosts. GitLab Runner queries the server and downloads the archive to restore the cache, or uploads it to archive the cache. To enable distributed caching, you have to define it in `config.toml` using the [`[runners.cache]` directive](advanced-configuration.md#the-runnerscache-section): ```toml [[runners]] limit = 10 executor = "docker+machine" [runners.cache] Type = "s3" Path = "path/to/prefix" Shared = false [runners.cache.s3] ServerAddress = "s3.example.com" AccessKey = "access-key" SecretKey = "secret-key" BucketName = "runner" Insecure = false ``` In the example above, the S3 URLs follow the structure `http(s)://///runner//project//`. To share the cache between two or more runners, set the `Shared` flag to true. This flag removes the runner token from the URL (`runner/`) and all configured runners share the same cache. You can also set `Path` to separate caches between runners when cache sharing is enabled. ## Distributed container registry mirroring To speed up jobs executed inside of Docker containers, you can use the [Docker registry mirroring service](https://docs.docker.com/retired/#registry-now-cncf-distribution). This service provides a proxy between your Docker machines and all used registries. Images are downloaded one time by the registry mirror. On each new host, or on an existing host where the image is not available, the image is downloaded from the configured registry mirror. Provided that the mirror exists in your Docker machines LAN, the image downloading step should be much faster on each host. To configure the Docker registry mirroring, you have to add `MachineOptions` to the configuration in `config.toml`: ```toml [[runners]] limit = 10 executor = "docker+machine" [runners.machine] (...) MachineOptions = [ (...) 
"engine-registry-mirror=http://10.11.12.13:12345" ] ``` Where `10.11.12.13:12345` is the IP address and port where your registry mirror is listening for connections from the Docker service. It must be accessible for each host created by Docker Machine. Read more about how to [use a proxy for containers](speed_up_job_execution.md#use-a-proxy-for-containers). ## A complete example of `config.toml` The `config.toml` below uses the [`google` Docker Machine driver](https://github.com/docker/docs/blob/173d3c65f8e7df2a8c0323594419c18086fc3a30/machine/drivers/gce.md): ```toml concurrent = 50 # All registered runners can run up to 50 concurrent jobs [[runners]] url = "https://gitlab.com" token = "RUNNER_TOKEN" # Note this is different from the registration token used by `gitlab-runner register` name = "autoscale-runner" executor = "docker+machine" # This runner is using the 'docker+machine' executor limit = 10 # This runner can execute up to 10 jobs (created machines) [runners.docker] image = "ruby:3.3" # The default image used for jobs is 'ruby:3.3' [runners.machine] IdleCount = 5 # There must be 5 machines in Idle state - when Off Peak time mode is off IdleTime = 600 # Each machine can be in Idle state up to 600 seconds (after this it will be removed) - when Off Peak time mode is off MaxBuilds = 100 # Each machine can handle up to 100 jobs in a row (after this it will be removed) MachineName = "auto-scale-%s" # Each machine will have a unique name ('%s' is required) MachineDriver = "google" # Refer to Docker Machine docs on how to authenticate: https://docs.docker.com/machine/drivers/gce/#credentials MachineOptions = [ "google-project=GOOGLE-PROJECT-ID", "google-zone=GOOGLE-ZONE", # e.g. 'us-west1' "google-machine-type=GOOGLE-MACHINE-TYPE", # e.g. 
'n1-standard-8' "google-machine-image=ubuntu-os-cloud/global/images/family/ubuntu-1804-lts", "google-username=root", "google-use-internal-ip", "engine-registry-mirror=https://mirror.gcr.io" ] [[runners.machine.autoscaling]] # Define periods with different settings Periods = ["* * 9-17 * * mon-fri *"] # Every workday between 9 and 17 UTC IdleCount = 50 IdleCountMin = 5 IdleScaleFactor = 1.5 # Means that current number of Idle machines will be 1.5*in-use machines, # no more than 50 (the value of IdleCount) and no less than 5 (the value of IdleCountMin) IdleTime = 3600 Timezone = "UTC" [[runners.machine.autoscaling]] Periods = ["* * * * * sat,sun *"] # During the weekends IdleCount = 5 IdleTime = 60 Timezone = "UTC" [runners.cache] Type = "s3" [runners.cache.s3] ServerAddress = "s3.eu-west-1.amazonaws.com" AccessKey = "AMAZON_S3_ACCESS_KEY" SecretKey = "AMAZON_S3_SECRET_KEY" BucketName = "runner" Insecure = false ``` The `MachineOptions` parameter contains options for both the `google` driver that Docker Machine uses to create machines on Google Compute Engine and for Docker Machine itself (`engine-registry-mirror`). ================================================ FILE: docs/configuration/configuring_runner_operator.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Configuring GitLab Runner on OpenShift --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} This document explains how to configure GitLab Runner on OpenShift. ## Passing properties to GitLab Runner Operator When creating a `Runner`, you can configure it by setting properties in its `spec`. 
For example, you can specify the GitLab URL where the runner is registered, or the name of the secret that contains the registration token: ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret # Name of the secret containing the Runner token ``` Read about all the available properties in [Operator properties](#operator-properties). ## Operator properties The following properties can be passed to the Operator. Some properties are only available with more recent versions of the Operator. | Setting | Operator | Description | |--------------------|----------|-------------| | `gitlabUrl` | all | The fully qualified domain name for the GitLab instance, for example, `https://gitlab.example.com`. | | `token` | all | Name of `Secret` containing the `runner-registration-token` key used to register the runner. | | `tags` | all | List of comma-separated tags to be applied to the runner. | | `concurrent` | all | Limits how many jobs can run concurrently. The maximum number is all defined runners. 0 does not mean unlimited. Default is `10`. | | `interval` | all | Defines the number of seconds between checks for new jobs. Default is `30`. | | `locked` | 1.8 | Defines if the runner should be locked to a project. Default is `false`. | | `runUntagged` | 1.8 | Defines if jobs without tags should be run. Default is `true` if no tags were specified. Otherwise, it's `false`. | | `protected` | 1.8 | Defines if the runner should run jobs on protected branches only. Default is `false`. | | `cloneURL` | all | Overwrite the URL for the GitLab instance. Used only if the runner can't connect to the GitLab URL. | | `env` | all | Name of `ConfigMap` containing key-value pairs that are injected as environment variables in the Runner pod. | | `runnerImage` | 1.7 | Overwrites the default GitLab Runner image. Default is the Runner image the operator was bundled with. 
| | `helperImage` | all | Overwrites the default GitLab Runner helper image. | | `buildImage` | all | The default Docker image to use for builds when none is specified. | | `cacheType` | all | Type of cache used for Runner artifacts. One of: `gcs`, `s3`, `azure`. | | `cachePath` | all | Defines the cache path on the file system. | | `cacheShared` | all | Enable sharing of cache between runners. | | `s3` | all | Options used to set up S3 cache. Refer to [Cache properties](#cache-properties). | | `gcs` | all | Options used to set up `gcs` cache. Refer to [Cache properties](#cache-properties). | | `azure` | all | Options used to set up Azure cache. Refer to [Cache properties](#cache-properties). | | `ca` | all | Name of TLS secret containing the custom certificate authority (CA) certificates. | | `serviceaccount` | all | Use to override service account used to run the Runner pod. | | `config` | all | Use to provide a custom `ConfigMap` with a [configuration template](../register/_index.md#register-with-a-configuration-template). | | `shutdownTimeout` | 1.34 | Number of seconds until the [forceful shutdown operation](../commands/_index.md#signals) times out and exits the process. The default value is `30`. If set to `0` or lower, the default value is used. | | `logLevel` | 1.34 | Defines the log level. Options are `debug`, `info`, `warn`, `error`, `fatal`, and `panic`. | | `logFormat` | 1.34 | Specifies the log format. Options are `runner`, `text`, and `json`. The default value is `runner`, which contains ANSI escape codes for coloring. | | `listenAddr` | 1.34 | Defines an address (`:`) the Prometheus metrics HTTP server should listen on. For information about configuration, see [Monitor GitLab Runner Operator](../monitoring/_index.md#monitor-operator-managed-gitlab-runners). | | `sentryDsn` | 1.34 | Enables tracking of all system level errors to Sentry. 
| | `connectionMaxAge` | 1.34 | The maximum duration a TLS keepalive connection to the GitLab server should remain open before reconnecting. The default value is `15m` for 15 minutes. If set to `0` or lower, the connection persists as long as possible. | | `podSpec` | 1.23 | List of patches to apply to the GitLab Runner pod (template). For more information, see [Patching the runner pod template](#patching-the-runner-pod-template). | | `deploymentSpec` | 1.40 | List of patches to apply to the GitLab Runner deployment. For more information, see [Patching the runner deployment template](#patching-the-runner-deployment-template). | ## Cache properties ### S3 cache | Setting | Operator | Description | |---------------|----------|-------------| | `server` | all | The S3 server address. | | `credentials` | all | Name of the `Secret` containing the `accesskey` and `secretkey` properties used to access the object storage. | | `bucket` | all | Name of the bucket in which the cache is stored. | | `location` | all | Name of the S3 region in which the cache is stored. | | `insecure` | all | Use insecure connections or `HTTP`. | ### `gcs` cache | Setting | Operator | Description | |-------------------|----------|-------------| | `credentials` | all | Name of the `Secret` containing the `access-id` and `private-key` properties used to access the object storage. | | `bucket` | all | Name of the bucket in which the cache is stored. | | `credentialsFile` | all | Takes the `gcs` credentials file, `keys.json`. | ### Azure cache | Setting | Operator | Description | |-----------------|----------|-------------| | `credentials` | all | Name of the `Secret` containing the `accountName` and `privateKey` properties used to access the object storage. | | `container` | all | Name of the Azure container in which the cache is stored. | | `storageDomain` | all | The domain name of the Azure blob storage. | ## Configure a proxy environment To create a proxy environment: 1. 
Edit the `custom-env.yaml` file. For example: ```yaml apiVersion: v1 data: HTTP_PROXY: example.com kind: ConfigMap metadata: name: custom-env ``` 1. Update OpenShift to apply the changes. ```shell oc apply -f custom-env.yaml ``` 1. Update your [`gitlab-runner.yml`](../install/operator.md#install-gitlab-runner) file. ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret # Name of the secret containing the Runner token env: custom-env ``` If the proxy can't reach the Kubernetes API, you might see an error in your CI/CD job: ```shell ERROR: Job failed (system failure): prepare environment: setting up credentials: Post https://172.21.0.1:443/api/v1/namespaces//secrets: net/http: TLS handshake timeout. Check https://docs.gitlab.com/runner/shells/#shell-profile-loading for more information ``` To resolve this error, add the IP address of the Kubernetes API to `NO_PROXY` configuration in the `custom-env.yaml` file: ```yaml apiVersion: v1 data: NO_PROXY: 172.21.0.1 HTTP_PROXY: example.com kind: ConfigMap metadata: name: custom-env ``` You can verify the IP address of the Kubernetes API by running: ```shell oc get services --namespace default --field-selector='metadata.name=kubernetes' | grep -v NAME | awk '{print $3}' ``` ## Customize `config.toml` with a configuration template You can customize the runner's `config.toml` file by using the [configuration template](../register/_index.md#register-with-a-configuration-template). 1. Create a custom configuration template file. For example, let's instruct our runner to mount an `EmptyDir` volume and set the `cpu_limit`. Create the `custom-config.toml` file: ```toml [[runners]] [runners.kubernetes] cpu_limit = "500m" [runners.kubernetes.volumes] [[runners.kubernetes.volumes.empty_dir]] name = "empty-dir" mount_path = "/path/to/empty_dir" medium = "Memory" ``` 1. 
Create a `ConfigMap` named `custom-config-toml` from our `custom-config.toml` file: ```shell oc create configmap custom-config-toml --from-file config.toml=custom-config.toml ``` 1. Set the `config` property of the `Runner`: ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret config: custom-config-toml ``` Because of a [known issue](https://gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/-/issues/229), you must use environment variables instead of configuration templates to modify the following settings: | Setting | Environment variable | Default value | |----------------------------------|------------------------------|---------------| | `runners.request_concurrency` | `RUNNER_REQUEST_CONCURRENCY` | `1` | | `runners.output_limit` | `RUNNER_OUTPUT_LIMIT` | `4096` | | `kubernetes.runner.poll_timeout` | `KUBERNETES_POLL_TIMEOUT` | `180` | ## Configure a custom TLS cert 1. To set a custom TLS cert, create a secret with key `tls.crt`. In this example, the file is named `custom-tls-ca-secret.yaml`: ```yaml apiVersion: v1 kind: Secret metadata: name: custom-tls-ca type: Opaque stringData: tls.crt: | -----BEGIN CERTIFICATE----- MIIEczCCA1ugAwIBAgIBADANBgkqhkiG9w0BAQQFAD..AkGA1UEBhMCR0Ix ..... 7vQMfXdGsRrXNGRGnX+vWDZ3/zWI0joDtCkNnqEpVn..HoX -----END CERTIFICATE----- ``` 1. Create the secret: ```shell oc apply -f custom-tls-ca-secret.yaml ``` 1. 
Set the `ca` key in the `runner.yaml` to the same name as the name of our secret: ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret ca: custom-tls-ca ``` ## Configure the CPU and memory size of runner pods To set [CPU limits](../executors/kubernetes/_index.md#cpu-requests-and-limits) and [memory limits](../executors/kubernetes/_index.md#memory-requests-and-limits) in a custom `config.toml` file, follow the instructions in [this topic](#customize-configtoml-with-a-configuration-template). ## Configure job concurrency per runner based on cluster resources Set the `concurrent` property of the `Runner` resource: ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret concurrent: 2 ``` Job concurrency is dictated by the requirements of the project. 1. Start by trying to determine the compute and memory resources required to execute a CI job. 1. Calculate how many times that job would be able to execute given the resources in the cluster. If you set a high concurrency value, the Kubernetes executor processes the jobs as soon as it can. However, the Kubernetes cluster's scheduler capacity determines when the job is scheduled. ## Service account for the GitLab Runner manager For a fresh installation, GitLab Runner creates a Kubernetes `ServiceAccount` named `gitlab-runner-app-sa` for the runner manager pod if these RBAC role binding resources don't exist: - `gitlab-runner-app-rolebinding` - `gitlab-runner-rolebinding` If one of the role bindings exists, GitLab resolves the role and service account from the `subjects` and `roleRef` defined in the role binding. If both role bindings exist, `gitlab-runner-app-rolebinding` takes precedence over `gitlab-runner-rolebinding`. ## Troubleshooting ### Root vs non-root The GitLab Runner Operator and the GitLab Runner pod run as non-root users. 
As a result, the build image used in the job must run as a non-root user to be able to complete successfully. This ensures that jobs can run successfully with the least permission. To make this work, make sure that the build image used for CI/CD jobs: - Runs as non-root - Does not write to restricted filesystems Most container filesystems on an OpenShift cluster are read-only, except: - Mounted volumes - `/var/tmp` - `/tmp` - Other volumes mounted on root filesystems as `tmpfs` #### Overriding the `HOME` environment variable If creating a custom build image or [overriding environment variables](#configure-a-proxy-environment), ensure that the `HOME` environment variables is not set to `/` which would be read-only. Especially if your jobs would need to write files to the home directory. You could create a directory under `/home` for example `/home/ci` and set `ENV HOME=/home/ci` in your `Dockerfile`. For the runner pods [it's expected that `HOME` would be set to `/home/gitlab-runner`](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/-/blob/e265820a00a6a1b9a271dc132de2618ced43cf92/runner/Dockerfile.OCP#L14). If this variable is changed, the new location must have the [proper permissions](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/-/blob/e265820a00a6a1b9a271dc132de2618ced43cf92/runner/Dockerfile.OCP#L38). These guidelines are also documented in the [Red Hat Container Platform documentation](https://docs.redhat.com/en/documentation/openshift_container_platform/4.18/html/images/creating-images#images-create-guide-openshift_create-images). ### Overriding `locked` variable When you register a runner token, if you set the `locked` variable to `true`, the error `Runner configuration other than name, description, and exector is reserved and cannot be specified` appears. 
```yaml locked: true # REQUIRED tags: "" runUntagged: false protected: false maximumTimeout: 0 ``` For more information, see [issue 472](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/issues/472#note_1483346437). #### Watch out for security context constraints By default, when installed in a new OpenShift project, the GitLab Runner Operator runs as non-root. Some projects, like the `default` project, are exceptions where all service accounts have `anyuid` access. In that case, the user of the image is `root`. You can check this by running the `whoami` inside any container shell, for example, a job. Read more about security context constraints in [Red Hat Container Platform documentation](https://docs.redhat.com/en/documentation/openshift_container_platform/4.18/html/authentication_and_authorization/managing-pod-security-policies). #### Run as `anyuid` security context constraints > [!warning] > Running jobs as root or writing to root filesystems can expose your system to security risks. To run a CI/CD job as the root user or write to root filesystems, set the `anyuid` security context constraints on the `gitlab-runner-app-sa` service account. The GitLab Runner container uses this service account. In OpenShift 4.3.8 and earlier: ```shell oc adm policy add-scc-to-user anyuid -z gitlab-runner-app-sa -n # Check that the anyiud SCC is set: oc get scc anyuid -o yaml ``` In OpenShift 4.3.8 and later: ```shell oc create -f - < rules: - apiGroups: - security.openshift.io resourceNames: - anyuid resources: - securitycontextconstraints verbs: - use EOF oc create -f - < subjects: - kind: ServiceAccount name: gitlab-runner-app-sa roleRef: kind: Role name: scc-anyuid apiGroup: rbac.authorization.k8s.io EOF ``` #### Matching helper container and build container user ID and group ID GitLab Runner Operator deployments use `registry.gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/gitlab-runner-helper-ocp` as the default helper image. 
This image runs with user ID and group ID of `1001:1001` unless explicitly modified by a security context. When the user ID in your build container differs from the user ID in the helper image, permission-related errors can occur during your build. The following is a common error message: ```shell fatal: detected dubious ownership in repository at '/builds/gitlab-org/gitlab-runner' ``` This error indicates that the repository was cloned by user ID `1001` (helper container), but a different user ID in the build container is attempting to access it. Solution: configure your build container's security context to match the helper container's user ID and group ID: ```toml [runners.kubernetes.build_container_security_context] run_as_user = 1001 run_as_group = 1001 ``` Additional notes: - These settings ensure consistent file ownership between the container that clones the repository and the container that builds it. - If you've customized your helper image with different user ID or group IDs, adjust these values accordingly. - For OpenShift deployments, verify that these security context settings comply with your cluster's security context constraints (SCCs). #### Configure SETFCAP If you use Red Hat OpenShift Container Platform (RHOCP) 4.11 or later, you may get the following error message: ```shell error reading allowed ID mappings:error reading subuid mappings for user ``` Some jobs (for example, `buildah`) need the `SETFCAP` capability granted to run correctly. To fix this issue: 1. Add the SETFCAP capability to the security context constraints that GitLab Runner is using (replace the `gitlab-scc` with the security context constraints assigned to your GitLab Runner pod): ```shell oc patch scc gitlab-scc --type merge -p '{"allowedCapabilities":["SETFCAP"]}' ``` 1. 
Update your `config.toml` and add the `SETFCAP` capability under the `kubernetes` section: ```toml [[runners]] [runners.kubernetes] [runners.kubernetes.pod_security_context] [runners.kubernetes.build_container_security_context] [runners.kubernetes.build_container_security_context.capabilities] add = ["SETFCAP"] ``` 1. Create a `ConfigMap` using this `config.toml` in the namespace where GitLab Runner is deployed: ```shell oc create configmap custom-config-toml --from-file config.toml=config.toml ``` 1. Modify the runner you want to fix, adding the `config:` parameter to point to the recently created `ConfigMap` (replace `my-runner` with the correct runner pod name). ```shell oc patch runner my-runner --type merge -p '{"spec": {"config": "custom-config-toml"}}' ``` For more information, see the [Red Hat documentation](https://access.redhat.com/solutions/7016013). ### Using FIPS Compliant GitLab Runner > [!note] > For Operator, you can change only the helper image. You can't change the GitLab Runner image yet. > [Issue 28814](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28814) tracks this feature. To use a [FIPS compliant GitLab Runner helper](../install/requirements.md#fips-compliant-gitlab-runner), change the helper image as follows: ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret helperImage: gitlab/gitlab-runner-helper:ubi-fips concurrent: 2 ``` #### Register GitLab Runner by using a self-signed certificate To use a self-signed certificate with GitLab Self-Managed, create a secret that contains the CA certificate you used to sign the private certificates. 
The name of the secret is then provided as the CA in the Runner spec section: ```yaml KIND: Runner VERSION: apps.gitlab.com/v1beta2 FIELD: ca DESCRIPTION: Name of tls secret containing the custom certificate authority (CA) certificates ``` The secret can be created using the following command: ```shell oc create secret generic mySecret --from-file=tls.crt=myCert.pem -o yaml ``` #### Register GitLab Runner with an external URL that points to an IP address If the runner cannot match the self-signed certificate with the hostname, you might get an error message. This issue occurs when you configure GitLab Self-Managed to use an IP address (like `###.##.##.##`) instead of a hostname: ```shell [31;1mERROR: Registering runner... failed [0;m [31;1mrunner[0;m=A5abcdEF [31;1mstatus[0;m=couldn't execute POST against https://###.##.##.##/api/v4/runners: Post https://###.##.##.##/api/v4/runners: x509: cannot validate certificate for ###.##.##.## because it doesn't contain any IP SANs [31;1mPANIC: Failed to register the runner. You may be having network problems.[0;m ``` To fix this issue: 1. On the GitLab Self-Managed server, modify the `openssl` to add the IP address to the `subjectAltName` parameter: ```shell # vim /etc/pki/tls/openssl.cnf [ v3_ca ] subjectAltName=IP:169.57.64.36 <---- Add this line. 169.57.64.36 is your GitLab server IP. ``` 1. Then re-generate a self-signed CA with the commands below: ```shell # cd /etc/gitlab/ssl # openssl req -x509 -nodes -days 3650 -newkey rsa:4096 -keyout /etc/gitlab/ssl/169.57.64.36.key -out /etc/gitlab/ssl/169.57.64.36.crt # openssl dhparam -out /etc/gitlab/ssl/dhparam.pem 4096 # gitlab-ctl restart ``` 1. Use this new certificate to generate a new secret. ## Patch structure Each specification patch consists of the following properties: | Setting | Description | |-------------|-------------| | `name` | Name of the custom specification patch. 
| | `patchFile` | Path to the file that defines the changes to apply to the final specification before it is generated. The file must be a JSON or YAML file. | | `patch` | A JSON or YAML format string that describes the changes to apply to the final specification before it is generated. | | `patchType` | The strategy used to apply the specified changes to the specification. The accepted values are `merge`, `json`, and `strategic` (default). | You cannot set both `patchFile` and `patch` in the same specification configuration. ## Patching the runner pod template [Pod specification](https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-template-v1/#PodTemplateSpec) patching lets you customize how GitLab Runner is deployed by applying patches to the operator-generated Kubernetes deployment. Patches are applied to the pod template's specification (`deployment.spec.template.spec`). You can control pod-level settings such as: - Resource requests and limits - Security contexts - Volume mounts and volumes - Environment variables - Node selectors and affinity rules - Tolerations - Hostname and DNS configuration ## Patching the runner deployment template [Deployment specification](https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/deployment-v1/#Deployment) patching lets you customize how GitLab Runner is deployed by applying patches to the operator-generated Kubernetes deployment. Patches are applied to the deployment specification (`deployment.spec`). You can control deployment-level settings such as: - Replica count - Deployment strategy (RollingUpdate, Recreate) - Revision history limits - Progress deadline seconds - Labels and annotations ## Patch order Deployment specification patches are applied before pod specification patches. This means that if both deployment and pod specifications modify the same field, the pod specification takes precedence. 
## Examples ### Pod specification patching example ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret podSpec: - name: "set-hostname" patch: | hostname: "custom-hostname" patchType: "merge" - name: "add-resource-requests" patch: | containers: - name: build resources: requests: cpu: "500m" memory: "256Mi" patchType: "strategic" ``` ### Deployment specification patching example ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret deploymentSpec: - name: "set-replicas" patch: | replicas: 3 patchType: "strategic" - name: "configure-strategy" patch: | strategy: type: RollingUpdate rollingUpdate: maxUnavailable: 25% maxSurge: 50% patchType: "strategic" - name: "set-revision-history" patch: | [{"op": "add", "path": "/revisionHistoryLimit", "value": 10}] patchType: "json" ``` ## Best practices - Test patches in a non-production environment before applying them to production deployments. - Use deployment-level patches for settings that affect the deployment behavior rather than individual pod settings. - Remember that pod specification patches override deployment specification patches for conflicting fields. ================================================ FILE: docs/configuration/feature-flags.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: GitLab Runner feature flags --- > [!warning] > Data corruption, stability degradation, performance degradation, and security issues may occur if you enable a feature that's disabled by default. Before you enable feature flags, you should be aware of the risks involved. 
For more information, see [Risks when enabling features still in development](https://docs.gitlab.com/administration/feature_flags/#risks-when-enabling-features-still-in-development). Feature flags are toggles that allow you to enable or disable specific features. These flags are typically used: - For beta features that are made available for volunteers to test, but that are not ready to be enabled for all users. Beta features are sometimes incomplete or need further testing. A user who wants to use a beta feature can choose to accept the risk and explicitly enable the feature with a feature flag. Other users who do not need the feature or who are not willing to accept the risk on their system have the feature disabled by default and are not impacted by possible bugs and regressions. - For breaking changes that result in functionality deprecation or feature removal in the near future. As the product evolves, features are sometimes changed or removed entirely. Known bugs are often fixed, but in some cases, users have already found a workaround for a bug that affected them; forcing users to adopt the standardized bug fix might cause other problems with their customized configurations. In such cases, the feature flag is used to switch from the old behavior to the new one on demand. This allows users to adopt new versions of the product while giving them time to plan for a smooth, permanent transition from the old behavior to the new behavior. Feature flags are toggled using environment variables. To: - Activate a feature flag, set the corresponding environment variable to `"true"` or `1`. - Deactivate a feature flag, set the corresponding environment variable to `"false"` or `0`. 
## Available feature flags | Feature flag | Default value | Deprecated | To be removed with | Description | |--------------|---------------|------------|--------------------|-------------| | `FF_NETWORK_PER_BUILD` | `false` | {{< icon name="dotted-circle" >}} No | | Enables creation of a Docker [network per build](../executors/docker.md#network-configurations) with the `docker` executor. Use the `CI_BUILD_NETWORK_NAME` variable to get the network name. | | `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY` | `false` | {{< icon name="dotted-circle" >}} No | | When set to `false` disables execution of remote Kubernetes commands through `exec` in favor of `attach` to solve problems like [#4119](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4119). This feature flag requires the Service Account to have specific permissions. For more information, see [configure runner API permissions](../executors/kubernetes/_index.md#configure-runner-api-permissions). | | `FF_USE_DIRECT_DOWNLOAD` | `true` | {{< icon name="dotted-circle" >}} No | | When set to `true` Runner tries to direct-download all artifacts instead of proxying through GitLab on a first try. Enabling might result in download failures due to problems validating the TLS certificate of Object Storage if it is enabled by GitLab. See [Self-signed certificates or custom Certification Authorities](tls-self-signed.md) | | `FF_SKIP_NOOP_BUILD_STAGES` | `true` | {{< icon name="dotted-circle" >}} No | | When set to `false` all build stages are executed even if running them has no effect | | `FF_USE_FASTZIP` | `false` | {{< icon name="dotted-circle" >}} No | | Fastzip is a performant archiver for cache/artifact archiving and extraction | | `FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR` | `false` | {{< icon name="dotted-circle" >}} No | | If enabled will remove the usage of `umask 0000` call for jobs executed with `docker` executor. 
Instead Runner will try to discover the UID and GID of the user configured for the image used by the build container and will change the ownership of the working directory and files by running the `chmod` command in the predefined container (after updating sources, restoring cache and downloading artifacts). POSIX utility `id` must be installed and operational in the build image for this feature flag. Runner will execute `id` with options `-u` and `-g` to retrieve the UID and GID. | | `FF_ENABLE_BASH_EXIT_CODE_CHECK` | `false` | {{< icon name="dotted-circle" >}} No | | If enabled, bash scripts don't rely solely on `set -e`, but check for a non-zero exit code after each script command is executed. | | `FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY` | `false` | {{< icon name="dotted-circle" >}} No | | In GitLab Runner 16.10 and later, the default is `false`. In GitLab Runner 16.9 and earlier, the default is `true`. When disabled, processes that Runner creates on Windows (shell and custom executor) will be created with additional setup that should improve process termination. When set to `true`, legacy process setup is used. To successfully and gracefully drain a Windows Runner, this feature flag should be set to `false`. | | `FF_USE_NEW_BASH_EVAL_STRATEGY` | `false` | {{< icon name="dotted-circle" >}} No | | When set to `true`, the Bash `eval` call is executed in a subshell to help with proper exit code detection of the script executed. | | `FF_USE_POWERSHELL_PATH_RESOLVER` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, PowerShell resolves pathnames rather than Runner using OS-specific filepath functions that are specific to where Runner is hosted. | | `FF_USE_DYNAMIC_TRACE_FORCE_SEND_INTERVAL` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, the trace force send interval for logs is dynamically adjusted based on the trace update interval. 
| | `FF_SCRIPT_SECTIONS` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, multi-line script commands appear as collapsible sections in the job log, while single-line commands are printed directly with a `$` prefix. This is a known issue. For more information, see [issue 39294](https://gitlab.com/gitlab-org/gitlab-runner/-/work_items/39294). | | `FF_ENABLE_JOB_CLEANUP` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, the project directory will be cleaned up at the end of the build. If `GIT_CLONE` is used, the whole project directory will be deleted. If `GIT_FETCH` is used, a series of Git `clean` commands will be issued. | | `FF_KUBERNETES_HONOR_ENTRYPOINT` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, the Docker entrypoint of an image will be honored if `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY` is not set to true. This feature flag requires the service account to have specific permissions. For more information, see [configure runner API permissions](../executors/kubernetes/_index.md#configure-runner-api-permissions). | | `FF_POSIXLY_CORRECT_ESCAPES` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, [POSIX shell escapes](https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_02) are used rather than [`bash`-style ANSI-C quoting](https://www.gnu.org/software/bash/manual/html_node/Quoting.html). This should be enabled if the job environment uses a POSIX-compliant shell. | | `FF_RESOLVE_FULL_TLS_CHAIN` | `false` | {{< icon name="dotted-circle" >}} No | | In GitLab Runner 16.4 and later, the default is `false`. In GitLab Runner 16.3 and earlier, the default is `true`. When enabled, the runner resolves a full TLS chain all the way down to a self-signed root certificate for `CI_SERVER_TLS_CA_FILE`. This was previously [required to make Git HTTPS clones work](tls-self-signed.md#git-cloning) for a Git client built with libcurl prior to v7.68.0 and OpenSSL. 
However, the process to resolve certificates might fail on some operating systems, such as macOS, that reject root certificates signed with older signature algorithms. If certificate resolution fails, you might need to disable this feature. This feature flag can only be disabled in the [`[runners.feature_flags]` configuration](#enable-feature-flag-in-runner-configuration). | | `FF_DISABLE_POWERSHELL_STDIN` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, PowerShell scripts for shell and custom executors are passed by file, rather than passed and executed via stdin. This is required for jobs' `allow_failure:exit_codes` keywords to work correctly. | | `FF_USE_POD_ACTIVE_DEADLINE_SECONDS` | `true` | {{< icon name="dotted-circle" >}} No | | When enabled, the [pod `activeDeadlineSeconds`](https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#lifecycle) is set to the CI/CD job timeout. This flag affects the [pod's lifecycle](../executors/kubernetes/_index.md#pod-lifecycle). | | `FF_USE_ADVANCED_POD_SPEC_CONFIGURATION` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, the user can set an entire whole pod specification in the `config.toml` file. For more information, see [Overwrite generated pod specifications (Experiment)](../executors/kubernetes/_index.md#overwrite-generated-pod-specifications). | | `FF_SET_PERMISSIONS_BEFORE_CLEANUP` | `true` | {{< icon name="dotted-circle" >}} No | | When enabled, permissions on directories and files in the project directory are set first, to ensure that deletions during cleanup are successful. | | `FF_SECRET_RESOLVING_FAILS_IF_MISSING` | `true` | {{< icon name="dotted-circle" >}} No | | When enabled, secret resolving fails if the value cannot be found. | | `FF_PRINT_POD_EVENTS` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, all events associated with the build pod will be printed until it's started. 
| | `FF_USE_GIT_BUNDLE_URIS` | `true` | {{< icon name="dotted-circle" >}} No | | When enabled, the Git `transfer.bundleURI` configuration option is set to `true`. This FF is enabled by default. Set to `false` to disable Git bundle support. | | `FF_USE_GIT_NATIVE_CLONE` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled and `GIT_STRATEGY=clone`, the `git-clone(1)` command is used instead of `git-init(1)` + `git-fetch(1)` to clone the project. This requires Git version 2.49 and later, and falls back to `init` + `fetch` if not available. | | `FF_USE_DUMB_INIT_WITH_KUBERNETES_EXECUTOR` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, `dumb-init` is used to execute all the scripts. This allows `dumb-init` to run as the first process in the helper and build container. | | `FF_USE_INIT_WITH_DOCKER_EXECUTOR` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, the Docker executor starts the service and build containers with the `--init` option, which runs `tini-init` as PID 1. | | `FF_LOG_IMAGES_CONFIGURED_FOR_JOB` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, the runner logs names of the image and service images defined for each received job. | | `FF_USE_DOCKER_AUTOSCALER_DIAL_STDIO` | `true` | {{< icon name="dotted-circle" >}} No | | When enabled (the default), `docker system stdio` is used to tunnel to the remote Docker daemon. When disabled, for SSH connections a native SSH tunnel is used, and for WinRM connections a 'fleeting-proxy' helper binary is first deployed. | | `FF_CLEAN_UP_FAILED_CACHE_EXTRACT` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, commands are inserted into build scripts to detect a failed cache extraction and clean up partial cache contents left behind. | | `FF_USE_WINDOWS_JOB_OBJECT` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, a job object is created for each process that the runner creates on Windows with the shell and custom executors. 
To force-kill the processes, the runner closes the job object. This should improve the termination of difficult-to-kill processes. | | `FF_TIMESTAMPS` | `true` | {{< icon name="dotted-circle" >}} No | | When disabled timestamps are not added to the beginning of each log trace line. | | `FF_DISABLE_AUTOMATIC_TOKEN_ROTATION` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, it restricts automatic token rotation and logs a warning when the token is about to expire. | | `FF_USE_LEGACY_GCS_CACHE_ADAPTER` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, the legacy GCS Cache adapter is used. When disabled (default), a newer GCS Cache adapter is used which uses Google Cloud Storage's SDK for authentication. This should resolve authentication problems in environments that the legacy adapter struggled with, such as workload identity configurations in GKE. | | `FF_DISABLE_UMASK_FOR_KUBERNETES_EXECUTOR` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, removes the `umask 0000` call for jobs executed with the Kubernetes executor. Instead, the runner tries to discover the user ID (UID) and group ID (GID) of the user the build container runs as. The runner also changes the ownership of the working directory and files by running the `chown` command in the predefined container (after updating sources, restoring cache, and downloading artifacts). | | `FF_USE_LEGACY_S3_CACHE_ADAPTER` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, the legacy S3 Cache adapter is used. When disabled (default), a newer S3 Cache adapter is used which uses Amazon's S3 SDK for authentication. This should resolve authentication problems in environments that the legacy adapter struggled with, such as custom STS endpoints. | | `FF_GIT_URLS_WITHOUT_TOKENS` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, GitLab Runner doesn't embed the job token anywhere during Git configuration or command execution. 
Instead, it sets up a Git credential helper that uses the environment variable to obtain the job token. This approach limits token storage and reduces the risk of token leaks. | | `FF_WAIT_FOR_POD_TO_BE_REACHABLE` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, the runner waits for the Pod status to be 'Running', and for the Pod to be ready with its certificates attached. For more information, see [configure runner API permissions](../executors/kubernetes/_index.md#configure-runner-api-permissions). | | `FF_MASK_ALL_DEFAULT_TOKENS` | `true` | {{< icon name="dotted-circle" >}} No | | When enabled, GitLab Runner automatically masks all default tokens patterns. | | `FF_EXPORT_HIGH_CARDINALITY_METRICS` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, the runner exports the metrics with high cardinality. Special care should be taken when enabling this feature flag to avoid ingesting large amounts of data. For more information, see [Fleet scaling](../fleet_scaling/_index.md). | | `FF_USE_FLEETING_ACQUIRE_HEARTBEATS` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, fleeting instance connectivity is checked before a job is assigned to an instance. | | `FF_USE_EXPONENTIAL_BACKOFF_STAGE_RETRY` | `true` | {{< icon name="dotted-circle" >}} No | | When enabled, the retries for `GET_SOURCES_ATTEMPTS`, `ARTIFACT_DOWNLOAD_ATTEMPTS`, `RESTORE_CACHE_ATTEMPTS`, and `EXECUTOR_JOB_SECTION_ATTEMPTS` use exponential backoff (5 sec - 5 min). | | `FF_USE_ADAPTIVE_REQUEST_CONCURRENCY` | `true` | {{< icon name="dotted-circle" >}} No | | When enabled, the `request_concurrency` setting becomes the maximum concurrency value, and the number of concurrent requests adjusts based on the rate of successful job requests. | | `FF_USE_GITALY_CORRELATION_ID` | `true` | {{< icon name="dotted-circle" >}} No | | When enabled, the `X-Gitaly-Correlation-ID` header is added to all Git HTTP requests. 
When disabled, the Git operations execute without Gitaly Correlation ID headers. | | `FF_USE_GIT_PROACTIVE_AUTH` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, the runner passes the `http.proactiveAuth=basic` Git configuration option to `git clone` and `git fetch` commands. As a result, Git sends credentials proactively instead of waiting for a `401` response. This behavior ensures the username is propagated to Gitaly for public projects. | | `FF_HASH_CACHE_KEYS` | `false` | {{< icon name="dotted-circle" >}} No | | When GitLab Runner creates or extracts caches, it hashes the cache keys (SHA256) before using them, both for local and distributed caches (for example, S3). For more information, see [cache key handling](advanced-configuration.md#cache-key-handling). | | `FF_ENABLE_JOB_INPUTS_INTERPOLATION` | `true` | {{< icon name="dotted-circle" >}} No | | When enabled, job inputs are interpolated. For more information, see [&17833](https://gitlab.com/groups/gitlab-org/-/epics/17833). | | `FF_USE_JOB_ROUTER` | `false` | {{< icon name="dotted-circle" >}} No | | Makes GitLab Runner fetch jobs by connecting to Job Router rather than GitLab directly. | | `FF_SCRIPT_TO_STEP_MIGRATION` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, user scripts are migrated to steps and executed with the step-runner. | | `FF_USE_PARALLEL_CACHE_TRANSFER` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, cache uploads and downloads use parallel object storage transfers: GoCloud writes use multipart with concurrent parts; downloads use concurrent HTTP Range or GoCloud range reads. When disabled, uploads use a single concurrent part stream and downloads use one stream. Improves throughput on high-bandwidth links when enabled. Tune with `CACHE_CONCURRENCY` and `CACHE_CHUNK_SIZE`. 
| | `FF_USE_PARALLEL_ARTIFACT_TRANSFER` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, artifact downloads that use `direct_download` and receive a redirect to object storage may use parallel HTTP Range GETs when the backend supports `206 Partial Content` with a `Content-Range` total. When disabled, a single download stream is used. Chunk size and concurrency are fixed in the runner (not `CACHE_*` variables). | | `FF_CONCRETE` | `false` | {{< icon name="dotted-circle" >}} No | | When enabled, traditional script execution is migrated to and executed with the step-runner. | ## Enable feature flag in pipeline configuration You can use [CI/CD variables](https://docs.gitlab.com/ci/variables/) to enable feature flags: - For all jobs in the pipeline (globally): ```yaml variables: FEATURE_FLAG_NAME: 1 ``` - For a single job: ```yaml job: stage: test variables: FEATURE_FLAG_NAME: 1 script: - echo "Hello" ``` ## Enable feature flag in runner environment variables To enable the feature for every job a Runner runs, specify the feature flag as an [`environment`](advanced-configuration.md#the-runners-section) variable in the [Runner configuration](advanced-configuration.md): ```toml [[runners]] name = "example-runner" url = "https://gitlab.com/" token = "TOKEN" limit = 0 executor = "docker" builds_dir = "" shell = "" environment = ["FEATURE_FLAG_NAME=1"] ``` ## Enable feature flag in runner configuration You can enable feature flags by specifying them under `[runners.feature_flags]`. This setting prevents any job from overriding the feature flag values. Some feature flags are also only usable when you configure this setting, because they don't deal with how the job is executed. 
```toml [[runners]] name = "example-runner" url = "https://gitlab.com/" token = "TOKEN" executor = "docker" [runners.feature_flags] FF_USE_DIRECT_DOWNLOAD = true ``` ================================================ FILE: docs/configuration/gpus.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Using Graphical Processing Units (GPUs) --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} {{< history >}} - Introduced in GitLab Runner 13.9. {{< /history >}} GitLab Runner supports the use of Graphical Processing Units (GPUs). The following section describes the required configuration to enable GPUs for various executors. ## Shell executor No runner configuration is needed. ## Docker executor > [!warning] > If you're using Podman as the container runtime engine, GPUs are not detected. > For more information, see [issue 39095](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/39095). Prerequisites: - Install [NVIDIA Driver](https://docs.nvidia.com/datacenter/tesla/driver-installation-guide/index.html). - Install [NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html). Use the `gpus` or `service_gpus` configuration option in the [`runners.docker` section](advanced-configuration.md#the-runnersdocker-section): ```toml [runners.docker] gpus = "all" service_gpus = "all" ``` ## Docker Machine executor See the [documentation for the GitLab fork of Docker Machine](../executors/docker_machine.md#using-gpus-on-google-compute-engine). ## Kubernetes executor Prerequisites: - Ensure that [the node selector chooses a node with GPU support](https://kubernetes.io/docs/tasks/manage-gpus/scheduling-gpus/). - Enable the `FF_USE_ADVANCED_POD_SPEC_CONFIGURATION` feature flag. 
To enable GPU support, configure the runner to request GPU resources in the pod specification. For example: ```toml [[runners.kubernetes.pod_spec]] name = "gpu" patch = ''' containers: - name: build resources: requests: nvidia.com/gpu: 1 limits: nvidia.com/gpu: 1 ''' patch_type = "strategic" # <--- `strategic` patch_type ``` Adjust the GPU count in `requests` and `limits` based on your job requirements. GitLab Runner has been [tested on Amazon Elastic Kubernetes Service](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4355) with [GPU-enabled instances](https://docs.aws.amazon.com/dlami/latest/devguide/gpu.html). ## Validate that GPUs are enabled You can use runners with NVIDIA GPUs. For NVIDIA GPUs, one way to ensure that a GPU is enabled for a CI job is to run `nvidia-smi` at the beginning of the script. For example: ```yaml train: script: - nvidia-smi ``` If GPUs are enabled, the output of `nvidia-smi` displays the available devices. In the following example, a single NVIDIA Tesla P4 is enabled: ```shell +-----------------------------------------------------------------------------+ | NVIDIA-SMI 450.51.06 Driver Version: 450.51.06 CUDA Version: 11.0 | |-------------------------------+----------------------+----------------------+ | GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC | | Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. | | | | MIG M. 
| |===============================+======================+======================| | 0 Tesla P4 Off | 00000000:00:04.0 Off | 0 | | N/A 43C P0 22W / 75W | 0MiB / 7611MiB | 3% Default | | | | N/A | +-------------------------------+----------------------+----------------------+ +-----------------------------------------------------------------------------+ | Processes: | | GPU GI CI PID Type Process name GPU Memory | | ID ID Usage | |=============================================================================| | No running processes found | +-----------------------------------------------------------------------------+ ``` If the hardware does not support a GPU, `nvidia-smi` should fail either because it's missing or because it can't communicate with the driver: ```shell modprobe: ERROR: could not insert 'nvidia': No such device NVIDIA-SMI has failed because it couldn't communicate with the NVIDIA driver. Make sure that the latest NVIDIA driver is installed and running. ``` ================================================ FILE: docs/configuration/init.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: The system services of GitLab Runner --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} GitLab Runner uses the [Go `service` library](https://github.com/kardianos/service) to detect the underlying OS and eventually install the service file based on the init system. > [!note] > The package `service` installs, un-installs, starts, stops, and runs a program as a > service (daemon). Windows XP+, Linux (systemd, Upstart, and System V), > and macOS (`launchd`) are supported. 
When GitLab Runner [is installed](../install/_index.md), the service file is automatically created: - **systemd**: `/etc/systemd/system/gitlab-runner.service` - **Upstart**: `/etc/init/gitlab-runner` ## Setting custom environment variables You can run GitLab Runner with custom environment variables. For example, you want to define `GOOGLE_APPLICATION_CREDENTIALS` in the runner's environment. This action is different from the [`environment` configuration setting](advanced-configuration.md#the-runners-section), which defines the variables that are automatically added to all jobs executed by a runner. ### Customizing systemd For runners that use systemd, create `/etc/systemd/system/gitlab-runner.service.d/env.conf` using one `Environment=key=value` line for each variable to export. For example: ```toml [Service] Environment=GOOGLE_APPLICATION_CREDENTIALS=/etc/gitlab-runner/gce-credentials.json ``` Then reload the configuration: ```shell systemctl daemon-reload systemctl restart gitlab-runner.service ``` ### Customizing Upstart For runners that use Upstart, create `/etc/init/gitlab-runner.override` and export the desired variables. For example: ```shell export GOOGLE_APPLICATION_CREDENTIALS="/etc/gitlab-runner/gce-credentials.json" ``` Restart the runner for this to take effect. ## Overriding default stopping behavior In some cases, you might want to override the default behavior of the service. For example, when you upgrade GitLab Runner, you should stop it gracefully until all running jobs are finished. However, systemd, Upstart, or other services might immediately restart the process without even noticing. So, when you upgrade GitLab Runner, the installation script kills, and restarts the runner process that was probably handling new jobs at the time. 
### Overriding systemd For runners that use systemd, create `/etc/systemd/system/gitlab-runner.service.d/kill.conf` with the following content: ```toml [Service] TimeoutStopSec=7200 KillSignal=SIGQUIT ``` After adding these two settings to the systemd unit configuration, you can stop the runner. After the runner stops, systemd uses `SIGQUIT` as the kill signal to stop the process. Additionally, a two-hour timeout is set for the stop command. If any jobs don't terminate gracefully before this timeout, systemd kills the process by using `SIGKILL`. ### Overriding Upstart For runners that use Upstart, create `/etc/init/gitlab-runner.override` with the following content: ```shell kill signal SIGQUIT kill timeout 7200 ``` After adding these two settings to the Upstart unit configuration, you can stop the runner. Upstart does the same as systemd above. ================================================ FILE: docs/configuration/macos_setup.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Set up macOS runners --- To run a CI/CD job on a macOS runner, complete the following steps in order. When you're done, GitLab Runner will be running on a macOS machine and an individual runner will be ready to process jobs. - Change the system shell to Bash. - Install Homebrew, rbenv, and GitLab Runner. - Configure rbenv and install Ruby. - Install Xcode. - Register a runner. - Configure CI/CD. ## Prerequisites Before you begin: - Install a recent version of macOS. This guide was developed on 11.4. - Ensure you have terminal or SSH access to the machine. ## Change the system shell to Bash Newer versions of macOS use Zsh as the default shell. However, the runner's shell executor requires Bash to ensure CI/CD scripts execute correctly because many use Bash-specific syntax and features. 1. 
Connect to your machine and determine the default shell: ```shell echo $SHELL ``` 1. If the result is not `/bin/bash`, change the shell by running: ```shell chsh -s /bin/bash ``` 1. Enter your password. 1. Restart your terminal or reconnect by using SSH. 1. Run `echo $SHELL` again. The result should be `/bin/bash`. ## Install Homebrew, rbenv, and GitLab Runner The runner needs certain environment options to connect to the machine and run a job. 1. Install the [Homebrew package manager](https://brew.sh/): ```shell /bin/bash -c "$(curl "https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh")" ``` 1. Set up [`rbenv`](https://github.com/rbenv/rbenv), which is a Ruby version manager, and GitLab Runner: ```shell brew install rbenv gitlab-runner brew services start gitlab-runner ``` ## Configure rbenv and install Ruby Now configure rbenv and install Ruby. 1. Add rbenv to the Bash environment: ```shell echo 'if which rbenv > /dev/null; then eval "$(rbenv init -)"; fi' >> ~/.bash_profile source ~/.bash_profile ``` 1. Install Ruby 3.3.x and set it as the machine's global default: ```shell rbenv install 3.3.4 rbenv global 3.3.4 ``` ## Install Xcode Now install and configure Xcode. 1. Go to one of these locations and install Xcode: - The Apple App Store. - The [Apple Developer Portal](https://developer.apple.com/). - [`xcode-install`](https://github.com/xcpretty/xcode-install). This project aims to make it easier to download various Apple dependencies from the command line. 1. Agree to the license and install the recommended additional components. You can do this by opening Xcode and following the prompts, or by running the following command in the terminal: ```shell sudo xcodebuild -runFirstLaunch ``` 1. 
Update the active developer directory so that Xcode loads the proper command line tools during your build: ```shell sudo xcode-select -s /Applications/Xcode.app/Contents/Developer ``` ### Create and register a project runner Now [create and register](https://docs.gitlab.com/ci/runners/runners_scope/#create-a-project-runner-with-a-runner-authentication-token) a project runner. When you create and register the runner: - In GitLab, add the tag `macos` to ensure macOS jobs run on this macOS machine. - In the command-line, select `shell` as the [executor](../executors/_index.md). After you register the runner, a success message displays in the command-line: ```shell Runner registered successfully. Feel free to start it, but if it's running already the config should be automatically reloaded! ``` To view the runner: 1. On the top bar, select **Search or go to** and find your project or group. 1. Select **Settings > CI/CD**. 1. Expand **Runners**. ### Configure CI/CD In your GitLab project, configure CI/CD and start a build. You can use this sample `.gitlab-ci.yml` file. Notice the tags match the tags you used to register the runner. ```yaml stages: - build - test variables: LANG: "en_US.UTF-8" before_script: - gem install bundler - bundle install - gem install cocoapods - pod install build: stage: build script: - bundle exec fastlane build tags: - macos test: stage: test script: - bundle exec fastlane test tags: - macos ``` The macOS runner should now build your project. 
================================================ FILE: docs/configuration/oracle_cloud_performance.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Configure GitLab Runner for Oracle Cloud Infrastructure --- GitLab Code Quality jobs that run in Oracle Cloud Infrastructure (OCI) environments with Container Runtime Interface (CRI) can experience performance degradation. To optimize your GitLab Runner performance in OCI: 1. Add an empty directory volume to your GitLab Runner configuration. 1. Configure specific Docker driver settings in your `.gitlab-ci.yml` file. This configuration applies to environments with: - Cloud provider: Oracle Cloud Infrastructure (OCI) - Container runtime: Container Runtime Interface (CRI) - Process: GitLab Code Quality jobs - Runner type: GitLab Self-Managed Runners ## Add an empty directory volume To define an empty directory for GitLab Runner configuration, add the following block to the runners section of your `values.yaml` file: ```yaml [[runners.kubernetes.volumes.empty_dir]] mount_path = "/var/lib" name = "docker-data" ``` ### Example runner configuration The following example shows a complete Helm chart `values.yaml` for the GitLab Runner that includes the fix: ```yaml image: registry: registry.gitlab.com image: gitlab-org/gitlab-runner tag: alpine-v16.11.0 useTini: false imagePullPolicy: IfNotPresent gitlabUrl: https://gitlab.com/ runnerToken: "" terminationGracePeriodSeconds: 3600 concurrent: 100 shutdown_timeout: 0 checkInterval: 5 logLevel: debug sessionServer: enabled: false ## For RBAC support: rbac: create: true rules: [] clusterWideAccess: false podSecurityPolicy: enabled: false resourceNames: - gitlab-runner metrics: enabled: false portName: metrics port: 9252 serviceMonitor: enabled: false service: enabled: false type: ClusterIP runners: config: | [[runners]] output_limit = 
200960 [runners.kubernetes] privileged = true allow_privilege_escalation = true namespace = "{{.Release.Namespace}}" image = "ubuntu:22.04" helper_image_flavor = "ubuntu" pull_policy = "if-not-present" executor = "kubernetes" [[runners.kubernetes.volumes.host_path]] name = "buildah" mount_path = "/var/lib/containers/storage" read_only = false [runners.kubernetes.volumes] [[runners.kubernetes.volumes.empty_dir]] mount_path = "/var/lib" name = "docker-data" [[runners.kubernetes.services]] alias = "dind" command = [ "--host=tcp://0.0.0.0:2375", "--host=unix://var/run/docker.sock", ] [runners.cache] Type = "s3" Path = "gitlab_runner" Shared = true [runners.cache.s3] BucketName = "gitlab-shared-caching" BucketLocation = "ap-singapore-1" ServerAddress = ".compat.objectstorage.ap-singapore-1.oraclecloud.com" AccessKey = "" SecretKey = "" configPath: "" tags: "" cache: {} securityContext: allowPrivilegeEscalation: false readOnlyRootFilesystem: false runAsNonRoot: true privileged: false capabilities: drop: ["ALL"] strategy: {} podSecurityContext: runAsUser: 100 fsGroup: 65533 resources: {} affinity: {} topologySpreadConstraints: {} nodeSelector: {} tolerations: [] hostAliases: [] deploymentAnnotations: {} deploymentLabels: {} podAnnotations: {} podLabels: {} priorityClassName: "" secrets: [] configMaps: {} volumeMounts: [] volumes: [] ``` ## Update your `.gitlab-ci.yml` file To unselect the default `overlay2` driver, add the following key as an empty variable to your existing Code Quality job: ```shell DOCKER_DRIVER: "" ``` ### Example Code Quality job configuration The following example shows Code Quality job configuration in your `.gitlab-ci.yml` file: ```yaml code_quality: services: - name: $CODE_QUALITY_DIND_IMAGE command: ['--tls=false', '--host=tcp://0.0.0.0:2375'] variables: CODECLIMATE_PREFIX: $CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX/ CODECLIMATE_REGISTRY_USERNAME: $CI_DEPENDENCY_PROXY_USER CODECLIMATE_REGISTRY_PASSWORD: $CI_DEPENDENCY_PROXY_PASSWORD DOCKER_DRIVER: 
"" ``` ================================================ FILE: docs/configuration/proxy.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Running GitLab Runner behind a proxy --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} This guide aims specifically to making GitLab Runner with Docker executor work behind a proxy. Before continuing, ensure that you've already [installed Docker](https://docs.docker.com/get-started/get-docker/) and [GitLab Runner](../install/_index.md) on the same machine. ## Configuring `cntlm` > [!note] > If you already use a proxy without authentication, this section is optional and > you can skip straight to [configuring Docker](#configuring-docker-for-downloading-images). > Configuring `cntlm` is only needed if you are behind a proxy with authentication, > but it's recommended to use in any case. [`cntlm`](https://github.com/versat/cntlm) is a Linux proxy which can be used as a local proxy and has 2 major advantages compared to adding the proxy details everywhere manually: - One single source where you need to change your credentials - The credentials can not be accessed from the Docker runners Assuming you [have installed `cntlm`](https://www.howtoforge.com/linux-ntlm-authentication-proxy-isa-server-with-cntlm), you need to first configure it. ### Make `cntlm` listen to the `docker0` interface For added security and protection from the internet, bind `cntlm` to listen on `docker0` interface, which has an IP address that containers can reach. If you tell `cntlm` on the Docker host to bind only to this address, Docker containers can reach it, but the outside world can't. 1. 
Find the IP that Docker is using: ```shell ip -4 -oneline addr show dev docker0 ``` The IP address is usually `172.17.0.1`, let's call it `docker0_interface_ip`. 1. Open the configuration file for `cntlm` (`/etc/cntlm.conf`). Enter your username, password, domain and proxy hosts, and configure the `Listen` IP address which you found from the previous step. It should look like this: ```plaintext Username testuser Domain corp-uk Password password Proxy 10.0.0.41:8080 Proxy 10.0.0.42:8080 Listen 172.17.0.1:3128 # Change to your docker0 interface IP ``` 1. Save the changes and restart its service: ```shell sudo systemctl restart cntlm ``` ## Configuring Docker for downloading images > [!note] > The following apply to OSes with systemd support. For information about how to use proxy, see [Docker documentation](https://docs.docker.com/engine/daemon/proxy/). The service file should look like this: ```ini [Service] Environment="HTTP_PROXY=http://docker0_interface_ip:3128/" Environment="HTTPS_PROXY=http://docker0_interface_ip:3128/" ``` ## Adding Proxy variables to the GitLab Runner configuration The proxy variables need to also be added to the GitLab Runner configuration, so that it can connect to GitLab.com from behind the proxy. This action is the same as adding the proxy to the Docker service above: 1. Create a systemd drop-in directory for the `gitlab-runner` service: ```shell mkdir /etc/systemd/system/gitlab-runner.service.d ``` 1. Create a file called `/etc/systemd/system/gitlab-runner.service.d/http-proxy.conf` that adds the `HTTP_PROXY` environment variables: ```ini [Service] Environment="HTTP_PROXY=http://docker0_interface_ip:3128/" Environment="HTTPS_PROXY=http://docker0_interface_ip:3128/" ``` To connect GitLab Runner to any internal URLs, like a GitLab Self-Managed instance, set a value for the `NO_PROXY` environment variable. 
```ini [Service] Environment="HTTP_PROXY=http://docker0_interface_ip:3128/" Environment="HTTPS_PROXY=http://docker0_interface_ip:3128/" Environment="NO_PROXY=gitlab.example.com" ``` 1. Save the file and flush changes: ```shell systemctl daemon-reload ``` 1. Restart GitLab Runner: ```shell sudo systemctl restart gitlab-runner ``` 1. Verify that the configuration has been loaded: ```shell systemctl show --property=Environment gitlab-runner ``` You should see: ```ini Environment=HTTP_PROXY=http://docker0_interface_ip:3128/ HTTPS_PROXY=http://docker0_interface_ip:3128/ ``` ## Adding the Proxy to the Docker containers After you [register your runner](../register/_index.md), you may want to propagate your proxy settings to the Docker containers (for example, for `git clone`). To do this, you need to edit `/etc/gitlab-runner/config.toml` and add the following to the `[[runners]]` section: ```toml pre_get_sources_script = "git config --global http.proxy $HTTP_PROXY; git config --global https.proxy $HTTPS_PROXY" environment = ["https_proxy=http://docker0_interface_ip:3128", "http_proxy=http://docker0_interface_ip:3128", "HTTPS_PROXY=docker0_interface_ip:3128", "HTTP_PROXY=docker0_interface_ip:3128"] ``` Where `docker0_interface_ip` is the IP address of the `docker0` interface. > [!note] > In our examples, we are setting both lower case and upper case variables > because certain programs expect `HTTP_PROXY` and others `http_proxy`. > Unfortunately, there is no > [standard](https://unix.stackexchange.com/questions/212894/whats-the-right-format-for-the-http-proxy-environment-variable-caps-or-no-ca#212972) > on these kinds of environment variables. ## Proxy settings when using `dind` service When using the [Docker-in-Docker executor](https://docs.gitlab.com/ci/docker/using_docker_build/#use-docker-in-docker) (`dind`), it may be necessary to specify `docker:2375,docker:2376` in the `NO_PROXY` environment variable. The ports are required, otherwise `docker push` is blocked. 
Communication between `dockerd` from `dind` and the local `docker` client (as described in the [Docker Hub `docker` image documentation](https://hub.docker.com/_/docker/)) uses proxy variables held in root's Docker configuration.
As a result, GitLab Runner handles rate limited scenarios by using the following [retry logic](#retry-logic): ### Retry logic When GitLab Runner receives a `429 Too Many Requests` response, it follows this retry sequence: 1. The runner checks the response headers for a `RateLimit-ResetTime` header. - The `RateLimit-ResetTime` header should have a value which is a valid HTTP date (RFC1123), like `Wed, 21 Oct 2015 07:28:00 GMT`. - If the header is present and has a valid value, the runner waits until the specified time and issues another request. 1. If the `RateLimit-ResetTime` header is invalid or missing, the runner checks the response headers for a `Retry-After` header. - The `Retry-After` header should have a value in seconds format, like `Retry-After: 30`. - If the header format is present and has a valid value, the runner waits until the specified time and issues another request. 1. If both headers are missing or invalid, the runner waits for the default interval and issues another request. The runner retries failed requests up to 5 times. If all retries fail, the runner logs the error from the final response. ### Supported header formats | Header | Format | Example | |-----------------------|---------------------|---------------------------------| | `RateLimit-ResetTime` | HTTP Date (RFC1123) | `Wed, 21 Oct 2015 07:28:00 GMT` | | `Retry-After` | Seconds | `30` | > [!note] > The header `RateLimit-ResetTime` is case-insensitive because all header keys are run > through the [`http.CanonicalHeaderKey`](https://pkg.go.dev/net/http#CanonicalHeaderKey) function. 
================================================ FILE: docs/configuration/runner_autoscale_aws/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Configure runner Docker Machine autoscaling on AWS EC2 --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} One of the biggest advantages of GitLab Runner is its ability to automatically spin up and down VMs to make sure your builds get processed immediately. It's a great feature, and if used correctly, it can be extremely useful in situations where you don't use your runners 24/7 and want to have a cost-effective and scalable solution. ## Introduction In this tutorial, we'll explore how to properly configure GitLab Runner in AWS. The instance in AWS will serve as a runner manager that spawns new Docker instances on demand. The runners on these instances are automatically created. They use the parameters covered in this guide and do not require manual configuration after creation. In addition, we'll make use of [Amazon's EC2 Spot instances](https://aws.amazon.com/ec2/spot/) which will greatly reduce the costs of the GitLab Runner instances while still using quite powerful autoscaling machines. ## Prerequisites A familiarity with Amazon Web Services (AWS) is required as this is where most of the configuration will take place. We suggest a quick read through Docker machine [`amazonec2` driver documentation](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md) to familiarize yourself with the parameters we will set later in this article. Your GitLab Runner is going to need to talk to your GitLab instance over the network, and that is something you need think about when configuring any AWS security groups or when setting up your DNS configuration. 
For example, you can keep the EC2 resources segmented away from public traffic in a different VPC to better strengthen your network security. Your environment is likely different, so consider what works best for your situation. ### AWS security groups Docker Machine will attempt to use a [default security group](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md/#security-group) with rules for port `2376` and SSH `22`, which is required for communication with the Docker daemon. Instead of relying on Docker, you can create a security group with the rules you need and provide that in the GitLab Runner options as we will [see below](#the-runnersmachine-section). This way, you can customize it to your liking ahead of time based on your networking environment. You have to make sure that ports `2376` and `22` are accessible by the [Runner Manager instance](#prepare-the-runner-manager-instance). ### AWS credentials You'll need an [AWS Access Key](https://docs.aws.amazon.com/IAM/latest/UserGuide/security-creds.html) tied to a user with permission to scale (EC2) and update the cache (via S3). Create a new user with [policies](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-policies-for-amazon-ec2.html) for EC2 (AmazonEC2FullAccess) and S3. For more information about the minimal permissions required for S3, see [`runners.cache.s3`](../advanced-configuration.md#the-runnerscaches3-section). To be more secure, you can disable console login for that user. Keep the tab open or copy paste the security credentials in an editor as we'll use them later during the [GitLab Runner configuration](#the-runnersmachine-section). You can also create an [EC2 instance profile](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2_instance-profiles.html) with the required `AmazonEC2FullAccess` and `AmazonS3FullAccess` policies. 
To provision new EC2 instances for the jobs' execution, attach this instance profile to the runner manager EC2 instance. If the runner machine is using an instance profile, include the `iam:PassRole` action in the instance profile of the runner manager. Example: ```json { "Statement": [ { "Action": "iam:PassRole", "Effect": "Allow", "Resource": "arn:aws:iam:::role/instance-profile-of-runner-machine" } ], "Version": "2012-10-17" } ``` ## Prepare the runner manager instance The first step is to install GitLab Runner in an EC2 instance that will serve as the runner manager that spawns new machines. Choose a distribution that both Docker and GitLab Runner support, like Ubuntu, Debian, CentOS, or RHEL. This doesn't have to be a powerful machine because a runner manager instance doesn't run jobs itself. For your initial configuration, you can start with a smaller instance. This machine is a dedicated host because we need it always up and running. Therefore, it is the only host with an ongoing baseline cost. Install the prerequisites: 1. Log in to your server 1. [Install GitLab Runner from the official GitLab repository](../../install/linux-repository.md) 1. [Install Docker](https://docs.docker.com/engine/install/#server) 1. [Install Docker Machine from the GitLab fork](https://gitlab.com/gitlab-org/ci-cd/docker-machine) (Docker has deprecated Docker Machine) Now that the Runner is installed, it's time to register it. ## Registering the GitLab Runner Before configuring the GitLab Runner, you need to first register it, so that it connects with your GitLab instance: 1. [Obtain a runner token](https://docs.gitlab.com/ci/runners/) 1. [Register the runner](../../register/_index.md) 1. When asked the executor type, enter `docker+machine` You can now move on to the most important part, configuring the GitLab Runner. > [!note] > If you want every user in your instance to be able to use the autoscaled runners, > register the runner as a shared one. 
## Configuring the runner Now that the runner is registered, you need to edit its configuration file and add the required options for the AWS machine driver. Let's first break it down to pieces. ### The global section In the global section, you can define the limit of the jobs that can be run concurrently across all runners (`concurrent`). This heavily depends on your needs, like how many users GitLab Runner will accommodate, how much time your builds take, etc. You can start with something low like `10`, and increase or decrease its value going forward. The `check_interval` option defines how often the runner should check GitLab for new jobs, in seconds. Example: ```toml concurrent = 10 check_interval = 0 ``` [Other options](../advanced-configuration.md#the-global-section) are also available. ### The `runners` section From the `[[runners]]` section, the most important part is the `executor` which must be set to `docker+machine`. Most of those settings are taken care of when you register the runner for the first time. `limit` sets the maximum number of machines (running and idle) that this runner will spawn. For more information, check the [relationship between `limit`, `concurrent` and `IdleCount`](../autoscale.md#how-concurrent-limit-and-idlecount-generate-the-upper-limit-of-running-machines). Example: ```toml [[runners]] name = "gitlab-aws-autoscaler" url = "" token = "" executor = "docker+machine" limit = 20 ``` [Other options](../advanced-configuration.md#the-runners-section) under `[[runners]]` are also available. ### The `runners.docker` section In the `[runners.docker]` section you can define the default Docker image to be used by the child runners if it's not defined in [`.gitlab-ci.yml`](https://docs.gitlab.com/ci/yaml/). By using `privileged = true`, all runners will be able to run [Docker in Docker](https://docs.gitlab.com/ci/docker/using_docker_build/#use-docker-in-docker) which is useful if you plan to build your own Docker images via GitLab CI/CD. 
Next, we use `disable_cache = true` to disable the Docker executor's inner cache mechanism since we will use the distributed cache mode as described in the following section. Example: ```toml [runners.docker] image = "alpine" privileged = true disable_cache = true ``` [Other options](../advanced-configuration.md#the-runnersdocker-section) under `[runners.docker]` are also available. ### The `runners.cache` section To speed up your jobs, GitLab Runner provides a cache mechanism where selected directories and/or files are saved and shared between subsequent jobs. While not required for this setup, it is recommended to use the distributed cache mechanism that GitLab Runner provides. Since new instances will be created on demand, it is essential to have a common place where the cache is stored. In the following example, we use Amazon S3: ```toml [runners.cache] Type = "s3" Shared = true [runners.cache.s3] ServerAddress = "s3.amazonaws.com" AccessKey = "" SecretKey = "" BucketName = "" BucketLocation = "us-west-2" ``` Here's some more information to further explore the cache mechanism: - [Reference for `runners.cache`](../advanced-configuration.md#the-runnerscache-section) - [Reference for `runners.cache.s3`](../advanced-configuration.md#the-runnerscaches3-section) - [Deploying and using a cache server for GitLab Runner](../autoscale.md#distributed-runners-caching) - [How cache works](https://docs.gitlab.com/ci/yaml/#cache) ### The `runners.machine` section This is the most important part of the configuration and it's the one that tells GitLab Runner how and when to spawn new or remove old Docker Machine instances. 
We will focus on the AWS machine options, for the rest of the settings read about the: - [Autoscaling algorithm and the parameters it's based on](../autoscale.md#autoscaling-algorithm-and-parameters) - depends on the needs of your organization - [Autoscaling periods](../autoscale.md#configure-autoscaling-periods) - useful when there are regular time periods in your organization when no work is done, for example weekends Here's an example of the `runners.machine` section: ```toml [runners.machine] IdleCount = 1 IdleTime = 1800 MaxBuilds = 10 MachineDriver = "amazonec2" MachineName = "gitlab-docker-machine-%s" MachineOptions = [ "amazonec2-access-key=XXXX", "amazonec2-secret-key=XXXX", "amazonec2-region=eu-central-1", "amazonec2-vpc-id=vpc-xxxxx", "amazonec2-subnet-id=subnet-xxxxx", "amazonec2-zone=x", "amazonec2-use-private-address=true", "amazonec2-tags=runner-manager-name,gitlab-aws-autoscaler,gitlab,true,gitlab-runner-autoscale,true", "amazonec2-security-group=xxxxx", "amazonec2-instance-type=m4.2xlarge", ] [[runners.machine.autoscaling]] Periods = ["* * 9-17 * * mon-fri *"] IdleCount = 50 IdleTime = 3600 Timezone = "UTC" [[runners.machine.autoscaling]] Periods = ["* * * * * sat,sun *"] IdleCount = 5 IdleTime = 60 Timezone = "UTC" ``` The Docker Machine driver is set to `amazonec2` and the machine name has a standard prefix followed by `%s` (required) that is replaced by the ID of the child runner: `gitlab-docker-machine-%s`. Now, depending on your AWS infrastructure, there are many options you can set up under `MachineOptions`. Below you can see the most common ones. | Machine option | Description | |------------------------------------------------------------------------|-------------| | `amazonec2-access-key=XXXX` | The AWS access key of the user that has permissions to create EC2 instances, see [AWS credentials](#aws-credentials). 
| | `amazonec2-secret-key=XXXX` | The AWS secret key of the user that has permissions to create EC2 instances, see [AWS credentials](#aws-credentials). | | `amazonec2-region=eu-central-2` | The region to use when launching the instance. You can omit this entirely and the default `us-east-1` will be used. | | `amazonec2-vpc-id=vpc-xxxxx` | Your [VPC ID](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md#vpc-id) to launch the instance in. | | `amazonec2-subnet-id=subnet-xxxx` | The AWS VPC subnet ID. | | `amazonec2-zone=x` | If not specified, the [availability zone is `a`](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md#environment-variables-and-default-values), it needs to be set to the same availability zone as the specified subnet, for example when the zone is `eu-west-1b` it has to be `amazonec2-zone=b` | | `amazonec2-use-private-address=true` | Use the private IP address of Docker Machines, but still create a public IP address. Useful to keep the traffic internal and avoid extra costs. | | `amazonec2-tags=runner-manager-name,gitlab-aws-autoscaler,gitlab,true,gitlab-runner-autoscale,true` | AWS extra tag key-value pairs, useful to identify the instances on the AWS console. The "Name" [tag](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html) is set to the machine name by default. We set the "runner-manager-name" to match the runner name set in `[[runners]]`, so that we can filter all the EC2 instances created by a specific manager setup. | | `amazonec2-security-group=xxxx` | AWS VPC security group name, not the security group ID. See [AWS security groups](#aws-security-groups). | | `amazonec2-instance-type=m4.2xlarge` | The instance type that the child runners will run on. | | `amazonec2-ssh-user=xxxx` | The user that will have SSH access to the instance. 
| | `amazonec2-iam-instance-profile=xxxx_runner_machine_inst_profile_name` | The IAM instance profile to use for the runner machine. | | `amazonec2-ami=xxxx_runner_machine_ami_id` | The GitLab Runner AMI ID for a specific image. | | `amazonec2-request-spot-instance=true` | Use spare EC2 capacity that is available for less than the on-demand price. | | `amazonec2-spot-price=xxxx_runner_machine_spot_price=x.xx` | Spot instance bid price (in US dollars). Requires the `--amazonec2-request-spot-instance flag` set to `true`. If you omit the `amazonec2-spot-price`, Docker Machine sets the maximum price to a default value of `$0.50` per hour. | | `amazonec2-security-group-readonly=true` | Set the security group to read-only. | | `amazonec2-userdata=xxxx_runner_machine_userdata_path` | Specify the runner machine `userdata` path. | | `amazonec2-root-size=XX` | The root disk size of the instance (in GB). | Notes: - Under `MachineOptions` you can add anything that the [AWS Docker Machine driver supports](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md#options). You are highly encouraged to read Docker's docs as your infrastructure setup may warrant different options to be applied. - The child instances will use by default Ubuntu 16.04 unless you choose a different AMI ID by setting `amazonec2-ami`. Set only [supported base operating systems for Docker Machine](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/os-base). - If you specify `amazonec2-private-address-only=true` as one of the machine options, your EC2 instance won't get assigned a public IP. This is ok if your VPC is configured correctly with an Internet Gateway (IGW) and routing is fine, but it’s something to consider if you've got a more complex configuration. Read more in [Docker docs about VPC connectivity](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md#vpc-connectivity). 
[Other options](../advanced-configuration.md#the-runnersmachine-section) under `[runners.machine]` are also available. ### Getting it all together Here's the full example of `/etc/gitlab-runner/config.toml`: ```toml concurrent = 10 check_interval = 0 [[runners]] name = "gitlab-aws-autoscaler" url = "" token = "" executor = "docker+machine" limit = 20 [runners.docker] image = "alpine" privileged = true disable_cache = true [runners.cache] Type = "s3" Shared = true [runners.cache.s3] ServerAddress = "s3.amazonaws.com" AccessKey = "" SecretKey = "" BucketName = "" BucketLocation = "us-west-2" [runners.machine] IdleCount = 1 IdleTime = 1800 MaxBuilds = 100 MachineDriver = "amazonec2" MachineName = "gitlab-docker-machine-%s" MachineOptions = [ "amazonec2-access-key=XXXX", "amazonec2-secret-key=XXXX", "amazonec2-region=eu-central-1", "amazonec2-vpc-id=vpc-xxxxx", "amazonec2-subnet-id=subnet-xxxxx", "amazonec2-use-private-address=true", "amazonec2-tags=runner-manager-name,gitlab-aws-autoscaler,gitlab,true,gitlab-runner-autoscale,true", "amazonec2-security-group=XXXX", "amazonec2-instance-type=m4.2xlarge", ] [[runners.machine.autoscaling]] Periods = ["* * 9-17 * * mon-fri *"] IdleCount = 50 IdleTime = 3600 Timezone = "UTC" [[runners.machine.autoscaling]] Periods = ["* * * * * sat,sun *"] IdleCount = 5 IdleTime = 60 Timezone = "UTC" ``` ## Cutting down costs with Amazon EC2 Spot instances As [described by](https://aws.amazon.com/ec2/spot/) Amazon: > Amazon EC2 Spot instances allow you to bid on spare Amazon EC2 computing capacity. Since Spot instances are often available at a discount compared to On-Demand pricing, you can significantly reduce the cost of running your applications, grow your application’s compute capacity and throughput for the same budget, and enable new types of cloud computing applications. 
In addition to the [`runners.machine`](#the-runnersmachine-section) options you picked above, in `/etc/gitlab-runner/config.toml` under the `MachineOptions` section, add the following: ```toml MachineOptions = [ "amazonec2-request-spot-instance=true", "amazonec2-spot-price=", ] ``` In this configuration with an empty `amazonec2-spot-price`, AWS sets your bidding price for a Spot instance to the default On-Demand price of that instance class. If you omit the `amazonec2-spot-price` completely, Docker Machine will set the maximum price to a [default value of $0.50 per hour](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md#environment-variables-and-default-values). You may further customize your Spot instance request: ```toml MachineOptions = [ "amazonec2-request-spot-instance=true", "amazonec2-spot-price=0.03", "amazonec2-block-duration-minutes=60" ] ``` With this configuration, Docker Machines are created using Spot instances with a maximum Spot request price of $0.03 per hour and the duration of the Spot instance is capped at 60 minutes. The `0.03` number mentioned above is just an example, so be sure to check on the current pricing based on the region you picked. To learn more about Amazon EC2 Spot instances, visit the following links: - - - ### Caveats of Spot instances While Spot instances is a great way to use unused resources and minimize the costs of your infrastructure, you must be aware of the implications. Running CI jobs on Spot instances may increase the failure rates because of the Spot instances pricing model. If the maximum Spot price you specify exceeds the current Spot price you will not get the capacity requested. Spot pricing is revised on an hourly basis. Any existing Spot instances that have a maximum price below the revised Spot instance price will be terminated within two minutes and all jobs on Spot hosts will fail. 
As a consequence, the auto-scale Runner would fail to create new machines while it will continue to request new instances. This eventually will make 60 requests and then AWS won't accept any more. Then once the Spot price is acceptable, you are locked out for a bit because the call amount limit is exceeded. If you encounter that case, you can use the following command in the runner manager machine to see the Docker Machine's state: ```shell docker-machine ls -q --filter state=Error --format "{{.NAME}}" ``` > [!note] > There are some issues regarding making GitLab Runner gracefully handle Spot > price changes, and there are reports of `docker-machine` attempting to > continually remove a Docker Machine. GitLab has provided patches for both cases > in the upstream project. For more information, see > [issue 2771](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2771) and > [issue 2772](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2772). The GitLab fork does not support AWS EC2 fleets and their use with spot instances. As an alternative, you can use the [Continuous Kernel Integration Project's downstream fork](https://gitlab.com/cki-project/mirror/docker-machine). ## Conclusion In this guide we learned how to install and configure a GitLab Runner in autoscale mode on AWS. Using the autoscale feature of GitLab Runner can save you both time and money. Using the Spot instances that AWS provides can save you even more, but you must be aware of the implications. As long as your bid is high enough, there shouldn't be an issue. 
You can read the following use cases from which this tutorial was (heavily) influenced: - [HumanGeo switched from Jenkins to GitLab](https://about.gitlab.com/blog/humangeo-switches-jenkins-gitlab-ci/) - [Substrakt Health - Autoscale GitLab CI/CD runners and save 90% on EC2 costs](https://about.gitlab.com/blog/autoscale-ci-runners/) ================================================ FILE: docs/configuration/runner_autoscale_aws_fargate/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Autoscaling GitLab CI on AWS Fargate --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} > [!warning] > The Fargate driver is community supported. GitLab Support will try to help debug problems, but offers no guarantees. The GitLab [custom executor](../../executors/custom.md) driver for [AWS Fargate](https://gitlab.com/gitlab-org/ci-cd/custom-executor-drivers/fargate) automatically launches a container on the Amazon Elastic Container Service (ECS) to execute each GitLab CI job. After you complete the tasks in this document, the executor can run jobs initiated from GitLab. Each time a commit is made in GitLab, the GitLab instance notifies the runner that a new job is available. The runner then starts a new task in the target ECS cluster, based on a task definition that you configured in AWS ECS. You can configure an AWS ECS task definition to use any Docker image. With this approach, you have complete flexibility in the type of builds that you can execute on AWS Fargate. ![GitLab Runner Fargate Driver Architecture](../img/runner_fargate_driver_ssh.png) This document shows an example that's meant to give you an initial understanding of the implementation. It is not meant for production use; additional security is required in AWS. 
For example, you might want two AWS security groups: - One used by the EC2 instance that hosts GitLab Runner and only accepts SSH connections from a restricted external IP range (for administrative access). - One that applies to the Fargate Tasks and that allows SSH traffic only from the EC2 instance. For any non-public container registry, your ECS task requires either [IAM permissions (for AWS ECR only)](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_execution_IAM_role.html) or [Private registry authentication for tasks](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/private-auth.html) for non-ECR private registries. You can use CloudFormation or Terraform to automate the provisioning and setup of your AWS infrastructure. CI/CD jobs use the image defined in the ECS task, rather than the value of the `image:` keyword in your `.gitlab-ci.yml` file. ECS doesn't allow you to override the image used for an ECS task. To work around this limitation, you can: - Create and use an image in the ECS task definition that contains all build dependencies of all projects the runner is used for. - Create multiple ECS task definitions with different images and specify the ARN in the `FARGATE_TASK_DEFINITION` CI/CD variable. - Consider creating an EKS cluster by following the official [AWS EKS Blueprints](https://aws-ia.github.io/terraform-aws-eks-blueprints/). For more information, see [Get started with GitLab EKS Fargate runners in 1 hour and zero code](https://about.gitlab.com/blog/eks-fargate-runner/). > [!warning] > Fargate abstracts container hosts, which limits configurability for container host properties. This affects runner workloads that require high IO to disk or network, because these properties have limited or no configurability with Fargate. Before you use GitLab Runner on Fargate, ensure runner workloads with high compute characteristics on CPU, memory, disk IO, or network IO are suitable for Fargate. 
## Prerequisites Before you begin, you should have: - An AWS IAM user with permissions to create and configure EC2, ECS and ECR resources. - AWS VPC and subnets. - One or more AWS security groups. ## Step 1: Prepare a container image for the AWS Fargate task Prepare a container image. You can upload this image to a registry, where it can be used to create containers when GitLab jobs run. 1. Ensure the image has the tools required to build your CI job. For example, a Java project requires a `Java JDK` and build tools like Maven or Gradle. A Node.js project requires `node` and `npm`. 1. Ensure the image has GitLab Runner, which handles artifacts and caching. Refer to the [Run](../../executors/custom.md#run) stage section of the custom executor documentation for additional information. 1. Ensure the container image can accept an SSH connection through public-key authentication. The runner uses this connection to send the build commands defined in the `.gitlab-ci.yml` file to the container on AWS Fargate. The SSH keys are automatically managed by the Fargate driver. The container must be able to accept keys from the `SSH_PUBLIC_KEY` environment variable. View a [Debian example](https://gitlab.com/tmaczukin-test-projects/fargate-driver-debian) that includes GitLab Runner and the SSH configuration. View a [Node.js example](https://gitlab.com/aws-fargate-driver-demo/docker-nodejs-gitlab-ci-fargate). ## Step 2: Push the container image to a registry After you create your image, publish the image to a container registry for use in the ECS task definition. - To create a repository and push an image to ECR, follow the [Amazon ECR Repositories](https://docs.aws.amazon.com/AmazonECR/latest/userguide/Repositories.html) documentation. - To use the AWS CLI to push an image to ECR, follow the [Getting Started with Amazon ECR using the AWS CLI](https://docs.aws.amazon.com/AmazonECR/latest/userguide/getting-started-cli.html) documentation. 
- To use the [GitLab Container Registry](https://docs.gitlab.com/user/packages/container_registry/), you can use the [Debian](https://gitlab.com/tmaczukin-test-projects/fargate-driver-debian) or [NodeJS](https://gitlab.com/aws-fargate-driver-demo/docker-nodejs-gitlab-ci-fargate) example. The Debian image is published to `registry.gitlab.com/tmaczukin-test-projects/fargate-driver-debian:latest`. The NodeJS example image is published to `registry.gitlab.com/aws-fargate-driver-demo/docker-nodejs-gitlab-ci-fargate:latest`. ## Step 3: Create an EC2 instance for GitLab Runner Now create an AWS EC2 instance. In the next step you will install GitLab Runner on it. 1. Go to . 1. For the instance, select the Ubuntu Server 18.04 LTS AMI. The name may be different depending on the AWS region you selected. 1. For the instance type, choose t2.micro. Select **Next: Configure Instance Details**. 1. Leave the default for **Number of instances**. 1. For **Network**, select your VPC. 1. Set **Auto-assign Public IP** to **Enable**. 1. Under **IAM role**, select **Create new IAM role**. This role is for test purposes only and is not secure. 1. Select **Create role**. 1. Choose **AWS service** and under **Common use cases**, select **EC2**. Then select **Next: Permissions**. 1. Select the check box for the **AmazonECS_FullAccess** policy. Select **Next: Tags**. 1. Select **Next: Review**. 1. Type a name for the IAM role, for example `fargate-test-instance`, and select **Create role**. 1. Go back to the browser tab where you are creating the instance. 1. To the left of **Create new IAM role**, select the refresh button. Choose the `fargate-test-instance` role. Select **Next: Add Storage**. 1. Select **Next: Add Tags**. 1. Select **Next: Configure Security Group**. 1. Select **Create a new security group**, name it `fargate-test`, and ensure that a rule for SSH is defined (`Type: SSH, Protocol: TCP, Port Range: 22`). You must specify the IP ranges for inbound and outbound rules. 1. 
Select **Review and Launch**. 1. Select **Launch**. 1. Optional. Select **Create a new key pair**, name it `fargate-runner-manager` and select **Download Key Pair**. The private key for SSH is downloaded on your computer (check the directory configured in your browser). 1. Select **Launch Instances**. 1. Select **View Instances**. 1. Wait for the instance to be up. Note the `IPv4 Public IP` address. ## Step 4: Install and configure GitLab Runner on the EC2 instance Now install GitLab Runner on the Ubuntu instance. 1. Go to your GitLab project's **Settings > CI/CD** and expand the Runners section. Under **Set up a specific Runner manually**, note the registration token. 1. Ensure your key file has the right permissions by running `chmod 400 path/to/downloaded/key/file`. 1. SSH into the EC2 instance that you created by using: ```shell ssh ubuntu@[ip_address] -i path/to/downloaded/key/file ``` 1. When you are connected successfully, run the following commands: ```shell sudo mkdir -p /opt/gitlab-runner/{metadata,builds,cache} curl -s "https://packages.gitlab.com/install/repositories/runner/gitlab-runner/script.deb.sh" | sudo bash sudo apt install gitlab-runner ``` 1. Run this command with the GitLab URL and registration token you noted in step 1. ```shell sudo gitlab-runner register --url "https://gitlab.com/" --registration-token TOKEN_HERE --name fargate-test-runner --run-untagged --executor custom -n ``` 1. 
Run `sudo vim /etc/gitlab-runner/config.toml` and add the following content: ```toml concurrent = 1 check_interval = 0 [session_server] session_timeout = 1800 [[runners]] name = "fargate-test" url = "https://gitlab.com/" token = "__REDACTED__" executor = "custom" builds_dir = "/opt/gitlab-runner/builds" cache_dir = "/opt/gitlab-runner/cache" [runners.custom] volumes = ["/cache", "/path/to-ca-cert-dir/ca.crt:/etc/gitlab-runner/certs/ca.crt:ro"] config_exec = "/opt/gitlab-runner/fargate" config_args = ["--config", "/etc/gitlab-runner/fargate.toml", "custom", "config"] prepare_exec = "/opt/gitlab-runner/fargate" prepare_args = ["--config", "/etc/gitlab-runner/fargate.toml", "custom", "prepare"] run_exec = "/opt/gitlab-runner/fargate" run_args = ["--config", "/etc/gitlab-runner/fargate.toml", "custom", "run"] cleanup_exec = "/opt/gitlab-runner/fargate" cleanup_args = ["--config", "/etc/gitlab-runner/fargate.toml", "custom", "cleanup"] ``` 1. If you have a GitLab Self-Managed instance with a private CA, add this line: ```toml volumes = ["/cache", "/path/to-ca-cert-dir/ca.crt:/etc/gitlab-runner/certs/ca.crt:ro"] ``` [Learn more about trusting the certificate](../tls-self-signed.md#trusting-the-certificate-for-the-other-cicd-stages). The section of the `config.toml` file shown below is created by the registration command. Do not change it. ```toml concurrent = 1 check_interval = 0 [session_server] session_timeout = 1800 name = "fargate-test" url = "https://gitlab.com/" token = "__REDACTED__" executor = "custom" ``` 1. Run `sudo vim /etc/gitlab-runner/fargate.toml` and add the following content: ```toml LogLevel = "info" LogFormat = "text" [Fargate] Cluster = "test-cluster" Region = "us-east-2" Subnet = "subnet-xxxxxx" SecurityGroup = "sg-xxxxxxxxxxxxx" TaskDefinition = "test-task:1" EnablePublicIP = true [TaskMetadata] Directory = "/opt/gitlab-runner/metadata" [SSH] Username = "root" Port = 22 ``` - Note the value of `Cluster` and the name of the `TaskDefinition`. 
This example shows `test-task` with `:1` as the revision number. If a revision number is not specified, the latest **active** revision is used. - Choose your region. Take the `Subnet` value from the runner manager instance. - To find the security group ID: 1. In AWS, in the list of instances, select the EC2 instance you created. The details are displayed. 1. Under **Security groups**, select the name of the group you created. 1. Copy the **Security group ID**. In a production setting, follow [AWS guidelines](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-security-groups.html) for setting up and using security groups. - If `EnablePublicIP` is set to true, the public IP of the task container is gathered to perform the SSH connection. - If `EnablePublicIP` is set to false: - The Fargate driver uses the task container's private IP. To set up a connection when set to `false`, the VPC Security Group must have an inbound rule for Port 22 (SSH), where the source is the VPC CIDR. - To fetch external dependencies, provisioned AWS Fargate containers must have access to the public internet. To provide public internet access for AWS Fargate containers, you can use a NAT Gateway in the VPC. - The port number of the SSH server is optional. If omitted, the default SSH port (22) is used. - For more information about the section settings, see the [Fargate driver documentation](https://gitlab.com/gitlab-org/ci-cd/custom-executor-drivers/fargate/-/tree/master/docs#configuration). 1. Install the Fargate driver: ```shell sudo curl -Lo /opt/gitlab-runner/fargate "https://gitlab-runner-custom-fargate-downloads.s3.amazonaws.com/latest/fargate-linux-amd64" sudo chmod +x /opt/gitlab-runner/fargate ``` ## Step 5: Create an ECS Fargate cluster An Amazon ECS cluster is a grouping of ECS container instances. 1. Go to [`https://console.aws.amazon.com/ecs/home#/clusters`](https://console.aws.amazon.com/ecs/home#/clusters). 1. Select **Create Cluster**. 1. Choose **Networking only** type. 
Select **Next step**. 1. Name it `test-cluster` (the same as in `fargate.toml`). 1. Select **Create**. 1. Select **View cluster**. Note the region and account ID parts from the `Cluster ARN` value. 1. Select **Update Cluster**. 1. Next to `Default capacity provider strategy`, select **Add another provider** and choose `FARGATE`. Select **Update**. Refer to the AWS [documentation](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/Welcome.html) for detailed instructions on setting up and working with a cluster on ECS Fargate. ## Step 6: Create an ECS task definition In this step you will create a task definition of type `Fargate` and reference the container image that you might use for your CI builds. 1. Go to [`https://console.aws.amazon.com/ecs/home#/taskDefinitions`](https://console.aws.amazon.com/ecs/home#/taskDefinitions). 1. Select **Create new Task Definition**. 1. Choose **FARGATE** and select **Next step**. 1. Name it `test-task`. (Note: The name is the same value defined in the `fargate.toml` file but without `:1`). 1. Select values for **Task memory (GB)** and **Task CPU (vCPU)**. 1. Select **Add container**. Then: 1. Name it `ci-coordinator`, so the Fargate driver can inject the `SSH_PUBLIC_KEY` environment variable. 1. Define image (for example `registry.gitlab.com/tmaczukin-test-projects/fargate-driver-debian:latest`). 1. Define port mapping for 22/TCP. 1. Select **Add**. 1. Select **Create**. 1. Select **View task definition**. > [!warning] > A single Fargate task may launch one or more containers. > The Fargate driver injects the `SSH_PUBLIC_KEY` environment variable > in containers with the `ci-coordinator` name only. You must > have a container with this name in all task definitions used by the Fargate > driver. The container with this name should be the one that has the > SSH server and all GitLab Runner requirements installed, as described above. 
Refer to the AWS [documentation](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/create-task-definition.html) for detailed instructions on setting up and working with task definitions. For information about the ECS service permissions required to launch images from an AWS ECR, see [Amazon ECS task execution IAM role](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_execution_IAM_role.html). For information about ECS authentication to private registries including any hosted on a GitLab instance, see [Private registry authentication for tasks](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/private-auth.html). At this point the runner manager and Fargate Driver are configured and ready to start executing jobs on AWS Fargate. ## Step 7: Test the configuration Your configuration should now be ready to use. 1. In your GitLab project, create a `.gitlab-ci.yml` file: ```yaml test: script: - echo "It works!" - for i in $(seq 1 30); do echo "."; sleep 1; done ``` 1. Go to your project's **CI/CD > Pipelines**. 1. Select **Run Pipeline**. 1. Update the branch and any variables and select **Run Pipeline**. > [!note] > The `image` and `service` keywords in your `.gitlab-ci.yml` file are ignored. > The runner only uses the values specified in the task definition. ## Clean up If you want to perform a cleanup after testing the custom executor with AWS Fargate, remove the following objects: - EC2 instance, key pair, IAM role, and security group created in [step 3](#step-3-create-an-ec2-instance-for-gitlab-runner). - ECS Fargate cluster created in [step 5](#step-5-create-an-ecs-fargate-cluster). - ECS task definition created in [step 6](#step-6-create-an-ecs-task-definition). ## Configure a private AWS Fargate task To ensure a high level of security, configure [a private AWS Fargate task](https://repost.aws/knowledge-center/ecs-fargate-tasks-private-subnet). In this configuration, executors use only internal AWS IP addresses. 
They only allow outbound traffic from AWS so that CI/CD jobs run on a private AWS Fargate instance. To configure a private AWS Fargate task, complete the following steps to configure AWS and run the AWS Fargate task in the private subnet: 1. Ensure the existing public subnet has not reserved all IP addresses in the VPC address range. Inspect the CIDR address ranges of the VPC and subnet. If the subnet CIDR address range is a subset of the VPC CIDR address range, skip steps 2 and 4. Otherwise your VPC has no free address range, so you must delete and recreate the VPC and the public subnet: 1. Delete your existing subnet and VPC. 1. [Create a VPC](https://docs.aws.amazon.com/vpc/latest/privatelink/create-interface-endpoint.html#create-interface-endpoint) with the same configuration as the VPC you deleted and update the CIDR address, for example `10.0.0.0/23`. 1. [Create a public subnet](https://docs.aws.amazon.com/vpc/latest/privatelink/interface-endpoints.html) with the same configuration as the subnet you deleted. Use a CIDR address that is a subset of the VPC address range, for example `10.0.0.0/24`. 1. [Create a private subnet](https://docs.aws.amazon.com/vpc/latest/userguide/create-subnet.html#create-subnets) with the same configuration as the public subnet. Use a CIDR address range that does not overlap the public subnet range, for example `10.0.1.0/24`. 1. [Create a NAT gateway](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-nat-gateway.html), and place it inside the public subnet. 1. Modify the private subnet routing table so that the destination `0.0.0.0/0` points to the NAT gateway. 1. Update the `fargate.toml` configuration: ```toml Subnet = "private-subnet-id" EnablePublicIP = false UsePublicIP = false ``` 1. Add the following inline policy to the IAM role associated with your Fargate task (the IAM role associated with Fargate tasks is typically named `ecsTaskExecutionRole` and should already exist). 
```json { "Statement": [ { "Sid": "VisualEditor0", "Effect": "Allow", "Action": [ "secretsmanager:GetSecretValue", "kms:Decrypt", "ssm:GetParameters" ], "Resource": [ "arn:aws:secretsmanager:*::secret:*", "arn:aws:kms:*::key/*" ] } ] } ``` 1. Change the "inbound rules" of your security group to reference the security group itself. In the AWS configuration dialogue: - Set `Type` to `ssh`. - Set `Source` to `Custom`. - Select the security group. - Remove the existing inbound rule that allows SSH access from any host. > [!warning] > When you remove the existing inbound rule, you cannot use SSH to connect to the Amazon Elastic Compute Cloud instance. For more information, see the following AWS documentation: - [Amazon ECS task execution IAM role](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_execution_IAM_role.html) - [Amazon ECR interface VPC endpoints (AWS PrivateLink)](https://docs.aws.amazon.com/AmazonECR/latest/userguide/vpc-endpoints.html) - [Amazon ECS interface VPC endpoints](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/vpc-endpoints.html) - [VPC with public and private subnets](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-example-private-subnets-nat.html) ## Troubleshooting ### `No Container Instances were found in your cluster` error when testing the configuration `error="starting new Fargate task: running new task on Fargate: error starting AWS Fargate Task: InvalidParameterException: No Container Instances were found in your cluster."` The AWS Fargate Driver requires the ECS Cluster to be configured with a [default capacity provider strategy](#step-5-create-an-ecs-fargate-cluster). Further reading: - A default [capacity provider strategy](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-capacity-providers.html) is associated with each Amazon ECS cluster. If no other capacity provider strategy or launch type is specified, the cluster uses this strategy when a task runs or a service is created. 
- If a [`capacityProviderStrategy`](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_RunTask.html#ECS-RunTask-request-capacityProviderStrategy) is specified, the `launchType` parameter must be omitted. If no `capacityProviderStrategy` or `launchType` is specified, the `defaultCapacityProviderStrategy` for the cluster is used. ### Metadata `file does not exist` error when running jobs `Application execution failed PID=xxxxx error="obtaining information about the running task: trying to access file \"/opt/gitlab-runner/metadata/-xxxxx.json\": file does not exist" cleanup_std=err job=xxxxx project=xx runner=` Ensure that your IAM Role policy is configured correctly and can perform write operations to create the metadata JSON file in `/opt/gitlab-runner/metadata/`. To test in a non-production environment, use the AmazonECS_FullAccess policy. Review your IAM role policy according to your organization's security requirements. ### `connection timed out` when running jobs `Application execution failed PID=xxxx error="executing the script on the remote host: executing script on container with IP \"172.x.x.x\": connecting to server: connecting to server \"172.x.x.x:22\" as user \"root\": dial tcp 172.x.x.x:22: connect: connection timed out"` If `EnablePublicIP` is configured to false, ensure that your VPC Security Group has an inbound rule that allows SSH connectivity. Your AWS Fargate task container must accept the SSH traffic from the GitLab Runner EC2 instance. ### `connection refused` when running jobs `Application execution failed PID=xxxx error="executing the script on the remote host: executing script on container with IP \"10.x.x.x\": connecting to server: connecting to server \"10.x.x.x:22\" as user \"root\": dial tcp 10.x.x.x:22: connect: connection refused"` Ensure that the task container has port 22 exposed and port mapping is configured based on the instructions in [Step 6: Create an ECS task definition](#step-6-create-an-ecs-task-definition). 
If the port is exposed and the container is configured: 1. Check to see if there are any errors for the container in **Amazon ECS > Clusters > Choose your task definition > Tasks**. 1. View tasks with a status of `Stopped` and check the latest one that failed. The **logs** tab has more details if there is a container failure. Alternatively, ensure that you can run the Docker container locally. ### Error: `ssh: unable to authenticate, attempted methods [none publickey], no supported methods remain` The following error occurs if an unsupported key type is being used due to an older version of the AWS Fargate driver. `Application execution failed PID=xxxx error="executing the script on the remote host: executing script on container with IP \"172.x.x.x\": connecting to server: connecting to server \"172.x.x.x:22\" as user \"root\": ssh: handshake failed: ssh: unable to authenticate, attempted methods [none publickey], no supported methods remain"` To resolve this issue, install the latest AWS Fargate driver on the GitLab Runner EC2 instance: ```shell sudo curl -Lo /opt/gitlab-runner/fargate "https://gitlab-runner-custom-fargate-downloads.s3.amazonaws.com/latest/fargate-linux-amd64" sudo chmod +x /opt/gitlab-runner/fargate ``` ================================================ FILE: docs/configuration/slot_based_cgroups.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Slot-based cgroup support --- Slot-based cgroup support improves resource isolation and management when you use GitLab Runner with autoscaling. Slot-based cgroups automatically assign jobs to specific control groups (cgroups) based on the slot number allocated by the autoscaler. ## Benefits - Better resource isolation: Prevents resource interference between concurrent jobs on the same instance. 
- Easier monitoring: Per-slot resource usage can be tracked independently. - Improved debugging: Cgroup-based metrics help identify resource-hungry jobs. - Fine-grained control: Set resource limits per slot for predictable performance. ## Supported executors Slot-based cgroups work with autoscaling executors that use [taskscaler](https://gitlab.com/gitlab-org/fleeting/taskscaler) for slot management: - [Docker Autoscaler executor](../executors/docker_autoscaler.md#slot-based-cgroup-support) - [Instance executor](../executors/instance.md#slot-based-cgroup-support) ## Prerequisites - Linux host with cgroup v2 support - Root access for initial cgroup hierarchy setup - GitLab Runner with autoscaler functionality - Taskscaler for slot assignment (automatically provided by autoscaler) ## Configuration To enable slot-based cgroup support, add the following to your `config.toml`. ### For Docker with `systemd` cgroup driver If Docker is using the `systemd` cgroup driver (most common), use the `systemd` slice format: ```toml [[runners]] name = "my-autoscaler-runner" executor = "docker-autoscaler" use_slot_cgroups = true slot_cgroup_template = "runner-slot-${slot}.slice" [runners.autoscaler] capacity_per_instance = 4 ``` ### For Docker with `cgroupfs` driver If Docker is using the `cgroupfs` driver, use the raw `cgroup` path format: ```toml [[runners]] name = "my-autoscaler-runner" executor = "docker-autoscaler" use_slot_cgroups = true slot_cgroup_template = "gitlab-runner/slot-${slot}" [runners.autoscaler] capacity_per_instance = 4 ``` ### Configuration options | Setting | Description | Default | |---------|-------------|---------| | `use_slot_cgroups` | Enable slot-based cgroup assignment | `false` | | `slot_cgroup_template` | Template for cgroup paths. Use `${slot}` as placeholder. 
Format depends on Docker's cgroup driver (systemd: `runner-slot-${slot}.slice`, cgroupfs: `gitlab-runner/slot-${slot}`) | `"gitlab-runner/slot-${slot}"` | Templates use bash-style variable expansion with `${slot}` as the placeholder for the slot number. For example: - With `systemd` driver: `runner-slot-${slot}.slice` becomes `runner-slot-5.slice` for slot 5 - With `cgroupfs` driver: `gitlab-runner/slot-${slot}` becomes `gitlab-runner/slot-5` for slot 5 Check your Docker cgroup driver with: `docker info | grep "Cgroup Driver"` ### Docker-specific configuration When using the Docker Autoscaler executor, you can specify a separate template for service containers: ```toml [[runners]] executor = "docker-autoscaler" use_slot_cgroups = true slot_cgroup_template = "runner-slot-${slot}.slice" [runners.docker] service_slot_cgroup_template = "runner-slot-${slot}.slice" ``` | Setting | Description | Default | |---------|-------------|---------| | `service_slot_cgroup_template` | Template for service container cgroup paths. Must match Docker's cgroup driver format | Same as `slot_cgroup_template` | ## Environment setup Before enabling slot-based cgroups, prepare the cgroup hierarchy on your runner hosts. ### Setup script for systemd cgroup driver If Docker is using the `systemd` cgroup driver (check with `docker info | grep "Cgroup Driver"`), you must create `systemd` slices instead of raw cgroup directories. 
Create a setup script (`gitlab-runner-systemd-slice-setup.sh`): ```shell #!/bin/bash # gitlab-runner-systemd-slice-setup.sh # Script to set up systemd slices for GitLab Runner slot-based cgroups # This example configures 4 slots on an 8-core machine, with each slot pinned to 2 CPUs set -e MAX_SLOTS=4 # Adjust based on your capacity_per_instance configuration # CPU pinning configuration (2 CPUs per slot on an 8-core machine) # Format: comma-separated CPU list for systemd AllowedCPUs declare -a CPU_ASSIGNMENTS=( "0,1" # Slot 0: CPUs 0 and 1 "2,3" # Slot 1: CPUs 2 and 3 "4,5" # Slot 2: CPUs 4 and 5 "6,7" # Slot 3: CPUs 6 and 7 ) # Check if running as root if [[ $EUID -ne 0 ]]; then echo "This script must be run as root for systemd slice setup" exit 1 fi # Verify systemd is available if ! command -v systemctl &> /dev/null; then echo "Error: systemctl not found. This script requires systemd." exit 1 fi echo "Setting up systemd slices for GitLab Runner" echo "Configuration: $MAX_SLOTS slots on an 8-core machine (2 CPUs per slot)" for ((slot=0; slot "/etc/systemd/system/$slice_name" </dev/null || echo "inactive") echo " $slice_name: $status" done echo "" echo "To verify CPU assignments, check:" echo " systemctl show runner-slot-0.slice | grep AllowedCPUs" ``` Run the setup script: ```shell chmod +x gitlab-runner-systemd-slice-setup.sh sudo ./gitlab-runner-systemd-slice-setup.sh ``` ### Setup script for `cgroupfs` driver (alternative) If Docker is using the `cgroupfs` driver instead of `systemd`, use this alternative script that creates raw cgroup directories: ```shell #!/bin/bash # gitlab-runner-cgroup-setup.sh # Script to set up cgroup v2 hierarchy for GitLab Runner slot-based cgroups # This example configures 4 slots on an 8-core machine, with each slot pinned to 2 CPUs # Use this script only if Docker is using the cgroupfs driver (not systemd) set -e CGROUP_ROOT="/sys/fs/cgroup" RUNNER_CGROUP="gitlab-runner" MAX_SLOTS=4 # Adjust based on your capacity_per_instance 
configuration # CPU pinning configuration (2 CPUs per slot on an 8-core machine) # Format: "cpu_list" - adjust based on your CPU topology declare -a CPU_ASSIGNMENTS=( "0-1" # Slot 0: CPUs 0 and 1 "2-3" # Slot 1: CPUs 2 and 3 "4-5" # Slot 2: CPUs 4 and 5 "6-7" # Slot 3: CPUs 6 and 7 ) # Check if running as root if [[ $EUID -ne 0 ]]; then echo "This script must be run as root for cgroup setup" exit 1 fi # Verify cgroup v2 is available if [[ ! -f "$CGROUP_ROOT/cgroup.controllers" ]]; then echo "Error: cgroup v2 not detected. This script requires cgroup v2." exit 1 fi echo "Setting up cgroup v2 hierarchy for GitLab Runner" echo "Configuration: $MAX_SLOTS slots on an 8-core machine (2 CPUs per slot)" # Create base runner cgroup mkdir -p "$CGROUP_ROOT/$RUNNER_CGROUP" # Enable controllers if available if [[ -f "$CGROUP_ROOT/cgroup.controllers" ]]; then echo "+memory +cpu +cpuset" > "$CGROUP_ROOT/cgroup.subtree_control" 2>/dev/null || true fi # Create slot-specific cgroups for ((slot=0; slot "$CGROUP_ROOT/$RUNNER_CGROUP/cgroup.subtree_control" 2>/dev/null || true fi # Pin slot to specific CPUs echo "${CPU_ASSIGNMENTS[$slot]}" > "$slot_path/cpuset.cpus" # Set memory nodes (usually 0 for single NUMA node systems) echo "0" > "$slot_path/cpuset.mems" # Set permissions for GitLab Runner user chown -R gitlab-runner:gitlab-runner "$slot_path" 2>/dev/null || true done echo "Cgroup setup complete!" # Verify setup echo "" echo "Verifying cgroup setup:" for ((slot=0; slot /sys/fs/cgroup/$GITLAB_RUNNER_SLOT_CGROUP/memory.max - echo "50000" > /sys/fs/cgroup/$GITLAB_RUNNER_SLOT_CGROUP/cpu.max - ./my-process ``` ## Troubleshooting ### Containers fail to start with cgroup errors 1. Check that the cgroup paths exist under `/sys/fs/cgroup/`: ```shell ls -la /sys/fs/cgroup/gitlab-runner/ ``` 1. Ensure the GitLab Runner user has write access to the cgroup directories: ```shell ls -la /sys/fs/cgroup/gitlab-runner/slot-0/ ``` 1. 
Confirm `slot_cgroup_template` uses the correct format with `${slot}` placeholder: 1. Check GitLab Runner logs for specific cgroup creation errors: 1. Test manually: For Docker Autoscaler executor: ```shell docker run --rm --cgroup-parent=gitlab-runner/slot-0 alpine echo "test" ``` For Instance executor: ```yaml job: script: - echo "Slot cgroup: $GITLAB_RUNNER_SLOT_CGROUP" ``` ### Jobs use the same cgroup If you see a warning in the logs about templates not containing `${slot}` placeholder: ```plaintext level=warning msg="Slot cgroup template does not contain ${slot} placeholder. All jobs will use the same cgroup, defeating the purpose of slot-based isolation." ``` This means your `slot_cgroup_template` is missing the `${slot}` variable. Update your configuration to include the placeholder: ```toml [[runners]] slot_cgroup_template = "gitlab-runner/slot-${slot}" ``` ### Cgroup v2 not available If the setup script reports that cgroup v2 is not detected, you might need to enable it on your system. Check your Linux distribution's documentation for enabling cgroup v2. Modern distributions typically enable it by default. ================================================ FILE: docs/configuration/speed_up_job_execution.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Speed up job execution --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} You can improve performance of your jobs by caching your images and dependencies. 
## Use a proxy for containers You can speed up the time it takes to download Docker images by using: - The GitLab Dependency Proxy or - A mirror of the DockerHub Registry - Other open source solutions ### GitLab Dependency Proxy To more quickly access container images, you can [use the Dependency Proxy](https://docs.gitlab.com/user/packages/dependency_proxy/) to proxy container images. ### Docker Hub Registry mirror You can also speed up the time it takes for your jobs to access container images by mirroring Docker Hub. This results in the [Registry as a pull through cache](https://docs.docker.com/docker-hub/image-library/mirror/). In addition to speeding up job execution, a mirror can make your infrastructure more resilient to Docker Hub outages and Docker Hub rate limits. When the Docker daemon is [configured to use the mirror](https://docs.docker.com/docker-hub/image-library/mirror/#configure-the-docker-daemon) it automatically checks for the image on your running instance of the mirror. If it's not available, it pulls the image from the public Docker registry and stores it locally before handing it back to you. The next request for the same image pulls from your local registry. For more information on how it works, see [Docker daemon configuration documentation](https://docs.docker.com/docker-hub/image-library/mirror/#configure-the-docker-daemon). #### Use a Docker Hub Registry mirror To create a Docker Hub Registry mirror: 1. Log in to a dedicated machine where the proxy container registry will run. 1. Make sure that [Docker Engine](https://docs.docker.com/get-started/get-docker/) is installed on that machine. 1. Create a new container registry: ```shell docker run -d -p 6000:5000 \ -e REGISTRY_PROXY_REMOTEURL=https://registry-1.docker.io \ --restart always \ --name registry registry:2 ``` You can modify the port number (`6000`) to expose the registry on a different port. This will start the server with `http`. 
If you want to turn on TLS (`https`) follow the [official documentation](https://distribution.github.io/distribution/about/configuration/#tls). 1. Check the IP address of the server: ```shell hostname --ip-address ``` You should choose the private network IP address. The private network is usually the fastest solution for internal communication between machines on a single provider, like DigitalOcean, AWS, or Azure. Usually, data transferred on a private network is not applied against your monthly bandwidth limit. The Docker Hub registry is accessible under `MY_REGISTRY_IP:6000`. You can now [configure `config.toml`](autoscale.md#distributed-container-registry-mirroring) to use the new registry server. ### Other open source solutions - [`rpardini/docker-registry-proxy`](https://github.com/rpardini/docker-registry-proxy) can proxy most container registries locally, including the GitLab Container Registry. ## Use a distributed cache You can speed up the time it takes to download language dependencies by using a distributed [cache](https://docs.gitlab.com/ci/yaml/#cache). To specify a distributed cache, you set up the cache server and then [configure runner to use that cache server](advanced-configuration.md#the-runnerscache-section). If you are using autoscaling, learn more about the distributed runners [cache feature](autoscale.md#distributed-runners-caching). The following cache servers are supported: - [AWS S3](#use-aws-s3) - [MinIO](#use-minio) or other S3-compatible cache server - [Google Cloud Storage](#use-google-cloud-storage) - [Azure Blob storage](#use-azure-blob-storage) Learn more about GitLab CI/CD [cache dependencies and best practices](https://docs.gitlab.com/ci/caching/). ### Use AWS S3 To use AWS S3 as a distributed cache, [edit runner's `config.toml` file](advanced-configuration.md#the-runnerscaches3-section) to point to the S3 location and provide credentials for connecting. Make sure the runner has a network path to the S3 endpoint. 
If you use a private subnet with a NAT gateway, to save cost on data transfers you can enable an S3 VPC endpoint. ### Use MinIO Instead of using AWS S3, you can create your own cache storage. 1. Log in to a dedicated machine where the cache server will run. 1. Make sure that [Docker Engine](https://docs.docker.com/get-started/get-docker/) is installed on that machine. 1. Start [MinIO](https://www.min.io), a simple S3-compatible server written in Go: ```shell docker run -d --restart always -p 9005:9000 \ -v /.minio:/root/.minio -v /export:/export \ -e "MINIO_ROOT_USER=<minio_root_user>" \ -e "MINIO_ROOT_PASSWORD=<minio_root_password>" \ --name minio \ minio/minio:latest server /export ``` You can modify the port `9005` to expose the cache server on a different port. 1. Check the IP address of the server: ```shell hostname --ip-address ``` 1. Your cache server will be available at `MY_CACHE_IP:9005`. 1. Create a bucket that will be used by the runner: ```shell sudo mkdir /export/runner ``` `runner` is the name of the bucket in that case. If you choose a different bucket, then it will be different. All caches will be stored in the `/export` directory. 1. Use the `MINIO_ROOT_USER` and `MINIO_ROOT_PASSWORD` values (from above) as your Access and Secret Keys when configuring your runner. You can now [configure `config.toml`](autoscale.md#distributed-runners-caching) to use the new cache server. ### Use Google Cloud Storage To use Google Cloud Platform as a distributed cache, [edit runner's `config.toml` file](advanced-configuration.md#the-runnerscachegcs-section) to point to the GCP location and provide credentials for connecting. Make sure the runner has a network path to the GCS endpoint. ### Use Azure Blob storage To use Azure Blob storage as a distributed cache, [edit runner's `config.toml` file](advanced-configuration.md#the-runnerscacheazure-section) to point to the Azure location and provide credentials for connecting. Make sure the runner has a network path to the Azure endpoint. 
### Speed up cache and artifact transfers You can improve cache and artifact upload and download performance with the following options. #### Backend-specific runner config Each cache backend has its own `config.toml` section. Optimize for your backend: - [S3 configuration](advanced-configuration.md#the-runnerscaches3-section): Set `BucketLocation` to the same region as your runners. Use `RoleARN` for archives larger than 5 GB to [enable multipart uploads](advanced-configuration.md#enable-multipart-transfers-with-rolearn). Use the default S3 v2 adapter (do not set `FF_USE_LEGACY_S3_CACHE_ADAPTER=true`). Optionally enable `Accelerate = true` for [AWS S3 Transfer Acceleration](https://docs.aws.amazon.com/AmazonS3/latest/userguide/transfer-acceleration.html) when runners are far from the bucket region. An [S3 VPC endpoint](https://docs.aws.amazon.com/AmazonS3/latest/userguide/creating-s3-vpc-endpoint.html) in the same region can reduce latency and cost. - [Google Cloud Storage configuration](advanced-configuration.md#the-runnerscachegcs-section): Use a bucket in the same or nearest region to your runners. - [Azure Blob configuration](advanced-configuration.md#the-runnerscacheazure-section): Use a storage account in the same or nearest region to your runners. #### Cache compression Use faster compression to speed up cache archiving and download. This creates larger archives. Set compression options in your job or in [CI/CD variables](https://docs.gitlab.com/ee/ci/variables/): | Variable | Recommended for speed | Description | |----------|------------------------|-------------| | `CACHE_COMPRESSION_LEVEL` | `fastest` or `fast` | Less CPU and faster upload or download. Archives are larger. Default is `default`. | | `CACHE_COMPRESSION_FORMAT` | `zip` | `zip` is often faster to create. `tarzstd` gives better compression ratio but can be slower. 
| Example configuration in `.gitlab-ci.yml`: ```yaml variables: CACHE_COMPRESSION_LEVEL: fastest CACHE_COMPRESSION_FORMAT: zip ``` #### Cache request timeout If large caches hit timeouts, increase the limit (in minutes) with the `CACHE_REQUEST_TIMEOUT` [CI/CD variable](https://docs.gitlab.com/ee/ci/variables/). Default is `10`. This setting does not speed up transfers but prevents failures on slow or large uploads and downloads. #### Cache transfer buffer size (throughput) Cache download and upload use a single streaming buffer. A larger buffer reduces system calls and often increases throughput, especially if you see transfers cap around 20 to 30 MB/s. Set `CACHE_TRANSFER_BUFFER_SIZE` (in bytes) in the job environment or in [CI/CD variables](https://docs.gitlab.com/ee/ci/variables/). Default is 4 MiB (4194304). Example configuration for 8 MiB: ```yaml variables: CACHE_TRANSFER_BUFFER_SIZE: "8388608" ``` #### Cache chunk size and concurrency Chunk size is the size in bytes of each part or chunk for parallel upload (GoCloud) or parallel download (presigned or GoCloud). Concurrency is how many chunks run in parallel. Memory use is approximately chunk size x concurrency. | Variable | Description | Default | |----------|-------------|---------| | `CACHE_CHUNK_SIZE` | Chunk size in bytes. For upload (GoCloud backends): limits are backend-dependent (for example, 5 MiB to 5 GiB per part, max 10,000 parts for S3; Azure and GCS have their own limits). For download: 0 = legacy sequential; when concurrency > 1, 16 MiB is used if unset. | Upload: 16 MiB (16777216). Download: 0 (legacy) | | `CACHE_CONCURRENCY` | Number of concurrent chunks. Upload: GoCloud backends only (S3 with RoleARN, Azure, GCS). Download: 0 or 1 = legacy sequential mode; values greater than 1 = parallel mode (presigned or GoCloud). | Upload: 16. 
Download: 0 (legacy) | Example configuration for custom tuning (for example, 32 MiB chunks, 32 concurrent): ```yaml variables: CACHE_CHUNK_SIZE: "33554432" CACHE_CONCURRENCY: "32" ``` #### Artifact uploads to GitLab GitLab sends artifacts to the GitLab coordinator, which might store them in object storage. To speed up the upload from the runner: | Variable | Recommended for speed | Description | |----------|------------------------|-------------| | `ARTIFACT_COMPRESSION_LEVEL` | `fastest` or `fast` | Reduces CPU and time spent compressing before upload. | Set compression options in your job or in CI/CD variables, for example: ```yaml variables: ARTIFACT_COMPRESSION_LEVEL: fastest ``` #### Artifact downloads from object storage When the coordinator redirects artifact downloads to object storage (`direct_download`), you can enable parallel range downloads with the `FF_USE_PARALLEL_ARTIFACT_TRANSFER` [feature flag](feature-flags.md). This is separate from parallel cache transfers (`FF_USE_PARALLEL_CACHE_TRANSFER`). See [Parallel artifact downloads (direct download)](advanced-configuration.md#parallel-artifact-downloads-direct-download). ================================================ FILE: docs/configuration/tls-self-signed.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Self-signed certificates or custom Certification Authorities --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} GitLab Runner provides two options to configure certificates to be used to verify TLS peers: - **For connections to the GitLab server**: The certificate file can be specified as detailed in the [Supported options for self-signed certificates targeting the GitLab server](#supported-options-for-self-signed-certificates-targeting-the-gitlab-server) section. 
This solves the `x509: certificate signed by unknown authority` problem when registering a runner. For existing Runners, the same error can be seen in Runner logs when trying to check the jobs: ```plaintext Couldn't execute POST against https://hostname.tld/api/v4/jobs/request: Post https://hostname.tld/api/v4/jobs/request: x509: certificate signed by unknown authority ``` - **Connecting to a cache server or an external Git LFS store**: A more generic approach which also covers other scenarios such as user scripts, a certificate can be specified and installed on the container as detailed in the [Trusting TLS certificates for Docker and Kubernetes executors](#trusting-tls-certificates-for-docker-and-kubernetes-executors) section. An example job log error concerning a Git LFS operation that is missing a certificate: ```plaintext LFS: Get https://object.hostname.tld/lfs-dev/c8/95/a34909dce385b85cee1a943788044859d685e66c002dbf7b28e10abeef20?X-Amz-Expires=600&X-Amz-Date=20201006T043010Z&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=svcgitlabstoragedev%2F20201006%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-SignedHeaders=host&X-Amz-Signature=012211eb0ff0e374086e8c2d37556f2d8ca4cc948763e90896f8f5774a100b55: x509: certificate signed by unknown authority ``` ## Supported options for self-signed certificates targeting the GitLab server This section refers to the situation where only the GitLab server requires a custom certificate. If other hosts (for example, object storage service without [proxy download enabled](https://docs.gitlab.com/administration/object_storage/#proxy-download)) also require a custom certificate authority (CA), see the [next section](#trusting-tls-certificates-for-docker-and-kubernetes-executors). GitLab Runner supports the following options: - **Default - Read the system certificate**: GitLab Runner reads the system certificate store and verifies the GitLab server against the certificate authorities (CA) stored in the system. 
- **Specify a custom certificate file**: GitLab Runner exposes the `tls-ca-file` option during [registration](../commands/_index.md#gitlab-runner-register) (`gitlab-runner register --tls-ca-file=/path`), and in [`config.toml`](advanced-configuration.md) under the `[[runners]]` section. This allows you to specify a custom certificate file. This file is read every time the Runner tries to access the GitLab server. If you are using GitLab Runner Helm chart, you must configure certificates as described in [Access GitLab with a custom certificate](../install/kubernetes_helm_chart_configuration.md#access-gitlab-with-a-custom-certificate). - **Read a PEM certificate**: GitLab Runner reads the PEM certificate (**DER format is not supported**) from a predefined file: - `/etc/gitlab-runner/certs/gitlab.example.com.crt` on \*nix systems when GitLab Runner is executed as `root`. If your server address is `https://gitlab.example.com:8443/`, create the certificate file at: `/etc/gitlab-runner/certs/gitlab.example.com.crt`. You can use the `openssl` client to download the GitLab instance's certificate to `/etc/gitlab-runner/certs`: ```shell openssl s_client -showcerts -connect gitlab.example.com:443 -servername gitlab.example.com < /dev/null 2>/dev/null | openssl x509 -outform PEM > /etc/gitlab-runner/certs/gitlab.example.com.crt ``` To verify that the file is correctly installed, you can use a tool like `openssl`. For example: ```shell echo | openssl s_client -CAfile /etc/gitlab-runner/certs/gitlab.example.com.crt -connect gitlab.example.com:443 -servername gitlab.example.com ``` - `~/.gitlab-runner/certs/gitlab.example.com.crt` on \*nix systems when GitLab Runner is executed as non-`root`. - `./certs/gitlab.example.com.crt` on other systems. If running GitLab Runner as a Windows service, this does not work. Specify a custom certificate file instead. 
Notes: - If your GitLab server certificate is signed by your CA, use your CA certificate (not your GitLab server signed certificate). You might need to add the intermediates to the chain as well. For example, if you have a primary, intermediate, and root certificate, you can put all of them into one file: ```plaintext -----BEGIN CERTIFICATE----- (Your primary SSL certificate: your_domain_name.crt) -----END CERTIFICATE----- -----BEGIN CERTIFICATE----- (Your intermediate certificate) -----END CERTIFICATE----- -----BEGIN CERTIFICATE----- (Your root certificate) -----END CERTIFICATE----- ``` - If you are updating the certificate for an existing Runner, [restart it](../commands/_index.md#gitlab-runner-restart). - If you already have a Runner configured through HTTP, update your instance path to the new HTTPS URL of your GitLab instance in your `config.toml`. - As a temporary and insecure workaround, to skip the verification of certificates, in the `variables:` section of your `.gitlab-ci.yml` file, set the CI variable `GIT_SSL_NO_VERIFY` to `true`. ### Git cloning The Runner injects missing certificates to build the CA chain by using `CI_SERVER_TLS_CA_FILE`. This allows `git clone` and artifacts to work with servers that do not use publicly trusted certificates. This approach is secure, but makes the Runner a single point of trust. ## Trusting TLS certificates for Docker and Kubernetes executors Consider the following information when you register a certificate on a container: - The [**user image**](https://docs.gitlab.com/ci/yaml/#image), which is used to run the user script. For scenarios that involve trusting the certificate for user scripts, the user must take ownership regarding how to install a certificate. Certificate installation procedures can vary based on the image. The Runner has no way of knowing how to install a certificate in each possible scenario. 
- The [**Runner helper image**](advanced-configuration.md#helper-image), which is used to handle Git, artifacts, and cache operations. For scenarios that involve trusting the certificate for other CI/CD stages, the user only needs to make a certificate file available at a specific location (for example, `/etc/gitlab-runner/certs/ca.crt`), and the Docker container will automatically install it for the user. ### Trusting the certificate for user scripts If your build uses TLS with a self-signed certificate or custom certificate, install the certificate in your build job for peer communication. The Docker container running the user scripts doesn't have the certificate files installed by default. This might be required to use a custom cache host, perform a secondary `git clone`, or fetch a file through a tool like `wget`. To install the certificate: 1. Map the necessary files as a Docker volume so that the Docker container that runs the scripts can see them. Do this by adding a volume inside the respective key inside the `[runners.docker]` in the `config.toml` file, for example: - **Linux**: ```toml [[runners]] name = "docker" url = "https://example.com/" token = "TOKEN" executor = "docker" [runners.docker] image = "ubuntu:latest" # Add path to your ca.crt file in the volumes list volumes = ["/cache", "/path/to-ca-cert-dir/ca.crt:/etc/gitlab-runner/certs/ca.crt:ro"] ``` 1. **Linux-only**: Use the mapped file (for example, `ca.crt`) in a [`pre_build_script`](advanced-configuration.md#the-runners-section) that: 1. Copies it to `/usr/local/share/ca-certificates/ca.crt` inside the Docker container. 1. Installs it by running `update-ca-certificates --fresh`. 
For example (commands vary based on the distribution you're using): - On Ubuntu: ```toml [[runners]] name = "docker" url = "https://example.com/" token = "TOKEN" executor = "docker" # Copy and install CA certificate before each job pre_build_script = """ apt-get update -y > /dev/null apt-get install -y ca-certificates > /dev/null cp /etc/gitlab-runner/certs/ca.crt /usr/local/share/ca-certificates/ca.crt update-ca-certificates --fresh > /dev/null """ ``` - On Alpine: ```toml [[runners]] name = "docker" url = "https://example.com/" token = "TOKEN" executor = "docker" # Copy and install CA certificate before each job pre_build_script = """ apk update >/dev/null apk add ca-certificates > /dev/null rm -rf /var/cache/apk/* cp /etc/gitlab-runner/certs/ca.crt /usr/local/share/ca-certificates/ca.crt update-ca-certificates --fresh > /dev/null """ ``` If you just need the GitLab server CA cert that can be used, you can retrieve it from the file stored in the `CI_SERVER_TLS_CA_FILE` variable: ```shell curl --cacert "${CI_SERVER_TLS_CA_FILE}" ${URL} -o ${FILE} ``` ### Trusting the certificate for the other CI/CD stages You can map a certificate file to `/etc/gitlab-runner/certs/ca.crt` on Linux, or `C:\GitLab-Runner\certs\ca.crt` on Windows. The Runner helper image installs this user-defined `ca.crt` file at start-up, and uses it when performing operations like cloning and uploading artifacts, for example. 
#### Docker - **Linux**: ```toml [[runners]] name = "docker" url = "https://example.com/" token = "TOKEN" executor = "docker" [runners.docker] image = "ubuntu:latest" # Add path to your ca.crt file in the volumes list volumes = ["/cache", "/path/to-ca-cert-dir/ca.crt:/etc/gitlab-runner/certs/ca.crt:ro"] ``` - **Windows**: ```toml [[runners]] name = "docker" url = "https://example.com/" token = "TOKEN" executor = "docker" [runners.docker] image = "mcr.microsoft.com/windows/servercore:21H2" # Add directory holding your ca.crt file in the volumes list volumes = ["c:\\cache", "c:\\path\\to-ca-cert-dir:C:\\GitLab-Runner\\certs:ro"] ``` #### Kubernetes To provide a certificate file to jobs running in Kubernetes: 1. Store the certificate as a Kubernetes secret in your namespace: ```shell kubectl create secret generic --namespace --from-file= ``` 1. Mount the secret as a volume in your runner, replacing `` and `` with appropriate values: ```toml gitlab-runner: runners: config: | [[runners]] [runners.kubernetes] namespace = "{{.Release.Namespace}}" image = "ubuntu:latest" [[runners.kubernetes.volumes.secret]] name = "" mount_path = "" ``` The `mount_path` is the directory in the container where the certificate is stored. If you used `/etc/gitlab-runner/certs/` as the `mount_path` and `ca.crt` as your certificate file, your certificate is available at `/etc/gitlab-runner/certs/ca.crt` inside your container. 1. As part of the job, install the mapped certificate file to the system certificate store. For example, in an Ubuntu container: ```yaml script: - cp /etc/gitlab-runner/certs/ca.crt /usr/local/share/ca-certificates/ - update-ca-certificates ``` The Kubernetes executor's handling of the helper image's `ENTRYPOINT` has a [known issue](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28484). When a certificate file is mapped, it isn't automatically installed to the system certificate store. 
## Troubleshooting Refer to the general [SSL troubleshooting](https://docs.gitlab.com/omnibus/settings/ssl/ssl_troubleshooting/) documentation. In addition, you can use the [`tlsctl`](https://gitlab.com/gitlab-org/ci-cd/runner-tools/tlsctl) tool to debug GitLab certificates from the Runner's end. ### Error: `x509: certificate signed by unknown authority` This error can occur while trying to pull executor images from a private registry when the Docker host or Kubernetes node where the runner schedules the executors does not trust the private registry's certificate. To fix the error, add the relevant root certificate authority or certificate chain to the system's trust store and restart the container service. If you're on Ubuntu or Alpine, run the following commands: ```shell cp ca.crt /usr/local/share/ca-certificates/ca.crt update-ca-certificates systemctl restart docker.service ``` For operating systems other than Ubuntu or Alpine, see your operating system's documentation to find appropriate commands to install the trusted certificate. Depending on your version of GitLab Runner and the Docker host environment, you might also have to disable the `FF_RESOLVE_FULL_TLS_CHAIN` feature flag. ### `apt-get: not found` errors in jobs The [`pre_build_script`](advanced-configuration.md#the-runners-section) commands are executed before every job a runner executes. Distribution-specific commands like `apk` or `apt-get` can cause issues. When you install a certificate for user scripts, your CI jobs might fail if they use [images](https://docs.gitlab.com/ci/yaml/#image) based on different distributions. For example, if your CI jobs run Ubuntu and Alpine images, Ubuntu commands fail on Alpine. The `apt-get: not found` error occurs in jobs with Alpine-based images. To resolve this issue, do one of the following: - Write your `pre_build_script` so that it is distribution-independent. 
- Use [tags](https://docs.gitlab.com/ci/yaml/#tags) to ensure runners only pick up jobs with compatible images. ### Error: `self-signed certificate in certificate chain` CI/CD jobs fail with the following error: ```plaintext fatal: unable to access 'https://gitlab.example.com/group/project.git/': SSL certificate problem: self-signed certificate in certificate chain ``` However, the [OpenSSL debugging commands](https://docs.gitlab.com/omnibus/settings/ssl/ssl_troubleshooting/#useful-openssl-debugging-commands) do not detect any errors. This error might occur when Git connects through a proxy that `openssl s_client` troubleshooting commands do not use by default. To verify if Git uses a proxy to fetch the repository, enable debugging: ```yaml variables: GIT_CURL_VERBOSE: 1 ``` To prevent Git from using the proxy, set the `NO_PROXY` variable to include your GitLab hostname: ```yaml variables: NO_PROXY: gitlab.example.com ``` ================================================ FILE: docs/development/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Contribute to GitLab Runner development --- GitLab Runner is a Go binary which can operate in two modes: 1. GitLab Runner executing jobs locally ("instance" executor). 1. Runner manager delegating jobs to an autoscaled environment which uses GitLab Runner Helper to pull artifacts. For developing GitLab Runner in instance executor mode (1), the only setup required is a working Go environment. For developing GitLab Runner in Manager and Helper mode (2), setup also requires a Docker build environment. Additionally, running the Manager or Helper in Kubernetes requires a working cluster. The following instructions setup your Go environment using `mise` to manage the Go version. 
If you already have this or otherwise know what you're doing, you can skip step 2 ("Install dependencies and Go runtime"). In order to provide Docker and Kubernetes locally, Step 3 has you set up Rancher Desktop. If you don't need one or both you can skip step 3 ("Install Rancher Desktop") or just disable `k3s` (Kubernetes) in Rancher Desktop. ## Recommended Environment The recommended environment on which to install Go and Rancher Desktop for development is a local laptop or desktop. It is possible to use nested-virtualization to run Rancher Desktop in the cloud (which runs `k3s` in a VM) but it's more tricky to set up. ## Runner Shorts Video Tutorials You can also follow along with the Runner Shorts (~20 minute videos) on setting up and making a change: 1. Please read the [recommended environment](#recommended-environment) section above before beginning 1. [Setting up a GitLab Runner development environment](https://www.youtube.com/watch?v=-KlaXpUdJOI) 1. [Code walkthrough of GitLab Runner](https://www.youtube.com/watch?v=pEtfmZ0Ssc4) 1. [Making and testing locally a GitLab Runner change](https://www.youtube.com/watch?v=45H4WIuu8Fc) ## 1. Clone GitLab Runner ```shell git clone https://gitlab.com/gitlab-org/gitlab-runner.git ``` If you are developing for GitLab Runner in autoscaled mode (Manager and Helper) you might want to check out one or more of Taskscaler, Fleeting and associated plugins. To make local changes from one package visible to the others, use Go workspaces. ```shell git clone https://gitlab.com/gitlab-org/fleeting/taskscaler.git git clone https://gitlab.com/gitlab-org/fleeting/fleeting.git git clone https://gitlab.com/gitlab-org/fleeting/fleeting-plugin-aws.git git clone https://gitlab.com/gitlab-org/fleeting/fleeting-plugin-googlecompute.git go work init go work use gitlab-runner go work use taskscaler go work use fleeting go work use fleeting-plugin-aws go work use fleeting-plugin-googlecompute ``` ## 2. 
Install dependencies and Go runtime The GitLab Runner project uses [`mise`](https://mise.jdx.dev/) to manage dependencies. The simplest way to get your development environment set up is to use `mise`. {{< tabs >}} {{< tab title="mise" >}} ```shell cd gitlab-runner mise install ``` {{< /tab >}} {{< tab title="Debian/Ubuntu" >}} ```shell sudo apt-get install -y mercurial git-core wget make build-essential wget https://storage.googleapis.com/golang/go1.26.1.linux-amd64.tar.gz sudo tar -C /usr/local -xzf go*-*.tar.gz export PATH="$(go env GOBIN):$PATH" YQ_BINARY="yq_$(go env GOHOSTOS)_$(go env GOHOSTARCH).tar.gz" wget https://github.com/mikefarah/yq/releases/latest/download/${YQ_BINARY} sudo tar -C /usr/local -xzf ${YQ_BINARY} ``` {{< /tab >}} {{< tab title="CentOS" >}} ```shell sudo yum install mercurial wget make sudo yum groupinstall 'Development Tools' wget https://storage.googleapis.com/golang/go1.26.1.linux-amd64.tar.gz sudo tar -C /usr/local -xzf go*-*.tar.gz export PATH="$(go env GOBIN):$PATH" YQ_BINARY="yq_$(go env GOHOSTOS)_$(go env GOHOSTARCH).tar.gz" wget https://github.com/mikefarah/yq/releases/latest/download/${YQ_BINARY} sudo tar -C /usr/local -xzf ${YQ_BINARY} ``` {{< /tab >}} {{< tab title="macOS" >}} Using binary package: ```shell wget https://storage.googleapis.com/golang/go1.26.1.darwin-amd64.tar.gz sudo tar -C /usr/local -xzf go*-*.tar.gz export PATH="$(go env GOBIN):$PATH" YQ_BINARY="yq_$(go env GOHOSTOS)_$(go env GOHOSTARCH).tar.gz" wget https://github.com/mikefarah/yq/releases/latest/download/${YQ_BINARY} sudo tar -C /usr/local -xzf ${YQ_BINARY} ``` Using installation package: ```shell wget https://storage.googleapis.com/golang/go1.26.1.darwin-amd64.pkg open go*-*.pkg export PATH="$(go env GOBIN):$PATH" YQ_BINARY="yq_$(go env GOHOSTOS)_$(go env GOHOSTARCH).tar.gz" wget https://github.com/mikefarah/yq/releases/latest/download/${YQ_BINARY} sudo tar -C /usr/local -xzf ${YQ_BINARY} ``` {{< /tab 
>}} {{< tab title="FreeBSD" >}} ```shell pkg install go-1.26.1 gmake git mercurial export PATH="$(go env GOBIN):$PATH" YQ_BINARY="yq_$(go env GOHOSTOS)_$(go env GOHOSTARCH).tar.gz" wget https://github.com/mikefarah/yq/releases/latest/download/${YQ_BINARY} sudo tar -C /usr/local -xzf ${YQ_BINARY} ``` {{< /tab >}} {{< /tabs >}} ## 3. Install Rancher Desktop The Docker Engine is required to create a pre-built image that is embedded into GitLab Runner and loaded when using Docker executor. A local Kubernetes cluster is helpful for developing Kubernetes executor. Rancher Desktop provides both. To install Rancher Desktop, follow the [installation instructions](https://docs.rancherdesktop.io/getting-started/installation/) for your OS. > Be sure to configure Rancher Desktop to use `dockerd (moby)` and have `Administrative Access` enabled. > > ![rancher-configuration](img/rancher-configuration.png) ## 4. Install GitLab Runner dependencies ```shell make deps mise reshim ``` **For FreeBSD use `gmake deps`** ## 5. Build GitLab Runner Compile GitLab Runner using the Go toolchain: ```shell make runner-and-helper-bin-host ``` `make runner-and-helper-bin-host` is a superset of `make runner-bin-host` which in addition takes care of building the Runner Helper Docker archive dependencies. ## 6. Run GitLab Runner ```shell ./out/binaries/gitlab-runner run ``` You can use any of the usual command-line arguments (including `--debug`): ```shell ./out/binaries/gitlab-runner --debug run ``` ### Building the Docker images If you want to build the Docker images, run `make runner-and-helper-docker-host`, which will: 1. Build `gitlab-runner-helper` and create a helper Docker image from it. 1. Compile GitLab Runner for `linux/amd64`. 1. Build a DEB package for Runner. The official GitLab Runner images are based on Alpine and Ubuntu, and the Ubuntu image build uses the DEB package. 1. Build the Alpine and Ubuntu versions of the `gitlab/gitlab-runner` image. 
### New auto-scaling (Taskscaler) in GitLab Runner (since 15.6.0) The [Next Runner Auto-scaling Architecture](https://handbook.gitlab.com/handbook/engineering/architecture/design-documents/runner_scaling/#taskscaler-provider) adds a new mechanism for autoscaling which will work with all environments. It will replace all current autoscaling mechanisms (e.g. Docker Machine). This new mechanism is in a pre-alpha state and actively being developed. There are two new libraries being used in GitLab Runner: 1. [Taskscaler](https://gitlab.com/gitlab-org/fleeting/taskscaler) 1. [Fleeting](https://gitlab.com/gitlab-org/fleeting/fleeting) You don't need to check out these libraries to use GitLab Runner at HEAD, but some development in the autoscaling space may take place there. In addition to Taskscaler and Fleeting, there are a number of Fleeting Plugins which adapt GitLab Runner to specific cloud providers (e.g. Google Compute or AWS EC2). The written instructions above ("Clone GitLab Runner") show how to check out the code and the videos ("Runner Shorts") show how to use it. These instructions show how to use GitLab Runner with a plugin. Each plugin will come with instructions on how to build the binary and configure the underlying instance group. This work is being done in [this issue](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29400). The canonical build and configuration instructions will live with each plugin, but in the meantime, here are some general instructions. #### Build the plugin To run GitLab Runner with a plugin, generate an executable binary and place it on your system's `PATH`. To generate the binary, ensure `$GOPATH/bin` is on your `PATH`, then use `go install`. Each plugin contains a path to `./cmd/`. 
For example, from the `fleeting-plugin-aws` directory: ```shell cd cmd/fleeting-plugin-aws/ go install ``` If you manage go versions with mise, run this command after the binary generates: ```shell mise reshim ``` #### Use the plugin GitLab Runner is started in the usual way but specifies an `instance` executor. It also specifies under `plugin_config` and `connector_config` an Instance Group, its location, and some details about how to connect to the underlying instances. GitLab Runner should find the Instance Group and create an initial number of idle VMs. When a job is picked up by the configured instance runner, it will consume a running VM and replace it via AWS service calls in the `fleeting-plugin-aws` plugin. ```toml [[runners]] name = "local-taskrunner" url = "https://gitlab.com/" token = "REDACTED" executor = "instance" shell = "bash" [runners.autoscaler] max_use_count = 1 max_instances = 20 plugin = "fleeting-plugin-aws" # Fleeting plugin name as built above [1]. [runners.autoscaler.plugin_config] credentials_file = "/Users/josephburnett/.aws/credentials" # Credentials which can scale an Autoscaling Group (ASG) [2]. name = "jburnett-taskrunner-asg" # ASG name. project = "jburnett-ad8e5d54" # ASG project. region = "us-east-2" # ASG region. [runners.autoscaler.connector_config] username = "ubuntu" # ASG instance template username for login. [[runners.autoscaler.policy]] idle_count = 5 idle_time = 0 scale_factor = 0.0 scale_factor_limit = 0 ``` If you terminate GitLab Runner with SIGTERM you may see some of these processes hanging around. Instead terminate with SIGQUIT. Note that ASGs should have autoscaling disabled. GitLab Runner takes care of autoscaling via the Taskscaler library. ## 7. Run test suite locally GitLab Runner test suite consists of "core" tests and tests for executors. Tests for executors require certain binaries to be installed on your local machine. Some of these binaries cannot be installed on all operating systems. 
If a binary is not installed tests requiring this binary will be skipped. These are the binaries that you can install: 1. [VirtualBox](https://www.virtualbox.org/wiki/Downloads) and [Vagrant](https://developer.hashicorp.com/vagrant/install); the [Vagrant Parallels plugin](https://github.com/Parallels/vagrant-parallels) is also required 1. [kubectl](https://kubernetes.io/docs/tasks/tools/) with [minikube](https://github.com/kubernetes/minikube) 1. [Parallels Pro or Business edition](https://www.parallels.com/products/desktop/) 1. [PowerShell](https://learn.microsoft.com/en-us/powershell/) After installing the binaries run: ```shell make development_setup ``` To execute the tests run: ```shell make test ``` ### Kubernetes Integration tests To run correctly, some Kubernetes integration tests require specific configuration or runtime arguments of the Kubernetes cluster they run against. These tests will be skipped if the cluster configuration is incorrect. Below is a sample configuration for Kubernetes clusters that would commonly be used on a developer workstation: - `minikube` ```shell minikube delete minikube config set container-runtime containerd minikube config set feature-gates "ProcMountType=true" minikube start ``` - `k3s` ```shell k3s server --tls-san=k3s --kube-apiserver-arg=feature-gates=ProcMountType=true ``` ## 8. Run tests with helper image version of choice If you are developing functionality inside a helper, you'll most likely want to run tests with the version of the Docker image that contains the newest changes. If you run tests without passing `-ldflags`, the default version in `version.go` is `development`. This means that the runner defaults to pulling a [helper image](../configuration/advanced-configuration.md#helper-image) with the `latest` tag. ### Make targets `make` targets inject `-ldflags` automatically. 
You can run all tests by using: ```shell make simple-test ``` `make` targets also inject `-ldflags` for `parallel_test_execute`, which is most commonly used by the CI/CD jobs. ### Custom `go test` arguments In case you want a more customized `go test` command, you can use `print_test_ldflags` as `make` target: ```shell go test -ldflags "$(make print_test_ldflags)" -run TestDockerCommandBuildCancel -v ./executors/docker/... ``` ### In GoLand Currently, GoLand doesn't support dynamic Go tool arguments, so you'll need to run `make print_ldflags` first and then paste it in the configuration. > [!note] > To use the debugger, make sure to remove the last two flags (`-s -w`). ### Local Docker images for runner and helper You can build runner and helper as a local Docker image using these commands: ```shell make runner-local-image # build only gitlab-runner:local make helper-local-image # build only gitlab-runner-helper:local make runner-and-helper-local-image # build both ``` After the build completes, you can use the images locally. ```shell docker image ls REPOSITORY TAG IMAGE ID CREATED SIZE gitlab-runner-helper local 1e0064619625 5 minutes ago 92.2MB gitlab-runner local 1261a052d4ad 5 minutes ago 195MB ``` ### Helper image with Kubernetes To use local images in a local Kubernetes cluster, you must set your Docker context appropriately. For example, with minikube: ```shell eval $(minikube docker-env) make runner-and-helper-local-image ``` ### Customization of the local images The targets focus on convenience, not completeness. Not all available runner and helper configurations can be created with these `make` targets. The targets support only Linux image creation. The target architecture defaults to the host machine architecture. The base runner image version defaults to the version specified in the CI/CD configuration. You can test variations by setting environment variables. 
For guidance on possible values, check the available base images in [the base images container registry](https://gitlab.com/gitlab-org/ci-cd/runner-tools/base-images/container_registry). Examples: ```shell # Make an ubuntu-based runner and helper LOCAL_FLAVOR=ubuntu make runner-and-helper-local-image # Specify a version and flavor RUNNER_IMAGES_VERSION=0.0.1 LOCAL_FLAVOR=ubuntu make runner-and-helper-local-image # make an ubuntu helper image with pwsh # NOTE: This flavor is only supported for the helper, not the # runner, and only on amd64 LOCAL_FLAVOR=ubuntu-pwsh LOCAL_ARCH=amd64 make helper-local-image ``` While these environment variables provide flexibility, the targets do not protect you from invalid configurations. For production scenarios, use images that the CI/CD pipeline creates. ## 9. Install optional tools - Install `golangci-lint`, used for the `make lint` target. - Install `markdown-lint` and `vale`, used for the `make lint-docs` target. Installation instructions will pop up when running a Makefile target if a tool is missing. ## 10. Contribute You can start hacking `gitlab-runner` code. If you need an IDE to edit and debug code, there are a few free suggestions you can use: - [JetBrains GoLand IDE](https://www.jetbrains.com/go/). - Visual Studio Code using the [workspace recommended extensions](https://code.visualstudio.com/docs/configure/extensions/extension-marketplace#_workspace-recommended-extensions), located in `.vscode/extensions.json`. ## Managing build dependencies GitLab Runner uses [Go Modules](https://github.com/golang/go/wiki/Modules) to manage its dependencies. Don't add dependency from upstream default branch when version tags are available. 
## Tests The Runner codebase makes a distinction between [unit](https://en.wikipedia.org/wiki/Unit_testing) and [integration tests](https://en.wikipedia.org/wiki/Integration_testing) in the following way: - Unit test files have a suffix of `_test.go` and contain the following build directive in the header: ```go //go:build !integration ``` - Integration test files have a suffix of `_integration_test.go` and contain the following build directive in the header: ```go //go:build integration ``` They can be run by adding `-tags=integration` to the `go test` command. To test the state of the build directives in test files, `make check_test_directives` can be used. ### Running shell integration tests with custom credentials To run these tests locally with your own credentials, set an environment variable: ```shell export GITLAB_TEST_TOKEN="your-access-token" ``` Use either a personal or group access token with one of `read_repository`, `read_api`, or `api` permission. If you don't have access to the projects under [`gitlab-runner-pipeline-tests`](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-pipeline-tests), you can update the test URLs to point to your own project where your token has the required permissions. The project should be private and use a private repository as a submodule. For example, to run the `TestGitIncludePaths` test: ```shell go test -count=1 -v -run TestGitIncludePaths --tags=integration ./executors/shell ``` ## Developing for Windows on a non-windows environment We provide a [Vagrantfile](https://gitlab.com/gitlab-org/gitlab-runner/-/blob/main/Vagrantfile) to help you run a Windows Server 2019 or Windows 10 instance, since we are using [multiple machines](https://developer.hashicorp.com/vagrant/docs/multi-machine) inside of Vagrant. The following are required: - [Vagrant](https://developer.hashicorp.com/vagrant) installed. - [Virtualbox](https://www.virtualbox.org/) installed. - Around 30GB of free hard disk space on your computer. 
Which virtual machine to use depends on your use case: - The Windows Server machine has Docker pre-installed and should always be used when you are developing on GitLab Runner for Windows. - The Windows 10 machine is there for you to have a windows environment with a GUI which sometimes can help you debugging some Windows features. Note that you cannot have Docker running inside of Windows 10 because nested virtualization is not supported. Running `vagrant up windows_10` will start the Windows 10 machine for you. To: - SSH inside of the Windows 10 machine, run `vagrant ssh windows_10`. - Access the GUI for the Windows 10, you can connect via RDP by running `vagrant rdp windows_10`, which will connect to the machine using a locally installed RDP program. For both machines, the GitLab Runner source code is synced bi-directionally so that you can edit from your machine with your favorite editor. The source code can be found under the `$GOROOT` environment variable. We have a `RUNNER_SRC` environment variable which you can use to find out the full path so when using PowerShell, you can use `cd $Env:RUNNER_SRC`. ## Other resources 1. [Reviewing GitLab Runner merge requests](reviewing-gitlab-runner.md) 1. [Add support for new Windows Version](add-windows-version.md) 1. [Runner Group - Team Resources](https://handbook.gitlab.com/handbook/engineering/devops/runner/team-resources/#overview) ================================================ FILE: docs/development/add-windows-version.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Add Docker executor support for a Windows version --- GitLab supports [specific versions of Windows](../install/support-policy.md#windows-version-support). 
To add support for a new Windows version for the [Docker executor](../executors/docker.md), you must release a [helper image](../configuration/advanced-configuration.md#helper-image) with the same Windows version. Then you can run the helper image on the Windows host OS. To build the helper image for the version, you need GitLab Runner installed on that Windows version, because Windows requires your host OS and container OS versions to match. ## Infrastructure We must build the helper image for it to be used for the user job. ### Create a base image for infrastructure to use To add support for a new Windows version, you might need to create a new helper image. Windows versions can run older helper images (backward compatibility), or might require a newly built helper image. For compatibility details, see [Windows container version compatibility](https://learn.microsoft.com/en-us/virtualization/windowscontainers/deploy-containers/version-compatibility) To support a new host OS environment or helper image, update the [windows-containers](https://gitlab.com/gitlab-org/ci-cd/shared-runners/images/gcp/windows-containers) repository to build a base image. The [autoscaler](https://gitlab.com/gitlab-org/ci-cd/custom-executor-drivers/autoscaler) uses the base image to build the GitLab Runner helper image. For example, when adding support for Windows Server 2025, backward compatibility allowed reuse of the existing 2022 helper images. However, when adding support to Windows Server 2022, the Windows Server 2019 helper image was not compatible with process isolation, so a new image was required. Some GCP base images require Docker installation during the build process. To update the CI/CD environment for a new image, update the following files: - `.gitlab-ci.yml` - `.gitlab/ci/build.gitlab-ci.yml` ### Test the image generated We recommend testing the image generated in the `dev` step. It is likely to be named `dev xxx` where `xxx` stands for the windows server version. 
To test the image, the following steps can be followed: 1. Add support for the new windows server version in [`GitLab Runner project`](https://gitlab.com/gitlab-org/gitlab-runner) and generate the `gitlab-runner-helper.x86_64-windows.exe` binary. 1. Create a VM using the disk image generated during the `dev` step. When adding support for `windows server ltsc2022`, the disk image name was [`runners-windows-21h1-core-containers-dev-40-mr`](https://gitlab.com/gitlab-org/ci-cd/shared-runners/images/gcp/windows-containers/-/jobs/2333691567#L697) 1. Generate the `gitlab-runner-helper` Docker image from this VM. To do so, you need to download the `gitlab-runner-helper.x86_64-windows.exe` binary on the VM. As the `Invoke-WebRequest` PowerShell command might be unavailable, you should use the `Start-BitsTransfer` command instead. 1. Create another VM using the new GCP windows server image to support. 1. Install the `gitlab-runner` executable generated for the previously update `GitLab-Runner` project and register it to a project. 1. Successfully launch a job. An example of this procedure is summarized in [this comment](https://gitlab.com/gitlab-org/ci-cd/shared-runners/images/gcp/windows-containers/-/merge_requests/40#note_910281106). ### Publish the image After we merge the merge request created from the [previous step](#create-a-base-image-for-infrastructure-to-use), we need to run the [publish job](https://gitlab.com/gitlab-org/ci-cd/shared-runners/images/gcp/windows-containers/-/blob/120b30096b2db7bb445f69b1923e161b10b589e6/.gitlab/ci/build.gitlab-ci.yml#L155-166) manually for the image to be published to our production GCP project. Take note of the image name that is created from the `publish` job, for example in [this job](https://gitlab.com/gitlab-org/ci-cd/shared-runners/images/gcp/windows-containers/-/jobs/643514801) we created an image called `runners-windows-2019-core-containers-2020-07-17`. This will be used for the [install part](#install). 
### Add two new runner managers At this point we should have a base image ready in our production environment, so we can use it inside the CI pipeline for the GitLab Runner project. The only thing that is left is to set up the Runner Managers. #### Register Run [`gitlab-runner register`](../register/_index.md) to register the two new runners. These should be project-specific runners, so we need to use the registration token from the [project settings](https://gitlab.com/gitlab-org/gitlab-runner/-/settings/ci_cd). The name of the runner should follow the same naming convention as the existing ones. For example, for `Windows Server Core 2004` we should name the Runner Managers the following: 1. `windows-2004-private-runner-manager-1` 1. `windows-2004-private-runner-manager-2` Once registered, make sure you safely store the runner tokens found in the `config.toml` file since we are going to need these for the [installation](#install) step. Finally, we'll need to assign the new Runner Managers to the [security](https://gitlab.com/gitlab-org/security/gitlab-runner) fork project and to the ['liveness' test support](https://gitlab.com/gitlab-org/ci-cd/tests/liveness) project. So for each of the new Runner Managers: 1. Go to the Runners section of the [Runner project CI/CD settings page](https://gitlab.com/gitlab-org/gitlab-runner/-/settings/ci_cd); 1. Unlock the new Runner by editing its properties and unchecking `Lock to current projects`; 1. For the [security](https://gitlab.com/gitlab-org/security/gitlab-runner) fork project: 1. Go to the Runners section of the [project's CI/CD settings page](https://gitlab.com/gitlab-org/security/gitlab-runner/-/settings/ci_cd); 1. Scroll down to the `Other available runners` section and enable the runner for this project; 1. For the ['liveness' test support](https://gitlab.com/gitlab-org/ci-cd/tests/liveness) project: 1. 
Go to the Runners section of the [project's CI/CD settings page](https://gitlab.com/gitlab-org/ci-cd/tests/liveness/-/settings/ci_cd); 1. Scroll down to the `Other available runners` section and enable the runner for this project; 1. Lock the Runner back again in the [Runner project CI/CD settings page](https://gitlab.com/gitlab-org/gitlab-runner/-/settings/ci_cd). #### Install Install a new instance of [autoscaler](https://gitlab.com/gitlab-org/ci-cd/custom-executor-drivers/autoscaler) to have a specific `config.toml` for that Windows version. We need to update our Ansible repository (`https://ops.gitlab.net/gitlab-com/gl-infra/ci-infrastructure-windows`) to include the new Windows version. For example, if we want to add support for `Windows Server Core 2004` in the 13.7 milestone we can see this merge request: `https://ops.gitlab.net/gitlab-com/gl-infra/ci-infrastructure-windows/-/merge_requests/70`, where we update the following files: 1. `ansible/roles/runner/tasks/main.yml` 1. `ansible/roles/runner/tasks/autoscaler.yml` 1. `ansible/group_vars/gcp_role_runner_manager.yml` 1. `ansible/host_vars/windows-shared-runners-manager-1.yml` 1. `ansible/host_vars/windows-shared-runners-manager-2.yml` When opening a merge request make sure that the maintainer is aware that they need to [register](#register) 2 new runners and save them inside the CI/CD variables with the keys defined in `ansible/host_vars`. ## Publish `registry.gitlab.com/gitlab-org/ci-cd/tests/liveness` The image `registry.gitlab.com/gitlab-org/ci-cd/tests/liveness` is used as part of the CI process for [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-runner). Make sure that an image based on the new Windows version is published. For example, if we want to add support for `Windows Server Core 2004` in the 13.7 milestone we can see the following [merge request](https://gitlab.com/gitlab-org/ci-cd/tests/liveness/-/merge_requests/4), where we update the following files: 1. `.gitlab-ci.yml` 1. 
`Makefile` ## Update GitLab Runner to support specific Windows version Since we need to provide a helper image for users to be able to use the Docker executor we have specific checks inside the code base, we need to allow the new Windows version. We should update the following: 1. [List of support versions](https://gitlab.com/gitlab-org/gitlab-runner/-/blob/v13.4.1/helpers/container/windows/version.go#L38-42), and tests surrounding it. 1. [List of base images](https://gitlab.com/gitlab-org/gitlab-runner/-/blob/v13.4.1/helpers/container/helperimage/windows_info.go#L10-21), and tests surrounding it. 1. [Update GitLab CI to run tests on the default branch](https://gitlab.com/gitlab-org/gitlab-runner/-/blob/v13.4.1/.gitlab/ci/test.gitlab-ci.yml#L176-180). 1. [Update the `release` stage](https://gitlab.com/gitlab-org/gitlab-runner/-/blob/v13.4.1/.gitlab-ci.yml#L8). For example, if we want to add support for `Windows Server Core 2004` in the 13.7 milestone we can see the following [merge request](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/2459), where we update the following files: 1. `helpers/container/helperimage/windows_info.go` 1. `helpers/container/helperimage/windows_info_test.go` 1. `helpers/container/windows/version.go` 1. `helpers/container/windows/version_test.go` 1. `.gitlab/ci/test.gitlab-ci.yml` 1. `.gitlab/ci/coverage.gitlab-ci.yml` 1. `.gitlab/ci/_common.gitlab-ci.yml` 1. `.gitlab/ci/release.gitlab-ci.yml` 1. `ci/.test-failures.servercore2004.txt` 1. `docs/executors/docker.md` ================================================ FILE: docs/development/internal/ci/kubernetes_integration_tests.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Kubernetes integration tests --- Kubernetes integration tests run in GitLab Runner's CI/CD pipeline. 
These tests verify that the GitLab Runner works correctly with Kubernetes clusters. These tests run against a dedicated Kubernetes cluster managed by the [runner-Kubernetes-infra](https://gitlab.com/gitlab-org/ci-cd/runner-tools/runner-kubernetes-infra) repository. ## Test infrastructure ### Runner Kubernetes infrastructure repository The test infrastructure is hosted at: - Repository: - Purpose: Manages dedicated Kubernetes clusters for GitLab Runner integration testing - Cluster: `runner-k8s` in GCP (see internal documentation for project details and zone) The infrastructure uses a blue-green deployment model with two separate clusters to enable zero-downtime updates. ### Cluster configuration For detailed cluster configuration including node pools, resource limits, and autoscaling settings, see the [cluster configuration](https://gitlab.com/gitlab-org/ci-cd/runner-tools/runner-kubernetes-infra#cluster-configuration) section in the infrastructure repository. ## Pipeline structure ### Test pipeline stages The integration tests run through the following GitLab CI/CD stages: 1. Provision integration Kubernetes (`provision integration kubernetes`): - Provisions test-specific RBAC resources - Creates service account `k8s-runner-integration-tests-runner-$CI_PIPELINE_ID` - Executes `mage k8s:provisionIntegrationKubernetes $CI_PIPELINE_ID` 1. Integration test jobs (parallel execution): - `integration kubernetes`: Standard integration tests - `integration kubernetes exec legacy`: Tests with legacy execution strategy - `integration kubernetes attach`: Tests with attach execution strategy 1. 
Cleanup (`destroy integration kubernetes`): - Destroys test-specific resources - Executes `mage k8s:destroyIntegrationKubernetes $CI_PIPELINE_ID` ### Pipeline configuration The pipeline is defined in `.gitlab/ci/test-kubernetes-integration.gitlab-ci.yml`: ```yaml .integration kubernetes: extends: - .rules:merge_request_pipelines:no_docs:no-community-mr tags: - $KUBERNETES_RUNNER_INTEGRATION_TAG stage: test kubernetes integration variables: KUBERNETES_SERVICE_ACCOUNT_OVERWRITE: "k8s-runner-integration-tests-runner-$CI_PIPELINE_ID" ``` ### Test execution Integration tests are executed using `gotestsum`: ```shell gotestsum --format=testname --format-hide-empty-pkg --rerun-fails=3 \ --hide-summary=output --packages=gitlab.com/gitlab-org/gitlab-runner/executors/kubernetes \ --junitfile=junit_report.xml --junitfile-hide-empty-pkg -- \ -timeout=10m -parallel=20 $EXTRA_GO_TEST_FLAGS \ -tags=integration,kubernetes ./executors/kubernetes/... ``` Key parameters: - Timeout: 10 minutes per test - Parallel execution: Up to 20 tests simultaneously - Retry logic: Failing tests are retried up to 3 times - Build tags: `integration,kubernetes` ## Test categories ### Standard integration tests - Job: `integration kubernetes` - Purpose: Main integration test suite - Feature flags: Uses default feature flag configuration - Filter: Excludes feature flag-specific tests with `-skip=TestRunIntegrationTestsWithFeatureFlag` ### Legacy execution strategy tests - Job: `integration kubernetes exec legacy` - Feature flag: `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=true` - Filter: Only runs `TestRunIntegrationTestsWithFeatureFlag` - Purpose: Validates backward compatibility ### Attach strategy tests - Job: `integration kubernetes attach` - Feature flag: `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false` - Filter: Only runs `TestRunIntegrationTestsWithFeatureFlag` - Purpose: Tests the newer attach-based execution strategy ## RBAC and permissions ### Dynamic permission provisioning The 
provisioning system (`mage k8s:provisionIntegrationKubernetes`) analyzes the codebase to generate the minimal required RBAC permissions: 1. Code analysis: Scans `/executors/kubernetes/` for Kubernetes API calls 1. Permission generation: Creates the role YAML with only required permissions 1. Resource creation: Applies the generated RBAC to the `k8s-runner-integration-tests` namespace This system ensures tests use the same permissions as the code under test. ### Test-specific service accounts Each pipeline creates unique resources: - Service account: `k8s-runner-integration-tests-runner-$CI_PIPELINE_ID` - Role: Generated based on code analysis - Role binding: Links service account to generated role ### Administrative permissions Integration tests also use administrative RBAC for test management: - Service account: `integration-tests-admin` - Purpose: Create/delete test resources, observe cluster state - Scope: Additional permissions beyond normal runner operations ## Test implementation ### Test environment Tests run with the following environment variables: - `KUBERNETES_SERVICE_ACCOUNT_OVERWRITE`: Pipeline-specific service account - Feature flag variables (for feature flag tests) - Cluster connection details (managed by infrastructure) ## Resource management ### Automated cleanup The infrastructure includes automated cleanup mechanisms. For detailed information about CronJobs, scheduling, and configuration, see the [operational automation](https://gitlab.com/gitlab-org/ci-cd/runner-tools/runner-kubernetes-infra#operational-automation) section in the infrastructure repository. ### Resource isolation Tests use resource groups to prevent conflicts: - `"$CI_COMMIT_REF_SLUG-k8s-integration"` - `"$CI_COMMIT_REF_SLUG-k8s-integration-exec-legacy"` - `"$CI_COMMIT_REF_SLUG-k8s-integration-attach"` ## Monitoring and observability ### Metrics and logging The test infrastructure includes comprehensive monitoring and logging. 
For information on accessing Grafana, Prometheus dashboards, log aggregation with Loki, and the available `make` commands, see the [metrics](https://gitlab.com/gitlab-org/ci-cd/runner-tools/runner-kubernetes-infra#metrics) and [log collection](https://gitlab.com/gitlab-org/ci-cd/runner-tools/runner-kubernetes-infra#log-collection) sections in the infrastructure repository. ## Troubleshooting ### Common issues - Test timeouts: - Check cluster resource availability. - Verify worker pool scaling (0-6 nodes). - Review test parallelism settings. - RBAC permissions: - Ensure provisioning job succeeded. - Verify service account creation. - Check generated Role matches code requirements. - Resource conflicts: - Check resource group isolation. - Verify cleanup job execution. - Review pipeline-specific naming. ### Debugging steps 1. Check the infrastructure status. For more information about the `make` commands and infrastructure management, see [blue-green deployment](https://gitlab.com/gitlab-org/ci-cd/runner-tools/runner-kubernetes-infra#blue-green-deployment). 1. Review test logs: - Check pipeline job logs for specific failures. - Use Grafana dashboard for aggregated logs. For more information, see [log collection](https://gitlab.com/gitlab-org/ci-cd/runner-tools/runner-kubernetes-infra#log-collection). - Review `gotestsum` output for test-specific issues. 1. Validate RBAC: ```shell kubectl get sa,role,rolebinding -n k8s-runner-integration-tests kubectl describe role k8s-runner-integration-tests-runner-$CI_PIPELINE_ID -n k8s-runner-integration-tests ``` ## Running tests locally Integration tests are designed to run in the CI/CD environment with the dedicated infrastructure. Local execution requires: 1. Access to the GKE cluster. 1. Appropriate RBAC permissions. 1. Environment variables that match the CI/CD configuration. For local development, use unit tests or a local Kubernetes cluster (`kind/minikube`) with appropriate setup. 
## Related topics - [GitLab Runner Kubernetes executor](../../../executors/kubernetes/_index.md) - [Runner Kubernetes infrastructure repository](https://gitlab.com/gitlab-org/ci-cd/runner-tools/runner-kubernetes-infra) - [GitLab Runner Infrastructure Toolkit (GRIT)](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit) ================================================ FILE: docs/development/internal/ci/packages_iteration.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Packages Iteration --- The `PACKAGES_ITERATION` variable is used to set the iteration of the `deb` and `rpm` packages. From `fpm`'s help: > --iteration The iteration to give to the package. RPM calls this the 'release'. FreeBSD calls it 'PORTREVISION'. Debian calls this 'Debian_revision' The `PACKAGES_ITERATION` variable is intended to be incremented manually for cases where broken packages are released. Instead of removing the packages and re-releasing them, we can increment the iteration and release new packages. The mage target `package:verifyIterationVariable` is used to ensure that the `PACKAGES_ITERATION` variable is set correctly across branches. For the rules governing `PACKAGES_ITERATION`, read the documentation on the `VerifyIterationVariable` function. The workflow is as follows: - In `main` the `PACKAGES_ITERATION` value will always be `1`. - When a stable branch is created, the `PACKAGES_ITERATION` value will be `1`. - When a tag is created from the stable branch the `PACKAGES_ITERATION` value will be `1`. 
- When we need to release a new package - The tag will be deleted - The `PACKAGES_ITERATION` value will be incremented in the stable branch - The tag will be recreated - If we need to merge the stable branch back to main the `PACKAGES_ITERATION` value will be set to `1` beforehand, otherwise the `package` jobs will fail because the iteration checks will fail ## Future iterations We could make the iteration check automatic by checking the published packages, but technically we need to check all of them to make sure we know the highest iteration, so we can increment off of that. This sounds time-consuming in terms of CI time. For now we'll stick to the manual approach. ================================================ FILE: docs/development/internal/engineering/executor_interface/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Internal Executor Interface --- > [!note] > As this is documentation of the code internals, it's easier to get it outdated than > documentation of configuration, behaviors or features that we expose to the users. This > page is accurate as of the date of creation: **2022-01-26**. ## Interfaces GitLab Runner uses a concept of what we name `executors` to define a way of how a job may be executed. While the current philosophy behind GitLab CI/CD job execution is that _everything is a shell script_, this script may be executed in different ways, for example: - in a shell directly on a host where GitLab Runner is working, - in a shell on an external host available through SSH, - in a shell in a virtual machine managed by VirtualBox or Parallels, - in a shell in a container managed by Docker, and a few others. There is also the _Custom Executor_, which allows the user to interact with a very simple externally exposed interface to implement their own way of job execution. 
All of these _executors_ are orchestrated internally by GitLab Runner process. And for that Runner is using a set of Go interfaces that need to be implemented by the executor to work. The two main interfaces (part of the `common` package) that manage an executor's lifetime and job execution are: - `Executor` - `ExecutorProvider` ```go type Executor interface { // Shell returns data about the shell and scripts this executor is bound to. Shell() *ShellScriptInfo // Prepare prepares the environment for build execution. e.g. connects to SSH, creates containers. Prepare(options ExecutorPrepareOptions) error // Run executes a command on the prepared environment. Run(cmd ExecutorCommand) error // Finish marks the build execution as finished. Finish(err error) // Cleanup cleans any resources left by build execution. Cleanup() // GetCurrentStage returns current stage of build execution. GetCurrentStage() ExecutorStage // SetCurrentStage sets the current stage of build execution. SetCurrentStage(stage ExecutorStage) } type ExecutorProvider interface { // CanCreate returns whether the executor provider has the necessary data to create an executor. CanCreate() bool // Create creates a new executor. No resource allocation happens. Create() Executor // Acquire acquires the necessary resources for the executor to run, e.g. finds a virtual machine. Acquire(config *RunnerConfig) (ExecutorData, error) // Release releases any resources locked by Acquire. Release(config *RunnerConfig, data ExecutorData) // GetFeatures returns metadata about the features the executor supports, e.g. variables, services, shell. GetFeatures(features *FeaturesInfo) error // GetConfigInfo extracts metadata about the config the executor is using, e.g. GPUs. GetConfigInfo(input *RunnerConfig, output *ConfigInfo) // GetDefaultShell returns the name of the default shell for the executor. 
GetDefaultShell() string } ``` All the existing executors are also extending the `executors.AbstractExecutor` struct (named `AbstractExecutor` further in this document), which implements a small, common set of features. While there is no protection in code that would ensure usage of `AbstractExecutor` (until the new code implements the interfaces - it will work), it's expected that the new executors will extend it - to ensure consistent behavior of some features across executors. For convenience there is also the `executors.DefaultExecutorProvider` that implements the `ExecutorProvider` interface and is suitable for most cases. However, each executor may decide to implement its _provider_ independently (which in fact is currently done only by the Docker Machine executor). What's important, because both `Executor` and `ExecutorProvider` are interfaces, the implementation allows to "stack" different structs. The usage of this possibility will be shown with one of the examples. ### `Executor` interface The `Executor` interface is responsible for the job execution management. The described methods are managing preparation of the job environment (`Prepare()`), job script executions (`Run()` and `Finish()`; note: subsequent job steps are executed with a separate `Run()` calls) and job environment cleanup (`Cleanup()`). It also provides integration for internal Prometheus metrics exporter to label some relevant metrics with information about the current executor usage stage (`GetCurrentStage()`, `SetCurrentStage()`). The `Shell()` method is currently used in one place, and it's fully implemented in the mentioned `AbstractExecutor` struct. Given the existing implementation and evolution of different executors over time, it seems that this method should be pulled off the executor interface and handled in some different way. Hopefully - in a way that will enforce usage of `AbstractExecutor`. Usage of the interface, in very simplification, goes as follows: 1. 
The instance of an _executor_ was provided and assigned to a received job. 1. `Shell()` is called to get the configuration of a shell. It's used to prepare all the scripts that will be executed for the job. 1. `Prepare()` is called to prepare the job environment (for example creating a Kubernetes Pod, a set of Docker containers or a VirtualBox VM). It's also a place for the specific executor implementation to handle its own preparation. Through the usage of `AbstractExecutor` all the executors will also get access to some common features like for example job trace object. 1. `Run()` is called several times, each time containing details about the script for a job execution step to be executed with the executor. 1. `Finish()` is called after execution of all job stages is done and when the job is being marked as finished. Some executors may make use of this moment. Most of them defer to `AbstractExecutor`. 1. `Cleanup()` is called to clean up the job environment. It's the opposite of `Prepare()`. Additionally `SetCurrentStage()` is called internally by the executors (however most of them defer to `AbstractExecutor`) to mark on what _executor usage stage_ the system is now within this executor instance. And `GetCurrentStage()` is called externally in random moments by the metrics collector. The value is then used to summarize information about different jobs and label some of the metrics. ### `ExecutorProvider` interface The `ExecutorProvider` interface is responsible for preparation of the executor itself. It builds an abstraction around the `Executor` concept. With this abstraction, what the user configures with the `config.toml` `executor` setting is in fact the executor provider. And then for every job executed by the runner a new, independent instance of the _executor_ is prepared. The maintenance of the _executor_ is done by the `ExecutorProvider`. 
The described methods are managing creation of the executor instance (`CanCreate()`, `Create()`), reservation of provider's resources for a potential job (`Acquire()`, `Release()`). There is also support for gathering some information that should be reported to GitLab when requesting jobs (`GetFeatures()`, `GetConfigInfo()`). And finally a method that gives information about the shell that should be used with the provided executor (`GetDefaultShell()`). Usage of the interface, in very simplification, goes as follows: 1. `CanCreate()`, `GetFeatures()` and `GetDefaultShell()` are executed at the provider registration to validate that the provider is able to work in general. 1. Before requesting a new job for the specific `[[runners]]` worker the `Acquire()` is called to check and do a reservation of provider resources for the job. This is a place where the provider may control its capacity and return information about some preallocated resources. 1. `GetFeatures()` is called several times to ensure that information about features supported by Runner can be sent back with different API requests to GitLab. One of the calls is made when preparing the initial request for a job. 1. Same goes for the `GetConfigInfo()` which is called only once, when preparing the initial request for a job. It allows sending some information about used configuration to GitLab. 1. Same goes for the `GetDefaultShell()` which is also called only once, when preparing the initial request for a job. It allows sending information about used shell to GitLab. 1. If the job was received, its preparation is started and at some moment `Create()` is called to create a new instance of the executor. 1. When the job execution is fully done, `Release()` is called. This is a place where the provider may handle releasing resources that were previously reserved for the job. List of features that can be reported to GitLab can be found in the `FeaturesInfo` struct in `common/network.go`. 
## `DefaultExecutorProvider` As `DefaultExecutorProvider` is currently one of two existing implementations of `ExecutorProvider` interface and is used by most of the executors, let's describe how it's built. ```go type DefaultExecutorProvider struct { Creator func() common.Executor FeaturesUpdater func(features *common.FeaturesInfo) ConfigUpdater func(input *common.RunnerConfig, output *common.ConfigInfo) DefaultShellName string } ``` The `Creator` is the most important part. It's a function that returns a new instance of the given `Executor` interface implementation. It is being implemented by each of the executors. It's required to be implemented. The interface's `CanCreate()` method will fail if the `Creator` is left empty. Call to provider's `Create()` is proxied to the `Creator` function. `FeaturesUpdater` and `ConfigUpdater` are functions that allow requesting the feature and config information. All executors are using these functions to expose information about supported features or config details. The `FeaturesUpdater` is optional and every executor has to report which features from the list are supported. `ConfigUpdater` is optional and can be skipped. `DefaultShellName` must be set by every executor. Provider's `GetFeatures()`, `GetConfigInfo()`, `GetDefaultShell()` calls will use the defined updaters and the shell name to expose needed data to the caller. `Acquire()` and `Release()` are a NOOP. `DefaultExecutorProvider` doesn't use the concept of resources management and simply creates a new instance of the executor for every call. ## Usage examples ### Shell _Shell executor_ is the simplest executor that GitLab Runner provides. It executes the job script in a simple shell process, created directly on the host where GitLab Runner is running itself. There is no virtualization, no containers, no network communication here. #### ExecutorProvider Shell executor uses the `DefaultExecutorProvider`. 
It reports usage of very limited number of features (two in all cases, two more if the platform is not `windows`). It doesn't expose any configuration details. The shell depends on what's the default value for the platform where the Runner is operating. It's configured as a login shell. #### Executor `Prepare()` doesn't have anything specific. As the shell executor executes everything directly in the system where Runner process exists, it just makes sure that the builds and cache paths are usable. After that it defers to `AbstractExecutor` steps of preparation. `Run()` uses the provided script details to construct `os/exec.Cmd` call. Shell executor ensures that STDIN/STDOUT/STDERR are passed properly between the script execution shell process started by that call and the job trace object. It also detects the exit code of the command and reports it back as expected by the interface. There is no custom implementation of `Finish()` nor `Cleanup()`. The executor defers to the common steps in `AbstractExecutor.` ### Docker _Docker executor_ is probably the most powerful and mature of GitLab Runner executors. It supports most of the features available in `.gitlab-ci.yml`. It allows to run every job in an environment separated from other jobs. All jobs are however executed on one host and the capacity of the runner is limited by that host's available resources. Docker executor comes with a special variant - the SSH one. To make this documentation easier to understand (as the executor descriptions are just examples to help understand how the executor interface works) we will describe just the "normal" variant of Docker executor. There is also the `windows` variant of the executor. We will not include its details in this description as well. In Docker executor the jobs are executed in Docker containers. Each job gets a set of connected containers sharing at least one volume with the working directory. The main container is created from the image specified by the user. 
It needs to expose a shell where Runner will execute the script. Additionally, Runner will create what we call `predefined` container from the `helper` image provided by Runner. This container will be used to execute scripts handling common tasks like updating the Git and Git LFS sources, operating with cache and operating with artifacts. Depending on the job configuration Runner may create more containers for the defined `services`. These will be linked by the networking to the main container, so that the job script can utilize network available services exposed by them. #### ExecutorProvider Docker executor also uses the `DefaultExecutorProvider`. It reports usage of few more executor-related features, and additionally it reports some configuration details. The shell is hardcoded and differs between the platforms. In case of the most popular `linux` variant of Docker executor, it's configured as a non-login shell. #### Executor `Prepare()` is highly utilized in this executor. During that step Runner will prepare different internal tools (like volumes manager or network manager) and set up the basic configuration that will be next used by the containers for job execution. It's also the step when all the images defined for the job are pulled. Creation of volumes, device binding and service containers also happens during that step. After `Prepare()` is done the environment should be fully ready to start creating predefined/job step execution containers, connecting them to the whole stack and execute scripts in them. `Run()` creates predefined or job step containers, attaches to them and executes the script in a shell that should be running as the main process of the container. It proxies the STDOUT and STDERR of the containers to the job trace object. It also uses the Docker Engine API to detect the script execution exit code. `Finish()` doesn't have any custom behavior here, and it just defers to the `AbstractExecutor`. 
`Cleanup()` is the opposite of `Prepare()`, so it removes all the defined resources like containers, volumes (that were not configured as persistent), job specific network (if used). ### Docker Machine (autoscaling capabilities) _Docker Machine executor_ is in fact an autoscaling provider built on top of the regular _Docker executor_. It takes advantage of the interface concept and encapsulates the Docker executor in itself. Responsibility of Docker Machine executor is mostly focused on the `ExecutorProvider` interface. With that it manages a pool of VMs with Docker Engine running on them. Management is done by using the _Docker Machine_ tool by running an `os/exec.Cmd` calls to it. Management of the VMs may be done in "on-demand" or "autoscaled in background" modes. Chosen mode depends on configuration provided by the user. In the first mode the VMs will be created for each received job, until the limit of jobs is reached. In the second mode it will maintain a configurable set of `Idle` VMs that await for jobs. Jobs are then requested only when there is at least one `Idle` VM. When one `Idle` VM is taken for a received job, another is created to replace it. When the VM is returned to the pool (if configured to do so) and the number of `Idle` exceeds the defined limit, the provider starts to remove some of them. This loop that tries to maintain the desired number of `Idle` VMs and desired total number of managed VMs works all the time in the background. Docker Machine executor is currently implemented in a way that it allows execution of only one job at once on a single VM. For the execution of jobs Docker Machine executor uses the Docker executor and fact, that one can configure access credentials of the Docker Engine API. With that the Docker Machine provider manages the VMs, chooses a VM for a job and instantiates the Docker executor, automatically configuring it to use the credentials and API endpoint of the VM. 
With that jobs are executed like with the normal Docker executor (supporting all the different features available for it in `.gitlab-ci.yml` syntax), but does that on an external host, independent for each job. #### ExecutorProvider Docker Machine executor brings its own implementation of the `ExecutorProvider` interface! However, as it internally uses the Docker executor, it also instantiates the Docker executor provider (which itself is the specific configuration of `DefaultExecutorProvider`) and either proxies some calls to it directly or calls it internally for its own purpose. `CanCreate()` is proxied directly to Docker executor. Same goes for `GetFeatures()`, `GetConfigInfo()` and `GetDefaultShell()`. `Create()` is very simple as it returns the `machineExecutor` (implementation of the `Executor` interface) with access to itself, so that steps like `Prepare()` or `Cleanup()` can use it to maintain the autoscaled VMs (more about that will be described below). This provider is also the one that finally makes use of `Acquire()` and `Release()` methods of the `ExecutorProvider` interface. Behavior of `Acquire()` depends on the configured mode. In the "on-demand" mode it's used as a place to kick one of the old machines cleanup calls. It doesn't do any real acquiring and even logs that with `IdleCount is set to 0 so the machine will be created on demand in job context` (this is not user facing and available in the Runner process logs). With that the provider will try to create the VM in context of the job. If there is anything that will cause a failure: exceeding the defined limits in autoscaling configuration, wrong autoscaling configuration, cloud provider errors, Docker Engine availability problems - it will cause a failure of the job. In the "autoscaled in background" mode, it will check if there is any `Idle` VM that is available. If it is, it will reserve it and allow Runner to send a request for new job. 
If a job is received, it will get information about the acquired VM. If there are no available `Idle` VMs, then the call to `Acquire()` will fail, which will prevent Runner from sending a request for a job (and in Runner logs will be logged as the `no free machines that can process builds` warning). `Release()` behaves the same in both modes. It will check if the VM that was used for the job is applicable for removal and will trigger a remove in that case. In other cases, it will signal the internal autoscaling coordination mechanism that the VM was released and it's back in the `Idle` pool, so that it can be used again. #### Executor The `Executor` interface implementation is also a mix of a code specific for Docker Machine executor and encapsulation of Docker executor. Docker Machine executor injects all the work needed to maintain, choose and use the VMs and to configure the dedicated Docker executor instance, and then it depends on this executor to handle the rest. `Shell()` call defers to Docker executor, which itself defers to `AbstractExecutor` (as all the executors do). `Prepare()` prepares the VM to use. Depending on the configured mode it may mean using the preallocated VM or creating it on-demand. In the "on-demand" mode this is the place where eventual failure caused by VM creation may fail the job. Having the VM details it updates the configuration of Docker executor by pointing the host and credentials to access Docker Engine and instantiates the Docker executor provider. Finally, it calls Docker Executor's `Prepare()` to handle all the job environment preparation as it was described in the previous example. `Run()` and `Finish()` have no specific behavior. They simply proxy the call to the internal Docker executor. `GetCurrentStage()` and `SetCurrentStage()` are also proxies to the Docker executor, which itself defers to the `AbstractExecutor` implementation. Finally, the `Cleanup()` call does two things. 
First, it internally calls Docker executor's `Cleanup()` method to clean the job environment on the VM as it was described in the previous example. Then it calls the provider's `Release()` to signal that the job is done and that the VM can be released. ================================================ FILE: docs/development/reviewing-gitlab-runner.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Reviewing GitLab Runner --- This document contains rules and suggestions for GitLab Runner project reviewers. ## Reviewing tests coverage reports In the GitLab Runner project, we have a lot of code. Unfortunately, the code coverage is not comprehensive. Currently (early 2019), the coverage is on the level of ~55%. While adding tests to legacy code is a hard task, we should ensure that new code that is being added to the project has good tests coverage. Code reviewers are encouraged to look at the coverage reports and ensure new code is covered. We should aim for as much test coverage for new code as possible. Defining the level of required coverage for a specific change is left to the reviewer's judgment. Sometimes 100% coverage will be something simple to achieve. Sometimes adding code with only 20% of the coverage will be realistic and will ensure that the most important things are being tested. Dear reviewer - choose wisely :) Getting back to the technical details... The GitLab Runner CI/CD pipeline helps us here and provides the coverage reports in HTML format, for tests executed in regular (`count`) and race (`atomic`) modes. We have two types of the reports: containing `.race` and `.regular` as part of the filename. The files are tracking output of `go test` command executed with coverage options. 
The `.race.` files contain sources and reports for tests started with `-race` flag, while the `.regular.` files are sources and reports for tests started without this option. For those who are interested in details, the `-race` tests are using `atomic` coverage mode, while the standard tests are using `count` coverage mode. For our case, the `coverage/coverprofile.regular.html` file is what we should look at. `.race.` tests can fail in race condition situations (this is why we're executing them) and currently we have several of them that are constantly failing. This means that the coverage profile may not be full. The `.regular.` tests, instead, should give us the full overview of what's tested inside our code. To view a code coverage report for a merge request: 1. In the merge request's **Overview** tab, under the pipeline result, click on **View exposed artifact** to expand the section. 1. Click on **Code Coverage**. 1. Use the artifact browser to navigate to the `out/coverage/` directory. For example, `https://gitlab.com/gitlab-org/gitlab-runner/-/jobs/172824578/artifacts/browse/out/coverage/`. This directory will always contain six files - three `.race.` files and three `.regular.` files. For reviewing changes, we're mostly interested in looking at the `.regular.` HTML report (the `coverprofile.regular.html` file). As you can see, all files are visible as external links, so for our example we will open `https://gitlab.com/gitlab-org/gitlab-runner/-/jobs/172824578/artifacts/file/out/coverage/coverprofile.regular.html` which will redirect us to `https://gitlab-org.gitlab.io/-/gitlab-runner/-/jobs/172824578/artifacts/out/coverage/coverprofile.regular.html` where the report is stored. The coverage data should be also [visible in the merge request UI](https://docs.gitlab.com/ci/testing/code_coverage/). 
## Reviewing the merge request title Because we generate [`CHANGELOG.md`](https://gitlab.com/gitlab-org/gitlab-runner/-/blob/main/CHANGELOG.md) entries from the merge request titles, making sure that the title is valid and informative is a part of the reviewer and maintainer's responsibilities. Before merging a merge request, check the title and update it if you think it will not be clear in the `CHANGELOG.md` file. Keep in mind that the changelog will have only this one line, without the merge request description, discussion or diff that provide more context. As an example, look at and compare: - `yml to yaml` - which is the original title and was added to changelog with our script, - `Fix values.yaml filename in documentation` - which is what I've updated it to in the changelog. What will `yml to yaml` tell a GitLab Runner administrator if they review the changelog before updating to a newer version? Does it show the risks behind the update, the implemented behavior changes, a new behavior/features that were added? Keep these questions in mind when reviewing the merge request and its title. Contributors may not be aware of the above information, and that their titles may not match our requirements. Try to educate the contributor about this. In the end, it's your responsibility to verify and update the title **before the merge request is merged**. ## Reviewing the merge request labels We use labels assigned to merge requests to group changelog entries in different groups and define some special features of individual entries. For changelog generation we're using our own [Changelog generator](https://gitlab.com/gitlab-org/ci-cd/runner-tools/gitlab-changelog). The tool is using [a configuration file](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/.gitlab/changelog.yml) that is committed to the GitLab Runner repository. 
There are a few important things that the reviewer should know about Changelog generator: - GitLab Changelog analyzes merge request labels in the order in which `label_matchers` are defined. First matched scope is used for the analyzed merge request. For example, if there were two merge requests - the first one containing labels `security` and `bug`, the second one containing only the `bug` label - and there were three matchers defined in this order: `[security, bug] -> [security] -> [bug]`, then the first merge request would be added to the scope matched by `[security, bug]` (so the first defined on the list) and the second merge request would be added to the scope matched by `[bug]` (so the last defined scope on the list). - Merge requests labeled with labels defined at `authorship_labels` will be added to the changelog with the author's username added at the end. All `authorship_labels` labels need to be added to the merge request for it to be marked in this way. - Merge requests labeled with labels defined at `skip_changelog_labels` will be skipped in the changelog. All `skip_changelog_labels` labels need to be added to the merge request for it to be skipped. - Merge requests not matching any of the defined `label_matchers` are added to the `Other changes` scope bucket. Having all of that in mind, please follow these few rules when merging the merge request: - Any merge request related to how GitLab Runner or its parts are distributed should be labeled with the `runner-distribution` label. - Any merge request that touches security - no matter if it's a new feature or a bug fix - should have the `security` label. All merge requests that are not `feature::addition` will then be added to the security scope. - Any bug fix merge request should have the `bug` label. - In most merge requests that are not documentation update only or explicitly a bug fix, make sure that one of the `feature::` or `tooling::` labels is added. 
This will help us sort the changelog entries properly. - `documentation` label is added automatically when the Technical Writing review is done. **Even when the merge request updates more than only documentation**. If the merge request has only the `documentation` label and doesn't have any other label matching any of the defined `label_matchers` - double check that the merge request updates the documentation only. **Otherwise use one of the specific labels matching the type of the change that is being added!** - When you revert a change that was merged during the same release cycle, label the original merge request and the revert one with labels defined in `skip_changelog_labels`. This will reduce the manual work that the release manager needs to do when preparing the release. We should not add entries about adding a change and reverting the change if both events happened in the same version. If the revert merge request reverts something that was merged to an already released version of GitLab Runner, just make sure to label it with the right scope labels. In that case we want to mark the revert in the changelog. - Please also take a moment to read through [Engineering metrics data classification](https://handbook.gitlab.com/handbook/product/groups/product-analysis/engineering/metrics/#work-type-classification) page, which gives some guidance about when certain labels should be used. ## Summary Dear reviewer, you've got your sword. Now go fight with the dragons! 
================================================ FILE: docs/executors/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Executors --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} GitLab Runner implements different executors that can be used to run your builds in different environments: - [Kubernetes](kubernetes/_index.md) - [Docker](docker.md) - [Docker Autoscaler](docker_autoscaler.md) - [Instance](instance.md) [Other executors](#executors-in-maintenance-mode) are available that are not under active feature development. They receive critical security updates but no new features. > [!note] > Some features require a runner that uses [fleeting](../fleet_scaling/fleeting.md). The Docker Autoscaler > and instance executors use fleeting. You should migrate to one of these executors to take advantage > of the full range of GitLab Runner capabilities. If you are not sure about which executor to select, see [selecting the executor](#selecting-the-executor). For more information about features supported by each executor, see the [compatibility chart](#compatibility-chart). These executors are locked and we are no longer developing or accepting new ones. For more information, see [contributing new executors](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/CONTRIBUTING.md#contributing-new-executors). ## Selecting the executor The executors support different platforms and methodologies for building a project. The following diagram shows which executor to choose based on your operating system and platform: ```mermaid flowchart LR Start([Executor
Selection]) --> Auto{Autoscaling?} Auto -->|YES| Platform{Platform?} Auto -->|NO| BuildType{Build
Type?} Platform -->|Cloud
Native| K8s[Kubernetes] Platform -->|Cloud
VMs| OS1{OS?} OS1 -->|Linux| L1[Fleeting:
Docker Autoscaler
or Instance] OS1 -->|macOS| M1[Fleeting:
Docker Autoscaler
or Instance] OS1 -->|Windows| W1[Fleeting:
Docker Autoscaler
or Instance] BuildType -->|Container| OS2{OS?} BuildType -->|Shell| OS3{OS?} OS2 -->|Linux| L2[Docker
Podman] OS2 -->|macOS| M2[Docker] OS2 -->|Windows| W2[Docker] OS3 -->|Linux| L3[Bash
Zsh] OS3 -->|macOS| M3[Bash
Zsh] OS3 -->|Windows| W3[PowerShell 5.1
PowerShell 7.x] OS3 -->|Remote| R3[SSH
#40;maintenance mode#41;] classDef question fill:#e1f3fe,stroke:#333,stroke-width:2px,color:#000 classDef result fill:#dcffe4,stroke:#333,stroke-width:2px,color:#000 classDef start fill:#f9f9f9,stroke:#fff,stroke-width:2px,color:#000 class Start start; class Auto,Platform,BuildType,OS1,OS2,OS3 question; class K8s,L1,M1,W1,L2,M2,W2,L3,M3,W3,R3 result; ``` > [!warning] > SSH executor is in maintenance mode. It receives critical security updates but no new features > are planned. Also, it's among the least supported executors. For local shell-based builds, > consider using the Shell executor instead. The table below shows the key facts for each executor which helps you decide which executor to use: > [!note] > SSH, Shell, VirtualBox, Parallels, and Custom executors are in maintenance mode. > They receive critical security updates but no new features are planned. | Executor | Docker | Docker Autoscaler | Instance | Kubernetes | SSH | Shell | VirtualBox | Parallels | Custom | |:-------------------------------------------------|:------:|:-----------------:|-------------------------:|:--------------:|:----:|:--------------:|:--------------:|:--------------:|:------------------------:| | Clean build environment for every build | ✓ | ✓ | conditional 1 | ✓ | ✗ | ✗ | ✓ | ✓ | conditional 1 | | Reuse previous clone if it exists | ✓ | ✓ | conditional 1 | ✓ 2 | ✓ | ✓ | ✗ | ✗ | conditional 1 | | Runner file system access protected 3 | ✓ | ✓ | ✗ | ✓ | ✓ | ✗ | ✓ | ✓ | conditional | | Migrate runner machine | ✓ | ✓ | ✓ | ✓ | ✗ | ✗ | partial | partial | ✓ | | Zero-configuration support for concurrent builds | ✓ | ✓ | ✓ | ✓ | ✗ | ✗ 4 | ✓ | ✓ | conditional 1 | | Complicated build environments | ✓ | ✓ | ✗ 5 | ✓ | ✗ | ✗ 5 | ✓ 6 | ✓ 6 | ✓ | | Debugging build problems | medium | medium | medium | medium | easy | easy | hard | hard | medium | **Footnotes**: 1. Depends on the environment you are provisioning. Can be completely isolated or shared between builds. 1. 
Requires [persistent per-concurrency build volumes](kubernetes/_index.md#persistent-per-concurrency-build-volumes) configuration. 1. When a runner's file system access is not protected, jobs can access the entire system, including the runner's token and other jobs' cache and code. Executors marked ✓ don't allow the runner to access the file system by default. However, security flaws or certain configurations could allow jobs to break out of their container and access the file system hosting the runner. 1. If the builds use services installed on the build machine, selecting executors is possible but problematic. 1. Requires manual dependency installation. 1. For example, using [Vagrant](https://developer.hashicorp.com/vagrant/docs/providers/virtualbox "Vagrant documentation for VirtualBox"). ### Docker executor Docker executor provides clean build environments through containers. Dependency management is straightforward, with all dependencies packaged in the Docker image. This executor requires Docker installation on the Runner host. This executor supports additional [services](https://docs.gitlab.com/ci/services/) like MySQL. It also accommodates Podman as an alternative container runtime. This executor maintains consistent, isolated build environments. ### Docker Autoscaler executor The Docker Autoscaler executor is an autoscale-enabled Docker executor that creates instances on demand to accommodate the jobs that the runner manager processes. It wraps the [Docker executor](docker.md) so that all Docker executor options and features are supported. The Docker Autoscaler uses [fleeting plugins](https://gitlab.com/gitlab-org/fleeting/fleeting) to autoscale. Fleeting is an abstraction for a group of autoscaled instances, which uses plugins that support cloud providers, like Google Cloud, AWS, and Azure. This executor particularly suits environments with dynamic workload requirements. 
### Instance executor The Instance executor is an autoscale-enabled executor that creates instances on demand to accommodate the expected volume of jobs that the runner manager processes. This executor and the related Docker Autoscaler executor are the new autoscaling executors that work in conjunction with the GitLab Runner Fleeting and Taskscaler technologies. The Instance executor also uses [fleeting plugins](https://gitlab.com/gitlab-org/fleeting/fleeting) to autoscale. You can use the Instance executor when jobs need full access to the host instance, operating system, and attached devices. The Instance executor can also be configured to accommodate single-tenant and multi-tenant jobs. ### Kubernetes executor You can use the Kubernetes executor to use an existing Kubernetes cluster for your builds. The executor calls the Kubernetes cluster API and creates a new Pod (with a build container and services containers) for each GitLab CI/CD job. This executor particularly suits cloud-native environments, offering superior scalability and resource utilization. ## Executors in maintenance mode These executors receive critical security updates but no new features are planned: - [SSH](ssh.md) - [Shell](shell.md) - [Parallels](parallels.md) - [VirtualBox](virtualbox.md) - [Custom](custom.md) - [Docker Machine](docker_machine.md) (deprecated) ### Shell executor The Shell executor is the simplest configuration option for GitLab Runner. It executes jobs locally on the system where GitLab Runner is installed, requiring all dependencies to be manually installed on the same machine. This executor supports Bash for Linux, macOS, and FreeBSD operating systems, while offering PowerShell support for Windows environments. While ideal for builds with minimal dependencies, it only provides limited isolation between jobs. ### SSH executor The SSH executor is added for completeness, but it's among the least supported executors. 
When you use the SSH executor, GitLab Runner connects to an external server and runs the builds there. We have some success stories from organizations using this executor, but usually you should use one of the other types. ### Custom executor You can use the Custom executor to specify your own execution environments. When GitLab Runner does not provide an executor (for example, Linux containers), it allows you to use custom executables to provision and clean up environments. ### Docker Machine executor (deprecated) > [!warning] > This feature was [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/498268) in GitLab 17.5 > and is planned for removal in 20.0. Use [GitLab Runner Autoscaler](../runner_autoscale/_index.md) instead. The Docker Machine executor is a special version of the Docker executor with support for auto-scaling. It works like the typical Docker executor but with build hosts created on demand by Docker Machine. This capability makes it particularly effective in cloud environments like AWS EC2, offering excellent isolation and scalability for variable workloads. ## Compatibility chart Supported features by different executors. > [!note] > SSH, Shell, VirtualBox, Parallels, and Custom executors are in maintenance mode. > They receive critical security updates but no new features are planned. 
| Executor | Docker | Docker Autoscaler | Instance | Kubernetes | SSH | Shell | VirtualBox | Parallels | Custom | |:---------------------------------------------|:------:|:-----------------:|:--------------:|:----------:|:--------------:|:--------------:|:----------------:|:----------------:|:-----------------------------------------------------------:| | Secure Variables | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | | `.gitlab-ci.yml`: image | ✓ | ✓ | ✗ | ✓ | ✗ | ✗ | ✓ (1) | ✓ (1) | ✓ (by using [`$CUSTOM_ENV_CI_JOB_IMAGE`](custom.md#stages)) | | `.gitlab-ci.yml`: services | ✓ | ✓ | ✗ | ✓ | ✗ | ✗ | ✗ | ✗ | ✓ | | `.gitlab-ci.yml`: cache | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | | `.gitlab-ci.yml`: artifacts | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | | Passing artifacts between stages | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | | Use GitLab Container Registry private images | ✓ | ✓ | not applicable | ✓ | not applicable | not applicable | not applicable | not applicable | not applicable | | Interactive Web terminal | ✓ | ✗ | ✗ | ✓ | ✗ | ✓ | ✗ | ✗ | ✗ | **Footnotes**: 1. Support [added](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/1257) in GitLab Runner 14.2. Refer to the [Overriding the base VM image](../configuration/advanced-configuration.md#overriding-the-base-vm-image) section for further details. Supported systems by different shells: | Shells | Bash | PowerShell Desktop | PowerShell Core | Windows Batch (deprecated) | |:-------:|:--------------:|:------------------:|:----------------:|:--------------------------:| | Windows | ✗ 2 | ✓ 3 | ✓ 1,4 | ✓ | | Linux | ✓ 1 | ✗ | ✓ | ✗ | | macOS | ✓ 1 | ✗ | ✓ | ✗ | | FreeBSD | ✓ 1 | ✗ | ✗ | ✗ | **Footnotes:** 1. Default shell for runner registration and for jobs with the `shell` executor. 1. Bash shell is not supported on Windows. 1. Default shell for jobs with the `docker-windows` and `kubernetes` executors. 1. Default shell for jobs with the `shell` executor on Windows. 
Supported systems for interactive web terminals by different shells: | Shells | Bash | PowerShell Desktop | PowerShell Core | Windows Batch (deprecated) | | :-----: | :--: | :----------------: | :-------------: | :------------------------: | | Windows | ✗ | ✓ | ✓ | ✗ | | Linux | ✓ | ✗ | ✓ | ✗ | | macOS | ✓ | ✗ | ✓ | ✗ | | FreeBSD | ✓ | ✗ | ✗ | ✗ | ## Git requirements for non-Docker executors Executors that do not [rely on a helper image](../configuration/advanced-configuration.md#helper-image) require a Git installation on the target machine and in the `PATH`. Always use the [latest available version of Git](https://git-scm.com/downloads/). GitLab Runner uses the `git lfs` command if [Git LFS](https://git-lfs.com/) is installed on the target machine. Ensure Git LFS is up to date on any systems where GitLab Runner uses these executors. Be sure to initialize Git LFS for the user that executes GitLab Runner commands with `git lfs install`. You can initialize Git LFS on an entire system with `git lfs install --system`. To authenticate Git interactions with the GitLab instance, GitLab Runner uses [`CI_JOB_TOKEN`](https://docs.gitlab.com/ci/jobs/ci_job_token/). Depending on the [`FF_GIT_URLS_WITHOUT_TOKENS`](../configuration/feature-flags.md) setting, the last used credential might be cached in a pre-installed Git credential helper (for example [Git credential manager](https://github.com/git-ecosystem/git-credential-manager)) if such a helper is installed and configured to cache credentials: - When [`FF_GIT_URLS_WITHOUT_TOKENS`](../configuration/feature-flags.md) is `false`, the last used [`CI_JOB_TOKEN`](https://docs.gitlab.com/ci/jobs/ci_job_token/) is stored in pre-installed Git credential helpers. - When [`FF_GIT_URLS_WITHOUT_TOKENS`](../configuration/feature-flags.md) is `true`, the [`CI_JOB_TOKEN`](https://docs.gitlab.com/ci/jobs/ci_job_token/) is never stored or cached in any pre-installed Git credential helper. 
================================================ FILE: docs/executors/custom.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: The Custom executor --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} GitLab Runner provides the Custom executor for environments that it doesn't support natively. For example, `LXD` or `Libvirt`. You can create your own executor by configuring GitLab Runner to use some executable to provision, run, and clean up your environment. The scripts you configure for the custom executor are called `Drivers`. For example, you could create an [`LXD` driver](custom_examples/lxd.md) or a [`Libvirt` driver](custom_examples/libvirt.md). ## Configuration You can choose from a few configuration keys. Some of them are optional. Below is an example of configuration for the Custom executor using all available configuration keys: ```toml [[runners]] name = "custom" url = "https://gitlab.com" token = "TOKEN" executor = "custom" builds_dir = "/builds" cache_dir = "/cache" shell = "bash" [runners.custom] config_exec = "/path/to/config.sh" config_args = [ "SomeArg" ] config_exec_timeout = 200 prepare_exec = "/path/to/script.sh" prepare_args = [ "SomeArg" ] prepare_exec_timeout = 200 run_exec = "/path/to/binary" run_args = [ "SomeArg" ] cleanup_exec = "/path/to/executable" cleanup_args = [ "SomeArg" ] cleanup_exec_timeout = 200 graceful_kill_timeout = 200 force_kill_timeout = 200 ``` For field definitions and which ones are required, see [`[runners.custom]` section](../configuration/advanced-configuration.md#the-runnerscustom-section) configuration. In addition both `builds_dir` and `cache_dir` inside of the [`[[runners]]`](../configuration/advanced-configuration.md#the-runners-section) are required fields. 
## Prerequisite software for running a Job The user must set up the environment, including the following that must be present in the `PATH`: - [Git](https://git-scm.com/download) and [Git LFS](https://git-lfs.com/): see [common prerequisites](_index.md#git-requirements-for-non-docker-executors). - [GitLab Runner](../install/_index.md): Used to download/update artifacts and cache. ## Stages The Custom executor provides the stages to configure job details, prepare and clean up the environment, and run the job script in it. Each stage is responsible for specific things and has different things to keep in mind. Each stage executed by the Custom executor is executed at the time a builtin GitLab Runner executor would execute them. Each executed step has access to specific environment variables that provide information about the running job. All stages have the following environment variables available to them: - Standard CI/CD [environment variables](https://docs.gitlab.com/ci/variables/), including [predefined variables](https://docs.gitlab.com/ci/variables/predefined_variables/). - All environment variables provided by the Custom executor Runner host system. - All services and their [available settings](https://docs.gitlab.com/ci/services/#available-settings-for-services). Exposed in JSON format as `CUSTOM_ENV_CI_JOB_SERVICES`. Both CI/CD environment variables and predefined variables are prefixed with `CUSTOM_ENV_` to prevent conflicts with system environment variables. For example, `CI_BUILDS_DIR` is available as `CUSTOM_ENV_CI_BUILDS_DIR`. The stages run in the following sequence: 1. `config_exec` 1. `prepare_exec` 1. `run_exec` 1. `cleanup_exec` ### Services [Services](https://docs.gitlab.com/ci/services/) are exposed as a JSON array as `CUSTOM_ENV_CI_JOB_SERVICES`. 
Example: ```yaml custom: script: - echo $CUSTOM_ENV_CI_JOB_SERVICES services: - redis:latest - name: my-postgres:9.4 alias: pg entrypoint: ["path", "to", "entrypoint"] command: ["path", "to", "cmd"] ``` The example above sets the `CUSTOM_ENV_CI_JOB_SERVICES` environment variable with the following value: ```json [{"name":"redis:latest","alias":"","entrypoint":null,"command":null},{"name":"my-postgres:9.4","alias":"pg","entrypoint":["path","to","entrypoint"],"command":["path","to","cmd"]}] ``` ### Config The Config stage is executed by `config_exec`. Sometimes you might want to set some settings during execution time. For example setting a build directory depending on the project ID. `config_exec` reads from STDOUT and expects a valid JSON string with specific keys. For example: ```shell #!/usr/bin/env bash cat << EOS { "builds_dir": "/builds/${CUSTOM_ENV_CI_CONCURRENT_PROJECT_ID}/${CUSTOM_ENV_CI_PROJECT_PATH_SLUG}", "cache_dir": "/cache/${CUSTOM_ENV_CI_CONCURRENT_PROJECT_ID}/${CUSTOM_ENV_CI_PROJECT_PATH_SLUG}", "builds_dir_is_shared": true, "hostname": "custom-hostname", "driver": { "name": "test driver", "version": "v0.0.1" }, "job_env" : { "CUSTOM_ENVIRONMENT": "example" }, "shell": "bash" } EOS ``` Any additional keys inside of the JSON string are ignored. If it's not a valid JSON string the stage fails and retries two more times. | Parameter | Type | Required | Allowed empty | Description | |------------------------|---------|----------|----------------|-------------| | `builds_dir` | string | ✗ | ✗ | The base directory where the working directory of the job is created. | | `cache_dir` | string | ✗ | ✗ | The base directory where local cache is stored. | | `builds_dir_is_shared` | boolean | ✗ | not applicable | Defines whether the environment is shared between concurrent job or not. | | `hostname` | string | ✗ | ✓ | The hostname to associate with job's "metadata" stored by the runner. If undefined, the hostname is not set. 
| | `driver.name` | string | ✗ | ✓ | The user-defined name for the driver. Printed with the `Using custom executor...` line. If undefined, no information about driver is printed. | | `driver.version` | string | ✗ | ✓ | The user-defined version for the driver. Printed with the `Using custom executor...` line. If undefined, only the name information is printed. | | `job_env` | object | ✗ | ✓ | Name-value pairs that are available through environment variables to all subsequent stages of the job execution. They are available for the driver, not the job. For details, see [`job_env` usage](#job_env-usage). | | `shell` | string | ✗ | ✓ | The shell used to execute job scripts. | The `STDERR` of the executable prints to the job log. You can configure [`config_exec_timeout`](../configuration/advanced-configuration.md#the-runnerscustom-section) to set a deadline for how long GitLab Runner should wait to return the JSON string before terminating the process. If you define any [`config_args`](../configuration/advanced-configuration.md#the-runnerscustom-section), they are added to the `config_exec` executable in the same order you define them. For example, with this `config.toml` content: ```toml ... [runners.custom] ... config_exec = "/path/to/config" config_args = [ "Arg1", "Arg2" ] ... ``` GitLab Runner would execute it as `/path/to/config Arg1 Arg2`. #### `job_env` usage The main purpose of `job_env` configuration is to pass variables **to the context of custom executor driver calls** for subsequent stages of the job execution. For example, a driver where connection with the job execution environment requires preparing some credentials. This operation is expensive. The driver must request temporary SSH credentials from a local provider before connecting to the environment. With Custom Executor execution flow, each job execution [stage](#stages) (`prepare`, multiple `run` calls, and `cleanup`) runs as separate executions with its own context. 
For our credentials resolving example, connection to the credentials provider needs to be done each time. If this operation is expensive, do it once for a whole job execution, and then re-use the credentials for all job execution stages. The `job_env` can help here. With this you can connect with the provider once, during the `config_exec` call and then pass the received credentials with the `job_env`. Next, they are added to the list of variables that the custom executor calls for [`prepare_exec`](#prepare), [`run_exec`](#run) and [`cleanup_exec`](#cleanup) are receiving. With this, the driver instead of connecting to the credentials provider each time may just read the variables and use the credentials that are present. The important thing to understand is that **the variables are not automatically available for the job itself**. It fully depends on how the Custom Executor Driver is implemented, and in many cases it is not present there. For information about how to pass a set of variables to every job executed by a particular runner by using the `job_env` setting, see [`environment` setting from `[[runners]]`](../configuration/advanced-configuration.md#the-runners-section). If the variables are dynamic with values that might change between jobs, ensure your driver implementation adds the variables passed by `job_env` to the execution call. ### Prepare The Prepare stage is executed by `prepare_exec`. At this point, GitLab Runner knows everything about the job (where and how it runs). The only thing left is for the environment to be set up so the job can run. GitLab Runner runs the executable specified in `prepare_exec`. This action is responsible for setting up the environment (for example, creating the virtual machine or container, services or anything else). After this is done, we expect that the environment is ready to run the job. This stage is executed only once, in a job execution. 
You can configure [`prepare_exec_timeout`](../configuration/advanced-configuration.md#the-runnerscustom-section) to set a deadline for how long GitLab Runner should wait to prepare the environment before terminating the process. The `STDOUT` and `STDERR` returned from this executable prints to the job log. If you define any [`prepare_exec_args`](../configuration/advanced-configuration.md#the-runnerscustom-section), they are added to the `prepare_exec` executable in the same order you define them. For example, with this `config.toml` content: ```toml ... [runners.custom] ... prepare_exec = "/path/to/bin" prepare_args = [ "Arg1", "Arg2" ] ... ``` GitLab Runner would execute it as `/path/to/bin Arg1 Arg2`. ### Run The Run stage is executed by `run_exec`. The `STDOUT` and `STDERR` returned from this executable prints to the job log. Unlike the other stages, the `run_exec` stage is executed multiple times, because it's split into sub stages listed below in sequential order: 1. `prepare_script` 1. `get_sources` 1. `restore_cache` 1. `download_artifacts` 1. `step_*` 1. `build_script` 1. `step_*` 1. `after_script` 1. `archive_cache` OR `archive_cache_on_failure` 1. `upload_artifacts_on_success` OR `upload_artifacts_on_failure` 1. `cleanup_file_variables` For each stage mentioned above, the `run_exec` executable is executed with: - The usual environment variables. - Two arguments: - The path to the script that GitLab Runner creates for the Custom executor to run. - Name of the stage. For example: ```shell /path/to/run_exec.sh /path/to/tmp/script1 prepare_executor /path/to/run_exec.sh /path/to/tmp/script1 prepare_script /path/to/run_exec.sh /path/to/tmp/script1 get_sources ``` If you have `run_args` defined, they are the first set of arguments passed to the `run_exec` executable, then GitLab Runner adds others. For example, suppose we have the following `config.toml`: ```toml ... [runners.custom] ... run_exec = "/path/to/run_exec.sh" run_args = [ "Arg1", "Arg2" ] ... 
``` GitLab Runner runs the executable with the following arguments: ```shell /path/to/run_exec.sh Arg1 Arg2 /path/to/tmp/script1 prepare_executor /path/to/run_exec.sh Arg1 Arg2 /path/to/tmp/script1 prepare_script /path/to/run_exec.sh Arg1 Arg2 /path/to/tmp/script1 get_sources ``` This executable should be responsible for executing the scripts that are specified in the first argument. They contain all the scripts any GitLab Runner executor would run to clone, download artifacts, run user scripts, and all the other steps described below. The scripts can be of the following shells: - Bash - PowerShell Desktop - PowerShell Core - Batch (deprecated) We generate the script using the shell configured by `shell` inside of [`[[runners]]`](../configuration/advanced-configuration.md#the-runners-section). If none is provided the defaults for the OS platform are used. > [!note] > Ensure your `shell` configuration matches the PowerShell version used by your `run_exec` script. > Use `shell = "pwsh"` with `pwsh.exe` (PowerShell Core) > or `shell = "powershell"` with `powershell.exe` (PowerShell Desktop). The table below is a detailed explanation of what each script does and what the main goal of that script is. | Script Name | Script Contents | |-------------------------------|-----------------| | `prepare_script` | Debug information which machine the Job is running on. | | `get_sources` | Prepares the Git configuration, and clone/fetch the repository. We suggest you keep this as is because you get all of the benefits of Git strategies that GitLab provides. | | `restore_cache` | Extract the cache if any are defined. This expects the `gitlab-runner` binary is available in `$PATH`. | | `download_artifacts` | Download artifacts, if any are defined. This expects `gitlab-runner` binary is available in `$PATH`. | | `step_*` | Generated by GitLab. A set of scripts to execute. It may never be sent to the custom executor. 
It may have multiple steps, like `step_release` and `step_accessibility`. This can be a feature from the `.gitlab-ci.yml` file. | | `after_script` | [`after_script`](https://docs.gitlab.com/ci/yaml/#after_script) defined from the job. Runs in a separate shell context. Always runs, even if previous steps fail, including `pre_build_script`. | | `archive_cache` | Creates an archive of all the cache, if any are defined. Only executed when `build_script` was successful. | | `archive_cache_on_failure` | Creates an archive of all the cache, if any are defined. Only executed when `build_script` fails. | | `upload_artifacts_on_success` | Upload any artifacts that are defined. Only executed when `build_script` was successful. | | `upload_artifacts_on_failure` | Upload any artifacts that are defined. Only executed when `build_script` fails. | | `cleanup_file_variables` | Deletes all [file based](https://docs.gitlab.com/ci/variables/#use-file-type-cicd-variables) variables from disk. | ### Cleanup The Cleanup stage is executed by `cleanup_exec`. This final stage is executed even if one of the previous stages failed. The main goal for this stage is to clean up any of the environments that might have been set up. For example, turning off VMs or deleting containers. The result of `cleanup_exec` does not affect job statuses. For example, a job is marked as successful even if the following occurs: - Both `prepare_exec` and `run_exec` are successful. - `cleanup_exec` fails. You can configure [`cleanup_exec_timeout`](../configuration/advanced-configuration.md#the-runnerscustom-section) to set a deadline of how long GitLab Runner should wait to clean up the environment before terminating the process. The `STDOUT` of this executable is printed to GitLab Runner logs at a `DEBUG` level. The `STDERR` is printed to the logs at a `WARN` level. 
If you define any [`cleanup_args`](../configuration/advanced-configuration.md#the-runnerscustom-section), they are added to the `cleanup_exec` executable in the same order you define them. For example, with this `config.toml` content: ```toml ... [runners.custom] ... cleanup_exec = "/path/to/bin" cleanup_args = [ "Arg1", "Arg2" ] ... ``` GitLab Runner would execute it as `/path/to/bin Arg1 Arg2`. ## Terminating and killing executables GitLab Runner tries to gracefully terminate an executable under any of the following conditions: - `config_exec_timeout`, `prepare_exec_timeout` or `cleanup_exec_timeout` are met. - The job [times out](https://docs.gitlab.com/ci/pipelines/settings/#set-a-limit-for-how-long-jobs-can-run). - The job is canceled. When a timeout is reached, a `SIGTERM` is sent to the executable, and the countdown for [`exec_terminate_timeout`](../configuration/advanced-configuration.md#the-runnerscustom-section) starts. The executable should listen to this signal to make sure it cleans up any resources. If `exec_terminate_timeout` passes and the process is still running, a `SIGKILL` is sent to kill the process and [`exec_force_kill_timeout`](../configuration/advanced-configuration.md#the-runnerscustom-section) starts. If the process is still running after `exec_force_kill_timeout` has finished, GitLab Runner abandons the process and doesn't try to stop or kill anymore. If both these timeouts are reached during `config_exec`, `prepare_exec` or `run_exec` the build is marked as failed. Any child process that is spawned by the driver also receives the graceful termination process explained above on UNIX based systems. This is achieved by having the main process set as a [process group](https://man7.org/linux/man-pages/man2/setpgid.2.html) to which all the child processes belong. ## Error handling GitLab Runner can handle two types of errors differently.
These errors are only handled when the executable inside of `config_exec`, `prepare_exec`, `run_exec`, and `cleanup_exec` exits with these codes. If the user exits with a non-zero exit code, it should be propagated as one of the error codes below. If the user script exits with one of these codes, it must be propagated to the executable exit code. ### Build Failure GitLab Runner provides `BUILD_FAILURE_EXIT_CODE` environment variable that your executable should use as an exit code to indicate job failure. If the executable exits with the code from `BUILD_FAILURE_EXIT_CODE`, the build is marked as a failure appropriately in GitLab CI. If the script that the user defines inside of `.gitlab-ci.yml` file exits with a non-zero code, `run_exec` should exit with `BUILD_FAILURE_EXIT_CODE` value. > [!note] > We strongly suggest using `BUILD_FAILURE_EXIT_CODE` to exit > instead of a hard coded value because it can change in any release, making > your binary/script future proof. ### Build failure exit code You can optionally supply a file that contains the exit code when a build fails. The expected path for the file is provided through the `BUILD_EXIT_CODE_FILE` environment variable. For example: ```shell if [ $exit_code -ne 0 ]; then echo $exit_code > ${BUILD_EXIT_CODE_FILE} exit ${BUILD_FAILURE_EXIT_CODE} fi ``` CI/CD jobs require this method to leverage the [`allow_failure`](https://docs.gitlab.com/ci/yaml/#allow_failure) syntax. > [!note] > Store only the integer exit code in this file. Additional information might > result in an `unknown Custom executor executable exit code` error. ### System Failure You can send a system failure to GitLab Runner by exiting the process with the error code specified in the `SYSTEM_FAILURE_EXIT_CODE`. If this error code is returned, GitLab Runner retries certain stages. If none of the retries are successful, the job is marked as failed. Below is a table of what stages are retried, and how many times.
| Stage Name | Number of attempts | Duration to wait between each retry | |----------------------|-------------------------------------------------------------|-------------------------------------| | `prepare_exec` | 3 | 3 seconds | | `get_sources` | Value of `GET_SOURCES_ATTEMPTS` variable. (Default 1) | 0 seconds | | `restore_cache` | Value of `RESTORE_CACHE_ATTEMPTS` variable. (Default 1) | 0 seconds | | `download_artifacts` | Value of `ARTIFACT_DOWNLOAD_ATTEMPTS` variable. (Default 1) | 0 seconds | > [!note] > We strongly suggest using `SYSTEM_FAILURE_EXIT_CODE` to exit > instead of a hard coded value because it can change in any release, making > your binary/script future proof. ## Job response You can change job-level `CUSTOM_ENV_` variables as they observe the documented [CI/CD variable precedence](https://docs.gitlab.com/ci/variables/#cicd-variable-precedence). Though this functionality can be desirable, when the trusted job context is required, the full JSON job response is provided automatically. The runner generates a temporary file, which is referenced in the `JOB_RESPONSE_FILE` environment variable. This file exists in every stage and is automatically removed during cleanup. ```shell $ cat ${JOB_RESPONSE_FILE} {"id": 123456, "token": "jobT0ken",...} ``` ================================================ FILE: docs/executors/custom_examples/libvirt.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Using libvirt with the Custom executor --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} Using [libvirt](https://libvirt.org/), the Custom executor driver will create a new disk and VM for every job it executes, after which the disk and VM will be deleted. 
This document does not try to explain how to set up libvirt, since it's out of scope. However, this driver was tested using [GCP Nested Virtualization](https://docs.cloud.google.com/compute/docs/instances/nested-virtualization/overview), which also has [details on how to set up libvirt](https://docs.cloud.google.com/compute/docs/instances/nested-virtualization/overview#starting_a_private_bridge_between_the_host_and_nested_vms) with bridge networking. This example will use the `default` network that is created when libvirt is installed, so make sure it's running. This driver requires bridge networking since each VM needs to have its own dedicated IP address so GitLab Runner can SSH inside of it to run commands. An SSH key can be generated [using the following commands](https://docs.gitlab.com/user/ssh/#generate-an-ssh-key-pair). A base disk VM image is created so that dependencies are not downloaded every build. In the following example, [virt-builder](https://libguestfs.org/virt-builder.1.html) is used to create a disk VM image.
```shell virt-builder debian-12 \ --size 8G \ --output /var/lib/libvirt/images/gitlab-runner-base.qcow2 \ --format qcow2 \ --hostname gitlab-runner-bookworm \ --network \ --install curl \ --run-command 'curl -L "https://packages.gitlab.com/install/repositories/runner/gitlab-runner/script.deb.sh" | bash' \ --run-command 'curl -s "https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh" | bash' \ --run-command 'useradd -m -p "" gitlab-runner -s /bin/bash' \ --install gitlab-runner,git,git-lfs,openssh-server \ --run-command "git lfs install --skip-repo" \ --ssh-inject gitlab-runner:file:/root/.ssh/id_rsa.pub \ --run-command "echo 'gitlab-runner ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers" \ --run-command "sed -E 's/GRUB_CMDLINE_LINUX=\"\"/GRUB_CMDLINE_LINUX=\"net.ifnames=0 biosdevname=0\"/' -i /etc/default/grub" \ --run-command "grub-mkconfig -o /boot/grub/grub.cfg" \ --run-command "echo 'auto eth0' >> /etc/network/interfaces" \ --run-command "echo 'allow-hotplug eth0' >> /etc/network/interfaces" \ --run-command "echo 'iface eth0 inet dhcp' >> /etc/network/interfaces" ``` The command above will install all the [prerequisites](../custom.md#prerequisite-software-for-running-a-job) specified earlier. `virt-builder` will set a root password automatically which is printed at the end. If you want to specify a password yourself, pass [`--root-password password:$SOME_PASSWORD`](https://libguestfs.org/virt-builder.1.html#setting-the-root-password). 
## Configuration The following is an example of a GitLab Runner configuration for libvirt: ```toml concurrent = 1 check_interval = 0 [session_server] session_timeout = 1800 [[runners]] name = "libvirt-driver" url = "https://gitlab.com/" token = "xxxxx" executor = "custom" builds_dir = "/home/gitlab-runner/builds" cache_dir = "/home/gitlab-runner/cache" [runners.custom_build_dir] [runners.cache] [runners.cache.s3] [runners.cache.gcs] [runners.custom] prepare_exec = "/opt/libvirt-driver/prepare.sh" # Path to a bash script to create VM. run_exec = "/opt/libvirt-driver/run.sh" # Path to a bash script to run script inside of VM over ssh. cleanup_exec = "/opt/libvirt-driver/cleanup.sh" # Path to a bash script to delete VM and disks. ``` ## Base Each stage ([prepare](#prepare), [run](#run), and [cleanup](#cleanup)) will use the base script below to generate variables that are used throughout other scripts. It's important that this script is located in the same directory as the other scripts, in this case `/opt/libvirt-driver/`. ```shell #!/usr/bin/env bash # /opt/libvirt-driver/base.sh VM_IMAGES_PATH="/var/lib/libvirt/images" BASE_VM_IMAGE="$VM_IMAGES_PATH/gitlab-runner-base.qcow2" VM_ID="runner-$CUSTOM_ENV_CI_RUNNER_ID-project-$CUSTOM_ENV_CI_PROJECT_ID-concurrent-$CUSTOM_ENV_CI_CONCURRENT_PROJECT_ID-job-$CUSTOM_ENV_CI_JOB_ID" VM_IMAGE="$VM_IMAGES_PATH/$VM_ID.qcow2" _get_vm_ip() { virsh -q domifaddr "$VM_ID" | awk '{print $4}' | sed -E 's|/([0-9]+)?$||' } ``` ## Prepare The prepare script: - Copies the disk to a new path. - Installs a new VM from the copied disk. - Waits for the VM to get an IP. - Waits for SSH to respond on the VM. ```shell #!/usr/bin/env bash # /opt/libvirt-driver/prepare.sh currentDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" source ${currentDir}/base.sh # Get variables from base script. set -eo pipefail # trap any error, and mark it as a system failure. 
trap "exit $SYSTEM_FAILURE_EXIT_CODE" ERR # Copy base disk to use for Job. qemu-img create -f qcow2 -b "$BASE_VM_IMAGE" "$VM_IMAGE" -F qcow2 # Install the VM # To boot VM in UEFI mode, add: --boot uefi virt-install \ --name "$VM_ID" \ --os-variant debian11 \ --disk "$VM_IMAGE" \ --import \ --vcpus=2 \ --ram=2048 \ --network default \ --graphics none \ --noautoconsole # Wait for VM to get IP echo 'Waiting for VM to get IP' for i in $(seq 1 300); do VM_IP=$(_get_vm_ip) if [ -n "$VM_IP" ]; then echo "VM got IP: $VM_IP" break fi if [ "$i" == "300" ]; then echo 'Waited 300 seconds for VM to start, exiting...' # Inform GitLab Runner that this is a system failure, so it # should be retried. exit "$SYSTEM_FAILURE_EXIT_CODE" fi sleep 1s done # Wait for ssh to become available echo "Waiting for sshd to be available" for i in $(seq 1 300); do if ssh -i /root/.ssh/id_rsa -o StrictHostKeyChecking=no gitlab-runner@$VM_IP >/dev/null 2>/dev/null; then break fi if [ "$i" == "300" ]; then echo 'Waited 300 seconds for sshd to start, exiting...' # Inform GitLab Runner that this is a system failure, so it # should be retried. exit "$SYSTEM_FAILURE_EXIT_CODE" fi sleep 1s done ``` ## Run This will run the script generated by GitLab Runner by sending the content of the script to the VM via `STDIN` through SSH. ```shell #!/usr/bin/env bash # /opt/libvirt-driver/run.sh currentDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" source ${currentDir}/base.sh # Get variables from base script. VM_IP=$(_get_vm_ip) ssh -i /root/.ssh/id_rsa -o StrictHostKeyChecking=no gitlab-runner@$VM_IP /bin/bash < "${1}" if [ $? -ne 0 ]; then # Exit using the variable, to make the build as failure in GitLab # CI. exit "$BUILD_FAILURE_EXIT_CODE" fi ``` ## Cleanup This script removes the VM and deletes the disk. 
```shell #!/usr/bin/env bash # /opt/libvirt-driver/cleanup.sh currentDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" source ${currentDir}/base.sh # Get variables from base script. set -eo pipefail # Destroy VM and wait 300 second. for i in $(seq 1 300); do virsh destroy "$VM_ID" >/dev/null 2>&1 if [[ "$(virsh domstate "$VM_ID" 2>/dev/null | tr '[:upper:]' '[:lower:]')" =~ shut\ off|destroyed|^$ ]]; then break fi if [ $i -eq 300 ]; then exit "$SYSTEM_FAILURE_EXIT_CODE" fi sleep 1 done # Undefine VM. virsh undefine "$VM_ID" || virsh undefine "$VM_ID" --nvram # Delete VM disk. if [ -f "$VM_IMAGE" ]; then rm "$VM_IMAGE" fi ``` ================================================ FILE: docs/executors/custom_examples/lxd.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Using LXD with the Custom executor --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} In this example, we use LXD to create a container per build and clean it up afterwards. This example uses a bash script for each stage. You can specify your own image, which is exposed as [CI_JOB_IMAGE](https://docs.gitlab.com/ci/variables/predefined_variables/). This example uses the `ubuntu:22.04` image for simplicity. If you want to support multiple images, you would have to modify the executor. These scripts have the following prerequisites: - [LXD](https://ubuntu.com/lxd) - [GitLab Runner](../../install/linux-manually.md) ## Configuration ```toml [[runners]] name = "lxd-driver" url = "https://gitlab.example.com" token = "xxxxxxxxxxx" executor = "custom" builds_dir = "/builds" cache_dir = "/cache" [runners.custom] prepare_exec = "/opt/lxd-driver/prepare.sh" # Path to a bash script to create lxd container and download dependencies. 
run_exec = "/opt/lxd-driver/run.sh" # Path to a bash script to run script inside the container. cleanup_exec = "/opt/lxd-driver/cleanup.sh" # Path to bash script to delete container. ``` ## Base Each stage [prepare](#prepare), [run](#run), and [cleanup](#cleanup) will use this script to generate variables that are used throughout the scripts. It's important that this script is located in the same directory as the other scripts, in this case `/opt/lxd-driver/`. ```shell #!/usr/bin/env bash # /opt/lxd-driver/base.sh CONTAINER_ID="runner-$CUSTOM_ENV_CI_RUNNER_ID-project-$CUSTOM_ENV_CI_PROJECT_ID-concurrent-$CUSTOM_ENV_CI_CONCURRENT_PROJECT_ID-$CUSTOM_ENV_CI_JOB_ID" ``` ## Prepare The prepare script will do the following: - Destroy a container with the same name if there is one running. - Start a container and wait for it to start. - Install [prerequisite dependencies](../custom.md#prerequisite-software-for-running-a-job). ```shell #!/usr/bin/env bash # /opt/lxd-driver/prepare.sh currentDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" source ${currentDir}/base.sh # Get variables from base. set -eo pipefail # trap any error, and mark it as a system failure. trap "exit $SYSTEM_FAILURE_EXIT_CODE" ERR start_container () { if lxc info "$CONTAINER_ID" >/dev/null 2>/dev/null ; then echo 'Found old container, deleting' lxc delete -f "$CONTAINER_ID" fi # The container image is hardcoded, but you can use # the `CI_JOB_IMAGE` predefined variable # https://docs.gitlab.com/ci/variables/predefined_variables/ # which is available under `CUSTOM_ENV_CI_JOB_IMAGE` to allow the # user to specify the image. The rest of the script assumes that # you are running on an ubuntu image so modifications might be # required. lxc launch ubuntu:22.04 "$CONTAINER_ID" # Wait for container to start, we are using systemd to check this, # for the sake of brevity. 
for i in $(seq 1 10); do if lxc exec "$CONTAINER_ID" -- sh -c "systemctl isolate multi-user.target" >/dev/null 2>/dev/null; then break fi if [ "$i" == "10" ]; then echo 'Waited for 10 seconds to start container, exiting..' # Inform GitLab Runner that this is a system failure, so it # should be retried. exit "$SYSTEM_FAILURE_EXIT_CODE" fi sleep 1s done } install_dependencies () { # Install Git LFS, git comes pre installed with ubuntu image. lxc exec "$CONTAINER_ID" -- sh -c 'curl -s "https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh" | sudo bash' lxc exec "$CONTAINER_ID" -- sh -c "apt-get install -y git-lfs" # Install gitlab-runner binary since we need for cache/artifacts. lxc exec "$CONTAINER_ID" -- sh -c 'curl -L --output /usr/local/bin/gitlab-runner "https://gitlab-runner-downloads.s3.amazonaws.com/latest/binaries/gitlab-runner-linux-amd64"' lxc exec "$CONTAINER_ID" -- sh -c "chmod +x /usr/local/bin/gitlab-runner" } echo "Running in $CONTAINER_ID" start_container install_dependencies ``` ## Run This will run the script generated by GitLab Runner by sending the content of the script to the container via `STDIN`. ```shell #!/usr/bin/env bash # /opt/lxd-driver/run.sh currentDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" source ${currentDir}/base.sh # Get variables from base. lxc exec "$CONTAINER_ID" /bin/bash < "${1}" if [ $? -ne 0 ]; then # Exit using the variable, to make the build as failure in GitLab # CI. exit $BUILD_FAILURE_EXIT_CODE fi ``` ## Cleanup Destroy the container since the build has finished. ```shell #!/usr/bin/env bash # /opt/lxd-driver/cleanup.sh currentDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" source ${currentDir}/base.sh # Get variables from base. 
echo "Deleting container $CONTAINER_ID" lxc delete -f "$CONTAINER_ID" ``` ================================================ FILE: docs/executors/docker.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Docker executor --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} GitLab Runner uses the Docker executor to run jobs on Docker images. You can use the Docker executor to: - Maintain the same build environment for each job. - Use the same image to test commands locally without the requirement of running a job in the CI server. The Docker executor uses [Docker Engine](https://www.docker.com/products/container-runtime/) to run each job in a separate and isolated container. To connect to Docker Engine, the executor uses: - The image and services you define in [`.gitlab-ci.yml`](https://docs.gitlab.com/ci/yaml/). - The configurations you define in [`config.toml`](../commands/_index.md#configuration-file). You can't register a runner and its Docker executor without defining a default image in `config.toml`. The image defined in `config.toml` can be used when none is defined in `.gitlab-ci.yml`. If an image is defined in `.gitlab-ci.yml`, it overrides the one defined in `config.toml`. Prerequisites: - [Install Docker](https://docs.docker.com/engine/install/). ## Docker executor workflow The Docker executor uses a Docker image based on [Alpine Linux](https://alpinelinux.org/) that contains the tools to run the prepare, pre-job, and post-job steps. To view the definition of the special Docker image, see the [GitLab Runner repository](https://gitlab.com/gitlab-org/gitlab-runner/-/tree/main/dockerfiles/runner-helper). The Docker executor divides the job into several steps: 1. 
**Prepare**: Creates and starts the [services](https://docs.gitlab.com/ci/yaml/#services). 1. **Pre-job**: Clones, restores [cache](https://docs.gitlab.com/ci/yaml/#cache), and downloads [artifacts](https://docs.gitlab.com/ci/yaml/#artifacts) from previous stages. Runs on a special Docker image. 1. **Job**: Runs your build in the Docker image you configure for the runner. 1. **Post-job**: Create cache, upload artifacts to GitLab. Runs on a special Docker Image. ## Supported configurations The Docker executor supports the following configurations. For known issues and additional requirements of Windows configurations, see [Use Windows containers](#use-windows-containers). | Runner is installed on: | Executor is: | Container is running: | |-------------------------|------------------|-----------------------| | Windows | `docker-windows` | Windows | | Windows | `docker` | Linux | | Linux | `docker` | Linux | | macOS | `docker` | Linux | These configurations are **not** supported: | Runner is installed on: | Executor is: | Container is running: | |-------------------------|------------------|-----------------------| | Linux | `docker-windows` | Linux | | Linux | `docker` | Windows | | Linux | `docker-windows` | Windows | | Windows | `docker` | Windows | | Windows | `docker-windows` | Linux | > [!note] > GitLab Runner uses Docker Engine API > [v1.25](https://docs.docker.com/reference/api/engine/version/v1.25/) to talk to the Docker > Engine. This means the > [minimum supported version](https://docs.docker.com/reference/api/engine/#api-version-matrix) > of Docker on a Linux server is `1.13.0`. > On Windows Server, [it needs to be more recent](#supported-docker-versions) > to identify the Windows Server version. 
## Use the Docker executor To use the Docker executor, manually define Docker as the executor in `config.toml` or use the [`gitlab-runner register --executor "docker"`](../register/_index.md#register-with-a-runner-authentication-token) command to automatically define it. The following sample configuration shows Docker defined as the executor. For more information about these values, see [Advanced configuration](../configuration/advanced-configuration.md) ```toml concurrent = 4 [[runners]] name = "myRunner" url = "https://gitlab.com/ci" token = "......" executor = "docker" [runners.docker] tls_verify = true image = "my.registry.tld:5000/alpine:latest" privileged = false disable_entrypoint_overwrite = false oom_kill_disable = false disable_cache = false volumes = [ "/cache", ] shm_size = 0 allowed_pull_policies = ["always", "if-not-present"] allowed_images = ["my.registry.tld:5000/*:*"] allowed_services = ["my.registry.tld:5000/*:*"] [runners.docker.volume_driver_ops] "size" = "50G" ``` ## Configure images and services Prerequisites: - The image where your job runs must have a working shell in its operating system `PATH`. Supported shells are: - For Linux: - `sh` - `bash` - PowerShell Core (`pwsh`). [Introduced in 13.9](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4021). - For Windows: - PowerShell (`powershell`) - PowerShell Core (`pwsh`). [Introduced in 13.6](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/13139). To configure the Docker executor, you define the Docker images and services in [`.gitlab-ci.yml`](https://docs.gitlab.com/ci/yaml/) and [`config.toml`](../commands/_index.md#configuration-file). Use the following keywords: - `image`: The name of the Docker image that the runner uses to run jobs. - Enter an image from the local Docker Engine, or any image in Docker Hub. For more information, see the [Docker documentation](https://docs.docker.com/get-started/introduction/). - To define the image version, use a colon (`:`) to add a tag. 
If you don't specify a tag, Docker uses `latest` as the version. - `services`: The additional image that creates another container and links to the `image`. For more information about types of services, see [Services](https://docs.gitlab.com/ci/services/). ### Define images and services in `.gitlab-ci.yml` Define an image that the runner uses for all jobs and a list of services to use during build time. Example: ```yaml image: ruby:3.3 services: - postgres:9.3 before_script: - bundle install test: script: - bundle exec rake spec ``` To define different images and services per job: ```yaml before_script: - bundle install test:3.3: image: ruby:3.3 services: - postgres:9.3 script: - bundle exec rake spec test:3.4: image: ruby:3.4 services: - postgres:9.4 script: - bundle exec rake spec ``` If you don't define an `image` in `.gitlab-ci.yml`, the runner uses the `image` defined in `config.toml`. ### Define images and services in `config.toml` To add images and services to all jobs run by a runner, update `[runners.docker]` in the `config.toml`. By default, the Docker executor uses the `image` defined in `.gitlab-ci.yml`. If you don't define one in `.gitlab-ci.yml`, the runner uses the image defined in `config.toml`. Example: ```toml [runners.docker] image = "ruby:3.3" [[runners.docker.services]] name = "mysql:latest" alias = "db" [[runners.docker.services]] name = "redis:latest" alias = "cache" ``` This example uses the [array of tables syntax](https://toml.io/en/v0.4.0#array-of-tables). ### Define an image from a private registry Prerequisites: - To access images from a private registry, you must [authenticate GitLab Runner](https://docs.gitlab.com/ci/docker/using_docker_images/#access-an-image-from-a-private-container-registry). To define an image from a private registry, provide the registry name and the image in `.gitlab-ci.yml`.
Example: ```yaml image: my.registry.tld:5000/namespace/image:tag ``` In this example, GitLab Runner searches the registry `my.registry.tld:5000` for the image `namespace/image:tag`. ## Network configurations You must configure a network to connect services to a CI/CD job. To configure a network, you can either: - Recommended. Configure the runner to create a network for each job. - Define container links. Container links are a legacy feature of Docker. ### Create a network for each job You can configure the runner to create a network for each job. When you enable this networking mode, the runner creates and uses a user-defined Docker bridge network for each job. Docker environment variables are not shared across the containers. For more information about user-defined bridge networks, see the [Docker documentation](https://docs.docker.com/engine/network/drivers/bridge/). To use this networking mode, enable `FF_NETWORK_PER_BUILD` in either the feature flag or the environment variable in the `config.toml`. Do not set the `network_mode`. Example: ```toml [[runners]] (...) executor = "docker" environment = ["FF_NETWORK_PER_BUILD=1"] ``` Or: ```toml [[runners]] (...) executor = "docker" [runners.feature_flags] FF_NETWORK_PER_BUILD = true ``` To set the default Docker address pool, use `default-address-pool` in [`dockerd`](https://docs.docker.com/reference/cli/dockerd/). If CIDR ranges are already used in the network, Docker networks may conflict with other networks on the host, including other Docker networks. This feature works only when the Docker daemon is configured with IPv6 enabled. To enable IPv6 support, set `enable_ipv6` to `true` in the Docker configuration. For more information, see the [Docker documentation](https://docs.docker.com/engine/daemon/ipv6/). The runner uses the `build` alias to resolve the job container. DNS might not work correctly with a Docker-in-Docker (`dind`) service when you use this feature. 
This behavior is due to an issue with [Docker/Moby](https://github.com/moby/moby/issues/20037#issuecomment-181659049), where `dind` containers don't inherit custom DNS entries when you specify a network. As a workaround, manually provide the custom DNS settings to the `dind` service. For example, if your custom DNS server is `1.1.1.1`, you can use `127.0.0.11`, which is Docker's internal DNS service: ```yaml services: - name: docker:dind command: [--dns=127.0.0.11, --dns=1.1.1.1] ``` This approach also allows containers to resolve services on the same network. #### How the runner creates a network for each job When a job starts, the runner: 1. Creates a bridge network, similar to the Docker command `docker network create <network>`. 1. Connects the service and containers to the bridge network. 1. Removes the network at the end of the job. The container running the job and the containers running the service resolve each other's hostnames and aliases. This functionality is [provided by Docker](https://docs.docker.com/engine/network/drivers/bridge/#differences-between-user-defined-bridges-and-the-default-bridge). ### Configure a network with container links GitLab Runner before 18.7.0 uses the default Docker `bridge` along with [legacy container links](https://docs.docker.com/engine/network/links/) to link the job container with the services. Because Docker deprecated the links functionality, in GitLab Runner 18.7.0 and later, the legacy container link behavior is emulated by allowing service aliases to be resolved using Docker's `extra_hosts` functionality. This network mode is the default if [`FF_NETWORK_PER_BUILD`](#create-a-network-for-each-job) is disabled. The GitLab Runner emulated link behavior differs slightly from [legacy container links](https://docs.docker.com/engine/network/links/): - Disabling `icc` disables inter-container communication and containers cannot communicate with each other.
- Environment variables for the linked containers are no longer present (`_PORT__`). To configure the network, specify the [networking mode](https://docs.docker.com/engine/containers/run/#network-settings) in the `config.toml` file: - `bridge`: Use the bridge network. Default. - `host`: Use the host's network stack inside the container. - `none`: No networking. Not recommended. Example: ```toml [[runners]] (...) executor = "docker" [runners.docker] network_mode = "bridge" ``` If you use any other `network_mode` value, these are taken as the name of an already existing Docker network, which the build container connects to. During name resolution, Docker updates the `/etc/hosts` file in the container with the service container hostname and alias. However, the service container is **not** able to resolve the container name. To resolve the container name, you must create a network for each job. Linked containers share their environment variables. #### Overriding the MTU of the created network For some environments, like virtual machines in OpenStack, a custom MTU is necessary. The Docker daemon does not respect the MTU in `docker.json` (see [Moby issue 34981](https://github.com/moby/moby/issues/34981)). You can set `network_mtu` in your `config.toml` to any valid value so the Docker daemon can use the correct MTU for the newly created network. You must also enable [`FF_NETWORK_PER_BUILD`](#create-a-network-for-each-job) for the override to take effect. The following configuration sets the MTU to `1402` for the network created for each job. Make sure to adjust the value to your specific environment requirements. ```toml [[runners]] (...) executor = "docker" [runners.docker] network_mtu = 1402 [runners.feature_flags] FF_NETWORK_PER_BUILD = true ``` ## Restrict Docker images and services To restrict Docker images and services, specify a wildcard pattern in the `allowed_images` and `allowed_services` parameters. 
For more details on syntax, see [doublestar documentation](https://github.com/bmatcuk/doublestar). For example, to allow images from your private Docker registry only: ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) allowed_images = ["my.registry.tld:5000/*:*"] allowed_services = ["my.registry.tld:5000/*:*"] ``` To restrict to a list of images from your private Docker registry: ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) allowed_images = ["my.registry.tld:5000/ruby:*", "my.registry.tld:5000/node:*"] allowed_services = ["postgres:9.4", "postgres:latest"] ``` To exclude specific images like Kali: ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) allowed_images = ["**", "!*/kali*"] ``` ## Access services hostnames To access a service hostname, add the service to `services` in `.gitlab-ci.yml`. For example, to use a Wordpress instance to test an API integration with your application, use [tutum/wordpress](https://hub.docker.com/r/tutum/wordpress/) as the service image: ```yaml services: - tutum/wordpress:latest ``` When the job runs, the `tutum/wordpress` service starts. You can access it from your build container under the hostname `tutum__wordpress` and `tutum-wordpress`. In addition to the specified service aliases, the runner assigns the name of the service image as an alias to the service container. You can use any of these aliases. The runner uses the following rules to create the alias based on the image name: - Everything after `:` is stripped. - For the first alias, the slash (`/`) is replaced with double underscores (`__`). - For the second alias, the slash (`/`) is replaced with a single dash (`-`). If you use a private service image, the runner strips any specified port and applies the rules. The service `registry.gitlab-wp.com:4999/tutum/wordpress` results in the hostname `registry.gitlab-wp.com__tutum__wordpress` and `registry.gitlab-wp.com-tutum-wordpress`. 
## Configuring services To change database names or set account names, you can define environment variables for the service. When the runner passes variables: - Variables are passed to all containers. The runner cannot pass variables to specific containers. - Secure variables are passed to the build container. For more information about configuration variables, see the documentation of each image provided in their corresponding Docker Hub page. ### Mount a directory in RAM You can use the `tmpfs` option to mount a directory in RAM. This speeds up the time required to test if there is a lot of I/O related work, such as with databases. If you use the `tmpfs` and `services_tmpfs` options in the runner configuration, you can specify multiple paths, each with its own options. For more information, see the [Docker documentation](https://docs.docker.com/reference/cli/docker/container/run/#tmpfs). For example, to mount the data directory for the official MySQL container in RAM, configure the `config.toml`: ```toml [runners.docker] # For the main container [runners.docker.tmpfs] "/var/lib/mysql" = "rw,noexec" # For services [runners.docker.services_tmpfs] "/var/lib/mysql" = "rw,noexec" ``` ### Building a directory in a service GitLab Runner mounts a `/builds` directory to all shared services. For more information about using different services see: - [Using PostgreSQL](https://docs.gitlab.com/ci/services/postgres/) - [Using MySQL](https://docs.gitlab.com/ci/services/mysql/) ### How GitLab Runner performs the services health check {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4079) multiple port checks in GitLab 16.0. {{< /history >}} After the service starts, GitLab Runner waits for the service to respond. The Docker executor tries to open a TCP connection to the exposed service port in the service container. - In GitLab 15.11 and earlier, only the first exposed port is checked. 
- In GitLab 16.0 and later, the first 20 exposed ports are checked. The `HEALTHCHECK_TCP_PORT` service variable can be used to perform the health check on a specific port: ```yaml job: services: - name: mongo variables: HEALTHCHECK_TCP_PORT: "27017" ``` To see how this is implemented, use the health check [Go command](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/commands/helpers/health_check.go). ## Specify Docker driver operations Specify arguments to supply to the Docker volume driver when you create volumes for builds. For example, you can use these arguments to limit the space for each build to run, in addition to all other driver specific options. The following example shows a `config.toml` where the limit that each build can consume is set to 50 GB. ```toml [runners.docker] [runners.docker.volume_driver_ops] "size" = "50G" ``` ## Using host devices {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/6208) in GitLab 17.10. {{< /history >}} You can expose hardware devices on the GitLab Runner host to the container that runs the job. To do this, configure the runner's `devices` and `services_devices` options. - To expose devices to `build` and [helper](../configuration/advanced-configuration.md#helper-image) containers, use the `devices` option. - To expose devices to services containers, use the `services_devices` option. To restrict a service container's device access to specific images, use exact image names or glob patterns. This action prevents direct access to host system devices. For more information on device access, see [Docker documentation](https://docs.docker.com/reference/cli/docker/container/run/#device). ### Build container example In this example, the `config.toml` section exposes `/dev/bus/usb` to build containers. 
This configuration allows pipelines to access USB devices attached to the host machine, such as Android smartphones controlled over the [Android Debug Bridge (`adb`)](https://developer.android.com/tools/adb). Since build job containers can directly access host USB devices, simultaneous pipeline executions may conflict with each other when accessing the same hardware. To prevent these conflicts, use [`resource_group`](https://docs.gitlab.com/ci/yaml/#resource_group). ```toml [[runners]] name = "hardware-runner" url = "https://gitlab.com" token = "__REDACTED__" executor = "docker" [runners.docker] # All job containers may access the host device devices = ["/dev/bus/usb"] ``` ### Private registry example This example shows how to expose `/dev/kvm` and `/dev/dri` devices to container images from a private Docker registry. These devices are commonly used for hardware-accelerated virtualization and rendering. To mitigate risks involved with providing users direct access to hardware resources, restrict device access to trusted images in the `myregistry:5000/emulator/*` namespace: ```toml [runners.docker] [runners.docker.services_devices] # Only images from an internal registry may access the host devices "myregistry:5000/emulator/*" = ["/dev/kvm", "/dev/dri"] ``` > [!warning] > The image name `**/*` might expose devices to any image. ## Configure directories for the container build and cache To define where data is stored in the container, configure `/builds` and `/cache` directories in the `[[runners]]` section in `config.toml`. If you modify the `/cache` storage path, to mark the path as persistent you must define it in `volumes = ["/my/cache/"]`, under the `[runners.docker]` section in `config.toml`. By default, the Docker executor stores builds and caches in the following directories: - Builds in `/builds/<namespace>/<project-name>` - Caches in `/cache` inside the container. 
## Clear the Docker cache Use [`clear-docker-cache`](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/packaging/root/usr/share/gitlab-runner/clear-docker-cache) to remove unused containers and volumes created by the runner. For a list of options, run the script with the `help` option: ```shell clear-docker-cache help ``` The default option is `prune-volumes`, which removes all unused containers (dangling and unreferenced) and volumes. To manage cache storage efficiently, you should: - Run `clear-docker-cache` with `cron` regularly (for example, once a week). - Maintain some recent containers in the cache for performance while you reclaim disk space. The `FILTER_FLAG` environment variable controls which objects are pruned. For example usage, see the [Docker image prune](https://docs.docker.com/reference/cli/docker/image/prune/#filter) documentation. ## Clear Docker build images The [`clear-docker-cache`](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/packaging/root/usr/share/gitlab-runner/clear-docker-cache) script does not remove Docker images because they are not tagged by the GitLab Runner. To clear Docker build images: 1. Confirm what disk space can be reclaimed: ```shell clear-docker-cache space Show docker disk usage ---------------------- TYPE TOTAL ACTIVE SIZE RECLAIMABLE Images 14 9 1.306GB 545.8MB (41%) Containers 19 18 115kB 0B (0%) Local Volumes 0 0 0B 0B Build Cache 0 0 0B 0B ``` 1. To remove all unused containers, networks, images (dangling and unreferenced), and untagged volumes, run [`docker system prune`](https://docs.docker.com/reference/cli/docker/system/prune/). ## Persistent storage The Docker executor provides persistent storage when it runs containers. All directories defined in `volumes =` are persistent between builds. The `volumes` directive supports the following types of storage: - For dynamic storage, use `<path>`. The `<path>` is persistent between subsequent runs of the same concurrent job for that project. 
If you don't set `runners.docker.cache_dir`, the data persists in Docker volumes. Otherwise, it persists in the configured directory on the host (mounted into the build container). Volume names for volume-based persistent storage: - For GitLab Runner before 18.4.0: `runner-<short-token>-project-<project-id>-concurrent-<concurrency-id>-cache-<md5-of-path>` - For GitLab Runner 18.4.0 and later: `runner-<short-token>-cache-<unique-id>` Data that is no longer human readable in the volume name is moved to the volume's labels. Host directories for host-based persistent storage: - For GitLab Runner before 18.4.0: `<cache_dir>/runner-<short-token>-project-<project-id>-concurrent-<concurrency-id>/<md5-of-path>` - For GitLab Runner 18.4.0 and later: `<cache_dir>/runner-<unique-id>/` Description of the variable parts: - `<short-token>`: The shortened version of the runner's token (first 8 letters) - `<project-id>`: The ID of the GitLab project - `<concurrency-id>`: The index of the runner from the list of all runners that run a build for the same project concurrently (accessible through the `CI_CONCURRENT_PROJECT_ID` [pre-defined variable](https://docs.gitlab.com/ci/variables/predefined_variables/)). - `<md5-of-path>`: The MD5 sum of the path within the container - `<unique-id>`: The hash for the following data: - Runner's token - Runner's system ID - `<md5-of-path>` - `<protected>` - `<protected>`: The value is empty for builds on unprotected branches, and `-protected` for protected branch builds - `<cache_dir>`: The configuration in `runners.docker.cache_dir` - For host-bound storage, use `<host_path>:<container_path>[:<mode>]`. GitLab Runner binds the `<host_path>` to `<container_path>` on the host system. The optional `<mode>` specifies whether this storage is read-only or read-write (default). > [!warning] > In GitLab Runner 18.4 and later, the naming of sources for dynamic storage (see above) changed > for both Docker volume-based and host directory-based persistent storage. When you upgrade > to 18.4.0, GitLab Runner ignores the cached data from previous runner versions and creates > new dynamic storage on-demand, either through new Docker volumes or new host directories. > > Host-bound storage (with a `<host_path>` configuration), in contrast to dynamic > storage, is not affected. 
### Persistent storage for builds If you make the `/builds` directory a host-bound storage, your builds are stored in: `/builds/<short-token>/<concurrent-id>/<namespace>/<project-name>/`, where: - `<short-token>` is a shortened version of the Runner's token (first 8 letters). - `<concurrent-id>` is the index of the runner from the list of all runners that run a build for the same project concurrently (accessible through the `CI_CONCURRENT_PROJECT_ID` [pre-defined variable](https://docs.gitlab.com/ci/variables/predefined_variables/)). ## IPC mode The Docker executor supports sharing the IPC namespace of containers with other locations. This maps to the `docker run --ipc` flag. More details on [IPC settings in Docker documentation](https://docs.docker.com/engine/containers/run/#ipc-settings---ipc) ## Privileged mode The Docker executor supports several options that allow fine-tuning of the build container. One of these options is the [`privileged` mode](https://docs.docker.com/engine/containers/run/#runtime-privilege-and-linux-capabilities). ### Use Docker-in-Docker with privileged mode The configured `privileged` flag is passed to the build container and all services. With this flag, you can use the Docker-in-Docker approach. First, configure your runner (`config.toml`) to run in `privileged` mode: ```toml [[runners]] executor = "docker" [runners.docker] privileged = true ``` Then, make your build script (`.gitlab-ci.yml`) to use Docker-in-Docker container: ```yaml image: docker:git services: - docker:dind build: script: - docker build -t my-image . - docker push my-image ``` > [!warning] > Containers that run in privileged mode have security risks. > When your containers run in privileged mode, you disable the > container security mechanisms and expose your host to privilege escalation. > Running containers in privileged mode can lead to container breakout. 
For more information, > see the Docker documentation about > [runtime privilege and Linux capabilities](https://docs.docker.com/engine/containers/run/#runtime-privilege-and-linux-capabilities). You might need to [configure Docker in Docker with TLS, or disable TLS](https://docs.gitlab.com/ci/docker/using_docker_build/#use-the-docker-executor-with-docker-in-docker) to avoid an error similar to the following: ```plaintext Cannot connect to the Docker daemon at tcp://docker:2375. Is the docker daemon running? ``` ### Use rootless Docker-in-Docker with restricted privileged mode In this version, only Docker-in-Docker rootless images are allowed to run as services in privileged mode. The `services_privileged` and `allowed_privileged_services` configuration parameters limit which containers are allowed to run in privileged mode. To use rootless Docker-in-Docker with restricted privileged mode: 1. In the `config.toml`, configure the runner to use `services_privileged` and `allowed_privileged_services`: ```toml [[runners]] executor = "docker" [runners.docker] services_privileged = true allowed_privileged_services = ["docker.io/library/docker:*-dind-rootless", "docker.io/library/docker:dind-rootless", "docker:*-dind-rootless", "docker:dind-rootless"] ``` 1. In `.gitlab-ci.yml`, edit your build script to use Docker-in-Docker rootless container: ```yaml image: docker:git services: - docker:dind-rootless build: script: - docker build -t my-image . - docker push my-image ``` Only the Docker-in-Docker rootless images you list in `allowed_privileged_services` are allowed to run in privileged mode. All other containers for jobs and services run in unprivileged mode. Because they run as non-root, it's _almost safe_ to use with privileged mode images like Docker-in-Docker rootless or BuildKit rootless. For more information about security issues, see [Security risks for Docker executors](../security/_index.md#usage-of-docker-executor). 
## Configure a Docker ENTRYPOINT By default, the Docker executor doesn't override the [`ENTRYPOINT` of a Docker image](https://docs.docker.com/engine/containers/run/#entrypoint-default-command-to-execute-at-runtime). It passes `sh` or `bash` as [`COMMAND`](https://docs.docker.com/engine/containers/run/#cmd-default-command-or-options) to start a container that runs the job script. To ensure a job can run, its Docker image must: - Provide `sh` or `bash` and `grep` - Define an `ENTRYPOINT` that starts a shell when passed `sh`/`bash` as argument The Docker Executor runs the job's container with an equivalent of the following command: ```shell docker run sh -c "echo 'It works!'" # or bash ``` If your Docker image doesn't support this mechanism, you can [override the image's ENTRYPOINT](https://docs.gitlab.com/ci/yaml/#imageentrypoint) in the project configuration as follows: ```yaml # Equivalent of # docker run --entrypoint "" sh -c "echo 'It works!'" image: name: my-image entrypoint: [""] ``` For more information, see [Override the Entrypoint of an image](https://docs.gitlab.com/ci/docker/using_docker_images/#override-the-entrypoint-of-an-image) and [How `CMD` and `ENTRYPOINT` interact in Docker](https://docs.docker.com/reference/dockerfile/#understand-how-cmd-and-entrypoint-interact). ### Job script as ENTRYPOINT You can use `ENTRYPOINT` to create a Docker image that runs the build script in a custom environment, or in secure mode. For example, you can create a Docker image that uses an `ENTRYPOINT` that doesn't execute the build script. Instead, the Docker image executes a predefined set of commands to build the Docker image from your directory. You run the build container in [privileged mode](#privileged-mode), and secure the build environment of the runner. 1. Create a new Dockerfile: ```dockerfile FROM docker:dind ADD / /entrypoint.sh ENTRYPOINT ["/bin/sh", "/entrypoint.sh"] ``` 1. 
Create a bash script (`entrypoint.sh`) that is used as the `ENTRYPOINT`: ```shell #!/bin/sh dind docker daemon --host=unix:///var/run/docker.sock \ --host=tcp://0.0.0.0:2375 \ --storage-driver=vf & docker build -t "$BUILD_IMAGE" . docker push "$BUILD_IMAGE" ``` 1. Push the image to the Docker registry. 1. Run Docker executor in `privileged` mode. In `config.toml` define: ```toml [[runners]] executor = "docker" [runners.docker] privileged = true ``` 1. In your project use the following `.gitlab-ci.yml`: ```yaml variables: BUILD_IMAGE: my.image build: image: my/docker-build:image script: - Dummy Script ``` ## Use Podman to run Docker commands {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27119) in GitLab 15.3. {{< /history >}} If you have GitLab Runner installed on Linux, your jobs can use Podman to replace Docker as the container runtime in the Docker executor. Prerequisites: - [Podman](https://podman.io/) v4.2.0 or later. - To run [services](#services) with Podman as an executor, enable the [`FF_NETWORK_PER_BUILD` feature flag](#create-a-network-for-each-job). [Docker container links](https://docs.docker.com/engine/network/links/) are legacy and are not supported by [Podman](https://podman.io/). For services that create a network alias, you must install the `podman-plugins` package. > [!note] > Podman uses `aardvark-dns` as the DNS server for containers. > The `aardvark-dns` versions 1.10.0 and earlier cause sporadic DNS resolution failures in CI/CD jobs. > Make sure that you have installed a newer version. > For more information, see [GitHub issue 389](https://github.com/containers/aardvark-dns/issues/389). 1. On your Linux host, install GitLab Runner. If you installed GitLab Runner by using your system's package manager, it automatically creates a `gitlab-runner` user. 1. Sign in as the user who runs GitLab Runner. 
You must do so in a way that doesn't go around [`pam_systemd`](https://www.freedesktop.org/software/systemd/man/latest/pam_systemd.html). You can use SSH with the correct user. This ensures you can run `systemctl` as this user. 1. Make sure that your system fulfills the prerequisites for [a rootless Podman setup](https://github.com/containers/podman/blob/main/docs/tutorials/rootless_tutorial.md). Specifically, make sure your user has [correct entries in `/etc/subuid` and `/etc/subgid`](https://github.com/containers/podman/blob/main/docs/tutorials/rootless_tutorial.md#etcsubuid-and-etcsubgid-configuration). 1. On the Linux host, [install Podman](https://podman.io/getting-started/installation). 1. Enable and start the Podman socket: ```shell systemctl --user --now enable podman.socket ``` 1. Verify the Podman socket is listening: ```shell systemctl status --user podman.socket ``` 1. Copy the socket string in the `Listen` key through which the Podman API is being accessed. 1. Make sure the Podman socket remains available after the GitLab Runner user is logged out: ```shell sudo loginctl enable-linger gitlab-runner ``` 1. Edit the GitLab Runner `config.toml` file and add the socket value to the host entry in the `[runners.docker]` section. For example: ```toml [[runners]] name = "podman-test-runner-2025-06-07" url = "https://gitlab.com" token = "TOKEN" executor = "docker" [runners.docker] host = "unix:///run/user/1012/podman/podman.sock" tls_verify = false image = "quay.io/podman/stable" privileged = false ``` > [!note] > Set `privileged = false` for standard Podman usage. Set `privileged = true` only if you need to run > [Docker-in-Docker services](#use-docker-in-docker-with-privileged-mode) within your jobs. ### Use Podman to build container images from a Dockerfile The following example uses Podman to build a container image and push the image to the GitLab Container registry. 
The default container image in the Runner `config.toml` is set to `quay.io/podman/stable`, so that the CI job uses that image to execute the included commands. ```yaml variables: IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG before_script: - podman login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY oci-container-build: stage: build script: - podman build -t $IMAGE_TAG . - podman push $IMAGE_TAG when: manual ``` ### Use Buildah to build container images from a Dockerfile The following example shows how to use Buildah to build a container image and push the image to the GitLab Container registry. ```yaml image: quay.io/buildah/stable variables: IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG before_script: - buildah login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY oci-container-build: stage: build script: - buildah bud -t $IMAGE_TAG . - buildah push $IMAGE_TAG when: manual ``` ### Known issues Unlike Docker, Podman enforces SELinux policies by default. While many pipelines run without issues, some may fail due to SELinux context inheritance when tools use temporary directories. For example, the following pipeline fails under Podman: ```yaml testing: image: alpine:3.20 script: - apk add --no-cache python3 py3-pip - pip3 install --target $CI_PROJECT_DIR requests==2.28.2 ``` The failure occurs because pip uses `/tmp` as a working directory. Files created in `/tmp` inherit its SELinux context, which prevents the container from modifying these files when they're moved to `$CI_PROJECT_DIR`. **Solution:** Add `/tmp` to the volumes in the runner's `config.toml` under the `runners.docker` section: ```toml [[runners]] [runners.docker] volumes = ["/cache", "/tmp"] ``` This addition ensures consistent SELinux contexts across the mounted directories. #### Troubleshooting SELinux Issues Other Podman/SELinux issues may require additional troubleshooting to identify the necessary configuration changes. 
To test whether a Podman runner issue is SELinux-related, temporarily add the following directive to the runner's `config.toml` under the `runners.docker` section: ```toml [[runners]] [runners.docker] security_opt = ["label:disable"] ``` > [!warning] > This addition turns off SELinux enforcement in the container (which is Docker's default behavior). > Use this configuration only for testing purposes and not as a permanent solution because it can have security implications. #### Configure SELinux MCS If SELinux blocks some write operations (such as reinitializing an existing Git repository), you can force a Multi-Category Security (MCS) on all containers launched by the runner: ```toml [[runners]] [runners.docker] security_opt = ["label=level:s0:c1000"] ``` This option does not disable SELinux, but sets the container's MCS level. This approach is more secure than using `label:disable`. > [!warning] > Multiple containers that use the same MCS category can access the same files tagged with that category. ## Specify which user runs the job By default, the runner runs jobs as the `root` user in the container. To specify a different, non-root user to run the job, use the `USER` directive in the Dockerfile of the Docker image. ```dockerfile FROM amazonlinux RUN ["yum", "install", "-y", "nginx"] RUN ["useradd", "www"] USER "www" CMD ["/bin/bash"] ``` When you use that Docker image to execute your job, it runs as the specified user: ```yaml build: image: my/docker-build:image script: - whoami # www ``` ## Configure how runners pull images Configure the pull policy in the `config.toml` to define how runners pull Docker images from registries. You can set a single policy, [a list of policies](#set-multiple-pull-policies), or [allow specific pull policies](#allow-docker-pull-policies). Use the following values for the `pull_policy`: - [`always`](#set-the-always-pull-policy): Default. Pull an image even if a local image exists. 
This pull policy does not apply to images specified by their `SHA256` that already exist on disk. - [`if-not-present`](#set-the-if-not-present-pull-policy): Pull an image only when a local version does not exist. - [`never`](#set-the-never-pull-policy): Never pull an image and use only local images. ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) pull_policy = "always" # available: always, if-not-present, never ``` ### Set the `always` pull policy The `always` option, which is on by default, always initiates a pull before creating the container. This option makes sure the image is up-to-date, and prevents you from using outdated images even if a local image exists. Use this pull policy if: - Runners must always pull the most recent images. - Runners are publicly available and configured for [auto-scale](../configuration/autoscale.md) or as an instance runner in your GitLab instance. **Do not use** this policy if runners must use locally stored images. Set `always` as the `pull policy` in the `config.toml`: ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) pull_policy = "always" ``` ### Set the `if-not-present` pull policy When you set the pull policy to `if-not-present`, the runner first checks if a local image exists. If there is no local image, the runner pulls an image from the registry. Use the `if-not-present` policy to: - Use local images but also pull images if a local image does not exist. - Reduce time that runners analyze the difference in image layers for heavy and rarely updated images. In this case, you must manually remove the image regularly from the local Docker Engine store to force the image update. **Do not use** this policy: - For instance runners where different users that use the runner may have access to private images. 
For more information about security issues, see [Usage of private Docker images with if-not-present pull policy](../security/_index.md#usage-of-private-docker-images-with-if-not-present-pull-policy). - If jobs are frequently updated and must be run in the most recent image version. This may result in a network load reduction that outweighs the value of frequent deletion of local images. Set the `if-not-present` policy in the `config.toml`: ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) pull_policy = "if-not-present" ``` ### Set the `never` pull policy Prerequisites: - Local images must contain an installed Docker Engine and a local copy of used images. When you set the pull policy to `never`, image pulling is disabled. Users can only use images that have been manually pulled on the Docker host where the runner runs. Use the `never` pull policy: - To control the images used by runner users. - For private runners that are dedicated to a project that can only use specific images that are not publicly available on any registries. **Do not use** the `never` pull policy for [auto-scaled](../configuration/autoscale.md) Docker executors. The `never` pull policy is usable only when using a pre-defined cloud instance images for chosen cloud provider. Set the `never` policy in the `config.toml`: ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) pull_policy = "never" ``` ### Set multiple pull policies You can list multiple pull policies to execute if a pull fails. The runner processes pull policies in the order listed until a pull attempt is successful or the list is exhausted. For example, if a runner uses the `always` pull policy and the registry is not available, you can add the `if-not-present` as a second pull policy. This configuration lets the runner use a locally cached Docker image. 
For information about the security implications of this pull policy, see [Usage of private Docker images with if-not-present pull policy](../security/_index.md#usage-of-private-docker-images-with-if-not-present-pull-policy). To set multiple pull policies, add them as a list in the `config.toml`: ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) pull_policy = ["always", "if-not-present"] ``` ### Allow Docker pull policies {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26753) in GitLab 15.1. {{< /history >}} In the `.gitlab-ci.yml` file, you can specify a pull policy. This policy determines how a CI/CD job fetches images. To restrict which pull policies can be used from those specified in the `.gitlab-ci.yml` file, use `allowed_pull_policies`. For example, to allow only the `always` and `if-not-present` pull policies, add them to the `config.toml`: ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) allowed_pull_policies = ["always", "if-not-present"] ``` - If you don't specify `allowed_pull_policies`, the list matches the values specified in the `pull_policy` keyword. - If you don't specify `pull_policy`, the default is `always`. - The job uses only the pull policies that are listed in both `pull_policy` and `allowed_pull_policies`. The effective pull policy is determined by comparing the policies specified in [`pull_policy` keyword](#configure-how-runners-pull-images) and `allowed_pull_policies`. GitLab uses the [intersection](https://en.wikipedia.org/wiki/Intersection_(set_theory)) of these two policy lists. For example, if `pull_policy` is `["always", "if-not-present"]` and `allowed_pull_policies` is `["if-not-present"]`, then the job uses only `if-not-present` because it's the only pull policy defined in both lists. - The existing `pull_policy` keyword must include at least one pull policy specified in `allowed_pull_policies`. 
The job fails if none of the `pull_policy` values match `allowed_pull_policies`. ### Image pull error messages | Error message | Description | |-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------| | `Pulling docker image registry.tld/my/image:latest ... ERROR: Build failed: Error: image registry.tld/my/image:latest not found` | The runner cannot find the image. Displays when the `always` pull policy is set | | `Pulling docker image local_image:latest ... ERROR: Build failed: Error: image local_image:latest not found` | The image was built locally and doesn't exist in any public or default Docker registry. Displays when the `always` pull policy is set. | | `Pulling docker image registry.tld/my/image:latest ... WARNING: Cannot pull the latest version of image registry.tld/my/image:latest : Error: image registry.tld/my/image:latest not found WARNING: Locally found image will be used instead.` | The runner has used a local image instead of pulling an image. | | `Pulling docker image local_image:latest ... ERROR: Build failed: Error: image local_image:latest not found` | The image cannot be found locally. Displays when the `never` pull policy is set. | | `WARNING: Failed to pull image with policy "always": Error response from daemon: received unexpected HTTP status: 502 Bad Gateway (docker.go:143:0s) Attempt #2: Trying "if-not-present" pull policy Using locally found image version due to "if-not-present" pull policy` | The runner failed to pull an image and attempts to pull an image by using the next listed pull policy. Displays when multiple pull policies are set. | ## Retry a failed pull To configure a runner to retry a failed image pull, specify the same policy more than once in the `config.toml`. 
For example, this configuration retries the pull one time: ```toml [runners.docker] pull_policy = ["always", "always"] ``` This setting is similar to [the `retry` directive](https://docs.gitlab.com/ci/yaml/#retry) in the `.gitlab-ci.yml` files of individual projects, but only takes effect if specifically the Docker pull fails initially. ## Use Windows containers To use Windows containers with the Docker executor, note the following information about limitations, supported Windows versions, configuring a Windows Docker executor, and Windows helper images. ### Supported Windows versions GitLab Runner only supports the following versions of Windows which follows our [support lifecycle for Windows](../install/support-policy.md#windows-version-support): - Windows Server 2025 LTSC (24H2) - Windows Server 2022 LTSC (21H2) - Windows Server 2019 LTSC (1809) Windows containers support backward compatibility based on the host OS and isolation mode. Newer hosts can run older container images. For compatibility details, see [Microsoft Windows container version compatibility guidelines](https://learn.microsoft.com/en-us/virtualization/windowscontainers/deploy-containers/version-compatibility). You can use various Windows base images, including `Server Core`, `Nano Server`, `Server`, and `Windows`. For example, use the [`Windows Server Core`](https://hub.docker.com/r/microsoft/windows-servercore) images with their compatible OS versions: - `mcr.microsoft.com/windows/servercore:ltsc2025` - `mcr.microsoft.com/windows/servercore:ltsc2025-amd64` - `mcr.microsoft.com/windows/servercore:ltsc2022` - `mcr.microsoft.com/windows/servercore:ltsc2022-amd64` - `mcr.microsoft.com/windows/servercore:1809` - `mcr.microsoft.com/windows/servercore:1809-amd64` - `mcr.microsoft.com/windows/servercore:ltsc2019` ### Supported Docker versions GitLab Runner uses Docker to detect what version of Windows Server is running. 
Hence, a Windows Server running GitLab Runner must be running a recent version of Docker. A known version of Docker that doesn't work with GitLab Runner is `Docker 17.06`. Docker does not identify the version of Windows Server resulting in the following error: ```plaintext unsupported Windows Version: Windows Server Datacenter ``` [Read more about troubleshooting this](../install/windows.md#docker-executor-unsupported-windows-version). ### Configure a Windows Docker executor > [!note] > When a runner is registered with `c:\\cache` > as a source directory when passing the `--docker-volumes` or > `DOCKER_VOLUMES` environment variable, there is a > [known issue](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4312). Below is an example of the configuration for a Docker executor running Windows. ```toml [[runners]] name = "windows-docker-2019" url = "https://gitlab.com/" token = "xxxxxxx" executor = "docker-windows" [runners.docker] image = "mcr.microsoft.com/windows/servercore:1809_amd64" volumes = ["c:\\cache"] ``` For other configuration options for the Docker executor, see the [advanced configuration](../configuration/advanced-configuration.md#the-runnersdocker-section) section. ### Windows helper images GitLab Runner provides several helper images tailored for different Windows versions and PowerShell requirements. Available variants: - `gitlab/gitlab-runner-helper:x86_64-vXYZ-nanoserver21H2` - `gitlab/gitlab-runner-helper:x86_64-vXYZ-servercore21H2` - `gitlab/gitlab-runner-helper:x86_64-vXYZ-nanoserver1809` - `gitlab/gitlab-runner-helper:x86_64-vXYZ-servercore1809` > [!note] > Due to Windows container backward compatibility, Windows Server 2025 (24H2) can use the 21H2 (Windows Server 2022) helper images. Choose your helper image based on your shell requirements. The `servercore` image is the default and supports both `PowerShell` and `Pwsh`. For containers that only use `pwsh`, use the lighter `nanoserver` image. 
### Services You can use [services](https://docs.gitlab.com/ci/services/) by enabling [a network for each job](#create-a-network-for-each-job). ### Known issues with Docker executor on Windows The following are some limitations of using Windows containers with Docker executor: - Docker-in-Docker is not supported, because it's [not supported](https://github.com/docker-library/docker/issues/49) by Docker itself. - Host device mounting not supported. - When mounting a volume directory it has to exist, or Docker fails to start the container, see [#3754](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/3754) for additional detail. - `docker-windows` executor can be run only using GitLab Runner running on Windows. - [Linux containers on Windows](https://learn.microsoft.com/en-us/virtualization/windowscontainers/deploy-containers/set-up-linux-containers) are not supported, because they are still experimental. Read [the relevant issue](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4373) for more details. - Because of a [limitation in Docker](https://github.com/MicrosoftDocs/Virtualization-Documentation/pull/331), if the destination path drive letter is not `c:`, paths are not supported for: - [`builds_dir`](../configuration/advanced-configuration.md#the-runners-section) - [`cache_dir`](../configuration/advanced-configuration.md#the-runners-section) - [`volumes`](../configuration/advanced-configuration.md#volumes-in-the-runnersdocker-section) This means values such as `f:\\cache_dir` are not supported, but `f:` is supported. However, if the destination path is on the `c:` drive, paths are also supported (for example `c:\\cache_dir`). To configure where the Docker daemon keeps images and containers, update the `data-root` parameter in the `daemon.json` file of the Docker daemon. 
For more information, see [Configure Docker with a configuration file](https://learn.microsoft.com/en-us/virtualization/windowscontainers/manage-docker/configure-docker-daemon#configure-docker-with-a-configuration-file). ## Native Step Runner Integration {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5069) in GitLab 17.6.0 behind the feature flag `FF_USE_NATIVE_STEPS`, which is disabled by default. - [Updated](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5322) in GitLab 17.9.0. GitLab Runner injects the `step-runner` binary into the build container and adjusts the `$PATH` environment variable accordingly. This enhancement makes it possible to use any image as the build image. {{< /history >}} The Docker executor supports running the [CI/CD steps](https://docs.gitlab.com/ci/steps/) natively by using the `gRPC` API provided by [`step-runner`](https://gitlab.com/gitlab-org/step-runner). To enable this mode of execution, you must specify CI/CD jobs using the `run` keyword instead of the legacy `script` keyword. Additionally, you must enable the `FF_USE_NATIVE_STEPS` feature flag. You can enable this feature flag at either the job or pipeline level. ```yaml step job: stage: test variables: FF_USE_NATIVE_STEPS: true image: name: alpine:latest run: - name: step1 script: pwd - name: step2 script: env - name: step3 script: ls -Rlah ../ ``` ### Known Issues - In GitLab 17.9 and later, the build image must have the `ca-certificates` package installed or the `step-runner` will fail to pull the steps defined in the job. Debian-based Linux distributions, for example, do not install `ca-certificates` by default. - In GitLab versions before 17.9, the build image must include a `step-runner` binary in `$PATH`. To achieve this, you can either: - Create your own custom build image and include the `step-runner` binary in it.
- Use the `registry.gitlab.com/gitlab-org/step-runner:v0` image if it includes the dependencies you need to run your job. - Running a step that runs a Docker container must adhere to the same configuration parameters and constraints as traditional `scripts`. For example, you must use [Docker-in-Docker](#use-docker-in-docker-with-privileged-mode). - This mode of execution does not yet support running [`Github Actions`](https://gitlab.com/components/action-runner). ================================================ FILE: docs/executors/docker_autoscaler.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Docker Autoscaler executor --- {{< history >}} - Introduced in GitLab Runner 15.11.0 as an [experiment](https://docs.gitlab.com/policy/development_stages_support/#experiment). - [Changed](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29404) to [beta](https://docs.gitlab.com/policy/development_stages_support/#beta) in GitLab Runner 16.6. - [Generally available](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29221) in GitLab Runner 17.1. {{< /history >}} Before you use the Docker Autoscaler executor, see the [feedback issue](https://gitlab.com/gitlab-org/gitlab/-/issues/408131) about GitLab Runner autoscaling for a list of known issues. The Docker Autoscaler executor is an autoscale-enabled Docker executor that creates instances on demand to accommodate the jobs that the runner manager processes. It wraps the [Docker executor](docker.md) so that all Docker executor options and features are supported. The Docker Autoscaler uses [fleeting plugins](https://gitlab.com/gitlab-org/fleeting/plugins) to autoscale. Fleeting is an abstraction for a group of autoscaled instances, which uses plugins that support cloud providers, like Google Cloud, AWS, and Azure. 
## Install a fleeting plugin To install a plugin for your target platform, see [Install the fleeting plugin](../fleet_scaling/fleeting.md#install-a-fleeting-plugin). For specific configuration details, see the [respective plugin project documentation](https://gitlab.com/gitlab-org/fleeting/plugins). ## Configure Docker Autoscaler The Docker Autoscaler executor wraps the [Docker executor](docker.md) so that all Docker executor options and features are supported. To configure the Docker Autoscaler, in the `config.toml`: - In the [`[runners]`](../configuration/advanced-configuration.md#the-runners-section) section, specify the `executor` as `docker-autoscaler`. - In the following sections, configure the Docker Autoscaler based on your requirements: - [`[runners.docker]`](../configuration/advanced-configuration.md#the-runnersdocker-section) - [`[runners.autoscaler]`](../configuration/advanced-configuration.md#the-runnersautoscaler-section) ### Dedicated autoscaling groups for each runner configuration Each Docker Autoscaler configuration must have its own dedicated autoscaling resource: - For AWS, a dedicated auto scaling group - For GCP, a dedicated instance group - For Azure, a dedicated scale set Do not share these autoscaling resources across: - Multiple runner managers (separate GitLab Runner installations) - Multiple `[[runners]]` entries within the same runner manager's `config.toml` The Docker Autoscaler keeps track of the instance state that must be synchronized with the cloud provider's autoscaling resource. When multiple systems attempt to manage the same autoscaling resource, they might issue conflicting scaling commands, resulting in unpredictable behavior, job failures, and potentially higher costs. ### Example: AWS autoscaling for 1 job per instance Prerequisites: - An AMI with [Docker Engine](https://docs.docker.com/engine/) installed. To enable Runner Manager's access to the Docker socket on the AMI, the user must be part of the `docker` group. 
> [!note] > The AMI does not require GitLab Runner to be installed. The instances launched using the AMI must not register themselves as runners in GitLab. - An AWS autoscaling group. The runner directly manages all scaling behavior. For the scaling policy, use `none` and turn on instance scale-in protection. If you have configured multiple availability zones, turn off the `AZRebalance` process. - An IAM policy with the [correct permissions](https://gitlab.com/gitlab-org/fleeting/plugins/aws#recommended-iam-policy). This configuration supports: - A capacity per instance of 1 - A use count of 1 - An idle scale of 5 - An idle time of 20 minutes - A maximum instance count of 10 By setting both the capacity and use count to 1, each job is given a secure ephemeral instance that cannot be affected by other jobs. As soon as the job is complete, the instance it was executed on is immediately deleted. With an idle scale of 5, the runner tries to keep 5 whole instances (because the capacity per instance is 1) available for future demand. These instances stay for at least 20 minutes. The runner `concurrent` field is set to 10 (maximum number of instances * capacity per instance).
```toml concurrent = 10 [[runners]] name = "docker autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" # use powershell or pwsh for Windows AMIs # uncomment for Windows AMIs when the Runner manager is hosted on Linux # environment = ["FF_USE_POWERSHELL_PATH_RESOLVER=1"] executor = "docker-autoscaler" # Docker Executor config [runners.docker] image = "busybox:latest" # Autoscaler config [runners.autoscaler] plugin = "aws" # in GitLab 16.11 and later, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # in GitLab 16.10 and earlier, manually install the plugin and use: # plugin = "fleeting-plugin-aws" capacity_per_instance = 1 max_use_count = 1 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-docker-asg" # AWS Autoscaling Group name profile = "default" # optional, default is 'default' config_file = "/home/user/.aws/config" # optional, default is '~/.aws/config' credentials_file = "/home/user/.aws/credentials" # optional, default is '~/.aws/credentials' [runners.autoscaler.connector_config] username = "ec2-user" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ### Example: Google Cloud instance group for 1 job per instance Prerequisites: - A VM image with [Docker Engine](https://docs.docker.com/engine/) installed, such as [`COS`](https://docs.cloud.google.com/container-optimized-os/docs). > [!note] > The VM image does not require GitLab Runner to be installed. The instances launched using the VM image must not register themselves as runners in GitLab. - A single-zone Google Cloud instance group. For **Autoscaling mode**, select **Do not autoscale**. The runner handles autoscaling, not the Google Cloud instance group. > [!note] > Multi-zone instance groups are not currently supported. 
An [issue](https://gitlab.com/gitlab-org/fleeting/plugins/googlecloud/-/issues/20) > exists to support multi-zone instance groups in the future. - An IAM policy with the [correct permissions](https://gitlab.com/gitlab-org/fleeting/plugins/googlecloud#required-permissions). If you're deploying your runner in a GKE cluster, you can add an IAM binding between the Kubernetes service account and the GCP service account. You can add this binding with the `iam.workloadIdentityUser` role to authenticate to GCP instead of using a key file with `credentials_file`. This configuration supports: - A capacity per instance of 1 - A use count of 1 - An idle scale of 5 - An idle time of 20 minutes - A maximum instance count of 10 By setting both the capacity and use count to 1, each job is given a secure ephemeral instance that cannot be affected by other jobs. As soon as the job is complete, the instance it was executed on is immediately deleted. With an idle scale of 5, the runner tries to keep 5 whole instances (because the capacity per instance is 1) available for future demand. These instances stay for at least 20 minutes. The runner `concurrent` field is set to 10 (maximum number of instances * capacity per instance).
```toml concurrent = 10 [[runners]] name = "docker autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" # use powershell or pwsh for Windows Images # uncomment for Windows Images when the Runner manager is hosted on Linux # environment = ["FF_USE_POWERSHELL_PATH_RESOLVER=1"] executor = "docker-autoscaler" # Docker Executor config [runners.docker] image = "busybox:latest" # Autoscaler config [runners.autoscaler] plugin = "googlecloud" # for >= 16.11, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # for versions < 17.0, manually install the plugin and use: # plugin = "fleeting-plugin-googlecompute" capacity_per_instance = 1 max_use_count = 1 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-docker-instance-group" # Google Cloud Instance Group name project = "my-gcp-project" zone = "europe-west1" credentials_file = "/home/user/.config/gcloud/application_default_credentials.json" # optional, default is '~/.config/gcloud/application_default_credentials.json' [runners.autoscaler.connector_config] username = "runner" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ### Example: Azure scale set for 1 job per instance Prerequisites: - An Azure VM image with [Docker Engine](https://docs.docker.com/engine/) installed. > [!note] > The VM image does not require GitLab Runner to be installed. The instances launched using the VM image must not register themselves as runners in GitLab. - An Azure scale set where the autoscaling policy is set to `manual`. The runner handles the scaling. This configuration supports: - A capacity per instance of 1 - A use count of 1 - An idle scale of 5 - An idle time of 20 minutes - A maximum instance count of 10 When the capacity and use count are both set to `1`, each job is given a secure ephemeral instance that cannot be affected by other jobs. 
When the job completes, the instance it was executed on is immediately deleted. When the idle scale is set to `5`, the runner keeps 5 instances available for future demand (because the capacity per instance is 1). These instances stay for at least 20 minutes. The runner `concurrent` field is set to 10 (maximum number instances * capacity per instance). ```toml concurrent = 10 [[runners]] name = "docker autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" # use powershell or pwsh for Windows AMIs # uncomment for Windows AMIs when the Runner manager is hosted on Linux # environment = ["FF_USE_POWERSHELL_PATH_RESOLVER=1"] executor = "docker-autoscaler" # Docker Executor config [runners.docker] image = "busybox:latest" # Autoscaler config [runners.autoscaler] plugin = "azure" # for >= 16.11, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # for versions < 17.0, manually install the plugin and use: # plugin = "fleeting-plugin-azure" capacity_per_instance = 1 max_use_count = 1 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-docker-scale-set" subscription_id = "9b3c4602-cde2-4089-bed8-889e5a3e7102" resource_group_name = "my-resource-group" [runners.autoscaler.connector_config] username = "azureuser" password = "my-scale-set-static-password" use_static_credentials = true timeout = "10m" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ## Slot-based cgroup support The Docker Autoscaler executor supports slot-based cgroups for improved resource isolation between concurrent jobs. Cgroup paths are automatically applied to Docker containers using the `--cgroup-parent` flag. For detailed information about slot-based cgroups, including benefits, prerequisites, and setup instructions, see [slot-based cgroup support](../configuration/slot_based_cgroups.md). 
### Docker-specific configuration In addition to the standard slot cgroup configuration, you can specify a separate cgroup template for service containers: ```toml [[runners]] executor = "docker-autoscaler" use_slot_cgroups = true slot_cgroup_template = "gitlab-runner/slot-${slot}" [runners.docker] service_slot_cgroup_template = "gitlab-runner/service-slot-${slot}" ``` For all available options, see the [slot-based cgroup configuration documentation](../configuration/slot_based_cgroups.md#docker-specific-configuration). ## Troubleshooting ### `ERROR: error during connect: ssh tunnel: EOF ()` When instances are removed by an external source (for example, an autoscaling group or automated script), jobs fail with the following error: ```plaintext ERROR: Job failed (system failure): error during connect: Post "http://internal.tunnel.invalid/v1.43/containers/xyz/wait?condition=not-running": ssh tunnel: EOF () ``` And the GitLab Runner logs show an `instance unexpectedly removed` error for the instance ID assigned to the job: ```plaintext ERROR: instance unexpectedly removed instance= max-use-count=9999 runner=XYZ slots=map[] subsystem=taskscaler used=45 ``` To resolve this error, check the events related to the instance on your cloud provider platform. For example, on AWS, check the CloudTrail event history for the event source `ec2.amazonaws.com`.
### `ERROR: Preparation failed: unable to acquire instance: context deadline exceeded` When you use the [AWS fleeting plugin](https://gitlab.com/gitlab-org/fleeting/plugins/aws), jobs might fail intermittently with the following error: ```plaintext ERROR: Preparation failed: unable to acquire instance: context deadline exceeded ``` This often shows up in the AWS CloudWatch logs because the `reserved` instance count oscillates up and down: ```plaintext "2024-07-23T18:10:24Z","instance_count:1,max_instance_count:1000,acquired:0,unavailable_capacity:0,pending:0,reserved:0,idle_count:0,scale_factor:0,scale_factor_limit:0,capacity_per_instance:1","required scaling change", "2024-07-23T18:10:25Z","instance_count:1,max_instance_count:1000,acquired:0,unavailable_capacity:0,pending:0,reserved:1,idle_count:0,scale_factor:0,scale_factor_limit:0,capacity_per_instance:1","required scaling change", "2024-07-23T18:11:15Z","instance_count:1,max_instance_count:1000,acquired:0,unavailable_capacity:0,pending:0,reserved:0,idle_count:0,scale_factor:0,scale_factor_limit:0,capacity_per_instance:1","required scaling change", "2024-07-23T18:11:16Z","instance_count:1,max_instance_count:1000,acquired:0,unavailable_capacity:0,pending:0,reserved:1,idle_count:0,scale_factor:0,scale_factor_limit:0,capacity_per_instance:1","required scaling change", ``` To resolve this error, ensure that the `AZRebalance` process is disabled for your autoscaling group in AWS. ### `Job failures when scaling from zero instances on Azure VMSS` Microsoft Azure Virtual Machine Scale Sets have an [overprovisioning feature](https://learn.microsoft.com/en-us/azure/virtual-machine-scale-sets/virtual-machine-scale-sets-design-overview#overprovisioning), which can cause job failures. When Azure scales up, it creates extra VMs to ensure capacity and then terminates them after it meets the requested capacity. 
This behavior conflicts with GitLab Runner's instance tracking, which causes the autoscaler to assign jobs to instances that Azure is about to terminate. Disable overprovisioning by setting `overprovision` to `false` in your VMSS configuration. ================================================ FILE: docs/executors/docker_machine.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Install and register GitLab Runner for autoscaling with Docker Machine --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} > [!note] > The Docker Machine executor was deprecated in GitLab 17.5 and is scheduled for removal in GitLab 20.0 (May 2027). > While we continue to support the Docker Machine executor till GitLab 20.0, we do not plan to add new features. > We will address only critical bugs that could prevent CI/CD job execution or affect running costs. > If you're using the Docker Machine executor on Amazon Web Services (AWS) EC2, > Microsoft Azure Compute, or Google Compute Engine (GCE), you should migrate to the > [GitLab Runner Autoscaler](../runner_autoscale/_index.md). For an overview of the autoscale architecture, take a look at the [comprehensive documentation on autoscaling](../configuration/autoscale.md). ## Forked version of Docker machine Docker has [deprecated Docker Machine](https://gitlab.com/gitlab-org/gitlab/-/issues/341856). However, GitLab maintains a [Docker Machine fork](https://gitlab.com/gitlab-org/ci-cd/docker-machine) for GitLab Runner users who rely on the Docker Machine executor. 
This fork is based on the latest `main` branch of `docker-machine` with some additional patches for the following bugs: - [Make DigitalOcean driver RateLimit aware](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/2) - [Add backoff to Google driver operations check](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/7) - [Add `--google-min-cpu-platform` option for machine creation](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/9) - [Use cached IP for Google driver](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/15) - [Use cached IP for AWS driver](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/14) - [Add support for using GPUs in Google Compute Engine](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/48) - [Support running AWS instances with IMDSv2](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/49) The intent of the [Docker Machine fork](https://gitlab.com/gitlab-org/ci-cd/docker-machine) is to only fix critical issues and bugs which affect running costs. We don't plan to add any new features. ## Preparing the environment To use the autoscale feature, Docker and GitLab Runner must be installed in the same machine: 1. Sign in to a new Linux-based machine that can function as a bastion server where Docker creates new machines. 1. [Install GitLab Runner](../install/_index.md). 1. Install Docker Machine from the [Docker Machine fork](https://gitlab.com/gitlab-org/ci-cd/docker-machine). 1. Optionally but recommended, prepare a [proxy container registry and a cache server](../configuration/speed_up_job_execution.md) to be used with the autoscaled runners. ## Configuring GitLab Runner 1. 
Familiarize yourself with the core concepts of using `docker-machine` with `gitlab-runner`: - Read [GitLab Runner Autoscaling](../configuration/autoscale.md) - Read [GitLab Runner MachineOptions](../configuration/advanced-configuration.md#the-runnersmachine-section) 1. The **first time** you're using Docker Machine, it is best to manually execute the `docker-machine create ...` command with your [Docker Machine Driver](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/tree/main/drivers). Run this command with the options that you intend to configure in the [MachineOptions](../configuration/advanced-configuration.md#the-runnersmachine-section) under the `[runners.machine]` section. This approach sets up the Docker Machine environment properly and validates the specified options. After this, you can destroy the machine with `docker-machine rm [machine_name]` and start the runner. > [!note] > Multiple concurrent requests to `docker-machine create` that are done > **at first usage** are not good. When the `docker+machine` executor is used, > the runner may spin up few concurrent `docker-machine create` commands. > If Docker Machine is new to this environment, each process tries to create > SSH keys and SSL certificates for Docker API authentication. This action causes the > concurrent processes to interfere with each other. This can end with a non-working > environment. That's why it's important to create a test machine manually the > very first time you set up GitLab Runner with Docker Machine. 1. [Register a runner](../register/_index.md) and select the `docker+machine` executor when asked. 1. Edit [`config.toml`](../commands/_index.md#configuration-file) and configure the runner to use Docker machine. Visit the dedicated page covering detailed information about [GitLab Runner Autoscaling](../configuration/autoscale.md). 1. Now, you can try and start a new pipeline in your project. 
In a few seconds, if you run `docker-machine ls` you should see a new machine being created. ## Upgrading GitLab Runner 1. Check if your operating system is configured to automatically restart GitLab Runner (for example, by checking its service file): - **if yes**, ensure that service manager is [configured to use `SIGQUIT`](../configuration/init.md) and use the service's tools to stop the process: ```shell # For systemd sudo systemctl stop gitlab-runner # For upstart sudo service gitlab-runner stop ``` - **if no**, you may stop the process manually: ```shell sudo killall -SIGQUIT gitlab-runner ``` Sending the [`SIGQUIT` signal](../commands/_index.md#signals) makes the process stop gracefully. The process stops accepting new jobs, and exits as soon as the current jobs are finished. 1. Wait until GitLab Runner exits. You can check its status with `gitlab-runner status` or await a graceful shutdown for up to 30 minutes with: ```shell for i in `seq 1 180`; do # 1800 seconds = 30 minutes gitlab-runner status || break sleep 10 done ``` 1. You can now safely install the new version of GitLab Runner without interrupting any jobs. ## Using the forked version of Docker Machine ### Install 1. Download the [appropriate `docker-machine` binary](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/releases). Copy the binary to a location accessible to `PATH` and make it executable. For example, to download and install `v0.16.2-gitlab.46`: ```shell curl -O "https://gitlab-docker-machine-downloads.s3.amazonaws.com/v0.16.2-gitlab.46/docker-machine-Linux-x86_64" cp docker-machine-Linux-x86_64 /usr/local/bin/docker-machine chmod +x /usr/local/bin/docker-machine ``` ### Using GPUs on Google Compute Engine > [!note] > GPUs are [supported on every executor](../configuration/gpus.md). It is > not necessary to use Docker Machine just for GPU support. The Docker > Machine executor scales the GPU nodes up and down. 
> You can also use the [Kubernetes executor](kubernetes/_index.md) for this purpose. You can use the Docker Machine [fork](#forked-version-of-docker-machine) to create [Google Compute Engine instances with graphics processing units (GPUs)](https://docs.cloud.google.com/compute/docs/gpus). #### Docker Machine GPU options To create an instance with GPUs, use these Docker Machine options: | Option | Example | Description | |-------------------------------|--------------------------------|-------------| | `--google-accelerator` | `type=nvidia-tesla-p4,count=1` | Specifies the type and number of GPU accelerators to attach to the instance (`type=TYPE,count=N` format) | | `--google-maintenance-policy` | `TERMINATE` | Always use `TERMINATE` because [Google Cloud does not allow live migration of GPU instances](https://docs.cloud.google.com/compute/docs/instances/live-migration-process). | | `--google-machine-image` | `https://www.googleapis.com/compute/v1/projects/deeplearning-platform-release/global/images/family/tf2-ent-2-3-cu110` | The URL of a GPU-enabled operating system. See the [list of available images](https://docs.cloud.google.com/deep-learning-vm/docs/images). | | `--google-metadata` | `install-nvidia-driver=True` | This flag tells the image to install the NVIDIA GPU driver. | These arguments map to [command-line arguments for `gcloud compute`](https://docs.cloud.google.com/compute/docs/gcloud-compute). See the [Google documentation on creating VMs with attached GPUs](https://docs.cloud.google.com/compute/docs/gpus/create-vm-with-gpus) for more details. #### Verifying Docker Machine options To prepare your system and test that GPUs can be created with Google Compute Engine: 1. [Set up the Google Compute Engine driver credentials](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/gce.md#credentials) for Docker Machine. You may need to export environment variables to the runner if your VM does not have a default service account. 
How this is done depends on how the runner is launched. For example, by using: - `systemd` or `upstart`: See the [documentation on setting custom environment variables](../configuration/init.md#setting-custom-environment-variables). - Kubernetes with the Helm Chart: Update [the `values.yaml` entry](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/blob/5e7c5c0d6e1159647d65f04ff2cc1f45bb2d5efc/values.yaml#L431-438). - Docker: Use the `-e` option (for example, `docker run -e GOOGLE_APPLICATION_CREDENTIALS=/path/to/credentials.json gitlab/gitlab-runner`). 1. Verify that `docker-machine` can create a virtual machine with your desired options. For example, to create an `n1-standard-1` machine with a single NVIDIA Tesla P4 accelerator, substitute `test-gpu` with a name and run: ```shell docker-machine create --driver google --google-project your-google-project \ --google-disk-size 50 \ --google-machine-type n1-standard-1 \ --google-accelerator type=nvidia-tesla-p4,count=1 \ --google-maintenance-policy TERMINATE \ --google-machine-image https://www.googleapis.com/compute/v1/projects/deeplearning-platform-release/global/images/family/tf2-ent-2-3-cu110 \ --google-metadata "install-nvidia-driver=True" test-gpu ``` 1. To verify the GPU is active, SSH into the machine and run `nvidia-smi`: ```shell $ docker-machine ssh test-gpu sudo nvidia-smi +-----------------------------------------------------------------------------+ | NVIDIA-SMI 450.51.06 Driver Version: 450.51.06 CUDA Version: 11.0 | |-------------------------------+----------------------+----------------------+ | GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC | | Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. | | | | MIG M. 
| |===============================+======================+======================| | 0 Tesla P4 Off | 00000000:00:04.0 Off | 0 | | N/A 43C P0 22W / 75W | 0MiB / 7611MiB | 3% Default | | | | N/A | +-------------------------------+----------------------+----------------------+ +-----------------------------------------------------------------------------+ | Processes: | | GPU GI CI PID Type Process name GPU Memory | | ID ID Usage | |=============================================================================| | No running processes found | +-----------------------------------------------------------------------------+ ``` 1. Remove this test instance to save money: ```shell docker-machine rm test-gpu ``` #### Configuring GitLab Runner 1. After you have verified these options, configure the Docker executor to use all available GPUs in the [`runners.docker` configuration](../configuration/advanced-configuration.md#the-runnersdocker-section). Then add the Docker Machine options to your [`MachineOptions` settings in the GitLab Runner `runners.machine` configuration](../configuration/advanced-configuration.md#the-runnersmachine-section). For example: ```toml [runners.docker] gpus = "all" [runners.machine] MachineOptions = [ "google-project=your-google-project", "google-disk-size=50", "google-disk-type=pd-ssd", "google-machine-type=n1-standard-1", "google-accelerator=count=1,type=nvidia-tesla-p4", "google-maintenance-policy=TERMINATE", "google-machine-image=https://www.googleapis.com/compute/v1/projects/deeplearning-platform-release/global/images/family/tf2-ent-2-3-cu110", "google-metadata=install-nvidia-driver=True" ] ``` ## Troubleshooting When working with the Docker Machine executor, you might encounter the following issues. ### Error: Error creating machine When installing Docker Machine, you might encounter an error that states `ERROR: Error creating machine: Error running provisioning: error installing docker`. 
Docker Machine attempts to install Docker on a newly provisioned virtual machine using this script: ```shell if ! type docker; then curl -sSL "https://get.docker.com" | sh -; fi ``` If the `docker` command succeeds, Docker Machine assumes Docker is installed and continues. If it does not succeed, Docker Machine attempts to download and run the script at `https://get.docker.com`. If the installation fails, it's possible the operating system is no longer supported by Docker. To troubleshoot this issue, you can enable debugging on Docker Machine by setting `MACHINE_DEBUG=true` in the environment where GitLab Runner is installed. ### Error: Cannot connect to the Docker daemon The job might fail during the prepare stage with an error message: ```plaintext Preparing environment ERROR: Job failed (system failure): prepare environment: Cannot connect to the Docker daemon at tcp://10.200.142.223:2376. Is the docker daemon running? (docker.go:650:120s). Check https://docs.gitlab.com/runner/shells/#shell-profile-loading for more information ``` This error occurs when the Docker daemon fails to start in the expected time in the VM created by the Docker Machine executor. To fix this issue, increase the `wait_for_services_timeout` value in the [`[runners.docker]`](../configuration/advanced-configuration.md#the-runnersdocker-section) section. ================================================ FILE: docs/executors/instance.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Instance executor --- {{< history >}} - Introduced in GitLab Runner 15.11.0 as an [experiment](https://docs.gitlab.com/policy/development_stages_support/#experiment). - [Changed](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29404) to [beta](https://docs.gitlab.com/policy/development_stages_support/#beta) in GitLab Runner 16.6. 
- [Generally available](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29221) in GitLab Runner 17.1. {{< /history >}} The instance executor is an autoscale-enabled executor that creates instances on demand to accommodate the expected volume of jobs that the runner manager processes. You can use the instance executor when jobs need full access to the host instance, operating system, and attached devices. The instance executor can also be configured to accommodate single-tenant and multi-tenant jobs with various levels of isolation and security. ## Nested virtualization The instance executor supports nested virtualization with the GitLab-developed [nesting daemon](https://gitlab.com/gitlab-org/fleeting/nesting). The nesting daemon enables creation and deletion of pre-configured virtual machines on host systems used for isolated and short-lived workloads, like jobs. Nesting is only supported on Apple Silicon instances. ## Prepare the environment for autoscaling To prepare the environment for autoscaling: 1. [Install a fleeting plugin](../fleet_scaling/fleeting.md#install-a-fleeting-plugin) for your target platform where the runner manager is installed and configured. 1. Create a VM image for the platform you're using. The image must include: - Git - GitLab Runner binary > [!note] > To process job artifacts and cache, install the GitLab Runner binary on the virtual machine and keep the > runner executable in the default path. > The VM image does not require GitLab Runner to run. The instances launched using the VM image must not register > themselves as runners in GitLab. - Dependencies required by the jobs you plan to run ## Configure the executor to autoscale Prerequisites: - You must be an administrator. 
To configure the instance executor for autoscaling, update the following sections in the `config.toml`: - [`[runners.autoscaler]`](../configuration/advanced-configuration.md#the-runnersautoscaler-section) - [`[runners.instance]`](../configuration/advanced-configuration.md#the-runnersinstance-section) ## Preemptive mode With fleeting and taskscaler: - When turned on, the runner manager does not request new CI/CD jobs until idle instances are available. In this mode, CI/CD jobs run almost immediately. - If preemptive mode is turned off, the runner manager requests new CI/CD jobs regardless of whether idle instances are available to run those jobs. The number of jobs is based on `max_instances` and `capacity_per_instance`. In this mode, start times for CI/CD jobs are slower. You might be unable to provision new instances and so CI/CD jobs might not run. ## AWS autoscaling group configuration examples ### One job per instance Prerequisites: - An AMI with at least `git` and GitLab Runner installed. - An AWS Autoscaling group. For the scaling policy use `none`. The runner handles the scaling. - An IAM Policy with the [correct permissions](https://gitlab.com/gitlab-org/fleeting/plugins/aws#recommended-iam-policy). This configuration supports: - A capacity of `1` for each instance. - A use count of `1`. - An idle scale of `5`. - An idle time of 20 minutes. - A maximum instance count of `10`. When the capacity and use count are set to `1`, each job is given a secure ephemeral instance that cannot be affected by other jobs. When the job completes, the instance it was executed on is deleted immediately. When the capacity for each instance is `1`, and the idle scale is `5`, the runner keeps 5 whole instances available for future demand. These instances remain for at least 20 minutes. The runner `concurrent` field is set to 10 (maximum number of instances * capacity per instance). 
```toml concurrent = 10 [[runners]] name = "instance autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" # Autoscaler config [runners.autoscaler] plugin = "aws" # in GitLab 16.11 and later, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # in GitLab 16.10 and earlier, manually install the plugin and use: # plugin = "fleeting-plugin-aws" capacity_per_instance = 1 max_use_count = 1 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-linux-asg" # AWS Autoscaling Group name profile = "default" # optional, default is 'default' config_file = "/home/user/.aws/config" # optional, default is '~/.aws/config' credentials_file = "/home/user/.aws/credentials" # optional, default is '~/.aws/credentials' [runners.autoscaler.connector_config] username = "ec2-user" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ### Five jobs per instance with unlimited uses Prerequisites: - An AMI with at least `git` and GitLab Runner installed. - An AWS Autoscaling group with the scaling policy set to `none`. The runner handles the scaling. - An IAM Policy with the [correct permissions](https://gitlab.com/gitlab-org/fleeting/plugins/aws#recommended-iam-policy). This configuration supports: - A capacity of `5` for each instance. - An unlimited use count. - An idle scale of `5`. - An idle time of 20 minutes. - A maximum instance count of `10`. When you set the capacity per instance to `5` with unlimited use count, each instance concurrently executes five jobs throughout the instance lifetime. When the idle scale is `5` and idle capacity of instance is `5`, one idle instance is created whenever the in-use capacity falls below five. Idle instances remain for at least 20 minutes. 
Jobs executed in these environments should be **trusted** as there is little isolation between them and each job can affect the performance of another. The runner `concurrent` field is set to 50 (maximum number instances * capacity per instance). ```toml concurrent = 50 [[runners]] name = "instance autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" # Autoscaler config [runners.autoscaler] plugin = "aws" # in GitLab 16.11 and later, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # in GitLab 16.10 and earlier, manually install the plugin and use: # plugin = "fleeting-plugin-aws" capacity_per_instance = 5 max_use_count = 0 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-windows-asg" # AWS Autoscaling Group name profile = "default" # optional, default is 'default' config_file = "/home/user/.aws/config" # optional, default is '~/.aws/config' credentials_file = "/home/user/.aws/credentials" # optional, default is '~/.aws/credentials' [runners.autoscaler.connector_config] username = "Administrator" timeout = "5m0s" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ### Two jobs per instance, unlimited uses, nested virtualization on EC2 Mac instances Prerequisites: - An Apple Silicon AMI with [nesting](https://gitlab.com/gitlab-org/fleeting/nesting) and [Tart](https://github.com/cirruslabs/tart) installed. - The Tart VM images that the runner uses. The VM images are specified by the `image` keyword of the job. The VM images should have at least `git` and GitLab Runner installed. - An AWS Autoscaling group. For the scaling policy use `none`, because runner handles the scaling. For information about how to set up an ASG for MacOS, see [Implementing autoscaling for EC2 Mac instances](https://aws.amazon.com/blogs/compute/implementing-autoscaling-for-ec2-mac-instances/). 
- An IAM policy with the [correct permissions](https://gitlab.com/gitlab-org/fleeting/plugins/aws#recommended-iam-policy).

This configuration supports:

- A capacity of `2` for each instance.
- An unlimited use count.
- Nested virtualization to support isolated jobs. Nested virtualization is only available for Apple silicon instances with [nesting](https://gitlab.com/gitlab-org/fleeting/nesting) installed.
- An idle scale of `2`.
- An idle time of 24 hours.
- A maximum instance count of `4`.

When the capacity for each instance is `2` and the use count is unlimited, each instance concurrently executes 2 jobs for the lifetime of the instance.

When the idle scale is `2`, one idle instance is created whenever the in-use capacity falls below `2`. Idle instances remain for at least 24 hours. This time frame is due to the 24 hour minimal allocation period of AWS MacOS instance hosts.

Jobs executed in this environment do not need to be trusted because [nesting](https://gitlab.com/gitlab-org/fleeting/nesting) is used for nested virtualization of each job. This only works on Apple silicon instances.

The runner `concurrent` field is set to 8 (maximum number of instances * capacity per instance).
```toml concurrent = 8 [[runners]] name = "macos applesilicon autoscaler example" url = "https://gitlab.com" token = "" executor = "instance" [runners.instance] allowed_images = ["*"] # allow any nesting image [runners.autoscaler] capacity_per_instance = 2 # AppleSilicon can only support 2 VMs per host max_use_count = 0 max_instances = 4 plugin = "aws" # in GitLab 16.11 and later, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # in GitLab 16.10 and earlier, manually install the plugin and use: # plugin = "fleeting-plugin-aws" [[runners.autoscaler.policy]] idle_count = 2 idle_time = "24h" # AWS's MacOS instances [runners.autoscaler.connector_config] username = "ec2-user" key_path = "macos-key.pem" timeout = "1h" # connecting to a MacOS instance can take some time, as they can be slow to provision [runners.autoscaler.plugin_config] name = "mac2metal" region = "us-west-2" [runners.autoscaler.vm_isolation] enabled = true nesting_host = "unix:///Users/ec2-user/Library/Application Support/nesting.sock" [runners.autoscaler.vm_isolation.connector_config] username = "nested-vm-username" password = "nested-vm-password" timeout = "20m" ``` ## Google Cloud instance group configuration examples ### One job per instance using a Google Cloud instance group Prerequisites: - A custom image with at least `git` and GitLab Runner installed. - A Google Cloud instance group where the autoscaling mode is set to `do not autoscale`. The runner handles the scaling. - An IAM policy with the [correct permissions](https://gitlab.com/gitlab-org/fleeting/plugins/googlecloud#required-permissions). If you're deploying your runner in a GKE cluster, you can add an IAM binding between the Kubernetes service account and the GCP service account. You can add this binding with the `iam.workloadIdentityUser` role to authenticate to GCP instead of using a key file with `credentials_file`. 
This configuration supports: - A capacity per instance of 1 - A use count of 1 - An idle scale of 5 - An idle time of 20 minutes - A maximum instance count of 10 When the capacity and use count are both set to `1`, each job is given a secure ephemeral instance that cannot be affected by other jobs. When the job completes, the instance it was executed on is immediately deleted. When the idle scale is set to `5`, the runner keeps 5 instances available for future demand (because the capacity per instance is 1). These instances stay for at least 20 minutes. The runner `concurrent` field is set to 10 (maximum number instances * capacity per instance). ```toml concurrent = 10 [[runners]] name = "instance autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" # Autoscaler config [runners.autoscaler] plugin = "googlecloud" # for >= 16.11, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # for versions < 17.0, manually install the plugin and use: # plugin = "fleeting-plugin-googlecompute" capacity_per_instance = 1 max_use_count = 1 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-linux-instance-group" # Google Cloud Instance Group name project = "my-gcp-project" zone = "europe-west1-c" credentials_file = "/home/user/.config/gcloud/application_default_credentials.json" # optional, default is '~/.config/gcloud/application_default_credentials.json' [runners.autoscaler.connector_config] username = "runner" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ### Five jobs per instance, unlimited uses, using Google Cloud Instance group Prerequisites: - A custom image with at least `git` and GitLab Runner installed. - An Instance group. For the "Autoscaling mode" select "do not autoscale", as Runner handles the scaling. 
- An IAM Policy with the [correct permissions](https://gitlab.com/gitlab-org/fleeting/plugins/googlecloud#required-permissions). This configuration supports: - A capacity per instance of 5 - An unlimited use count - An idle scale of 5 - An idle time of 20 minutes - A maximum instance count of 10 When the capacity is set `5` and the use count is unlimited, each instance concurrently executes 5 jobs for the lifetime of the instance. Jobs executed in these environments should be **trusted** as there is little isolation between them and each job can affect the performance of another. When the idle scale is `5`, one idle instance is created whenever the in-use capacity falls below `5`. Idle instances stay for at least 20 minutes. The runner `concurrent` field is set to 50 (maximum number instances * capacity per instance). ```toml concurrent = 50 [[runners]] name = "instance autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" # Autoscaler config [runners.autoscaler] plugin = "googlecloud" # for >= 16.11, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # for versions < 17.0, manually install the plugin and use: # plugin = "fleeting-plugin-googlecompute" capacity_per_instance = 5 max_use_count = 0 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-windows-instance-group" # Google Cloud Instance Group name project = "my-gcp-project" zone = "europe-west1-c" credentials_file = "/home/user/.config/gcloud/application_default_credentials.json" # optional, default is '~/.config/gcloud/application_default_credentials.json' [runners.autoscaler.connector_config] username = "Administrator" timeout = "5m0s" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ## Azure scale set configuration examples ### One job per instance using an Azure scale set Prerequisites: - A custom image with at least 
`git` and GitLab Runner installed. - An Azure scale set where the autoscaling mode is set to `manual` and overprovisioning is turned off. The runner handles the scaling. This configuration supports: - A capacity per instance of 1 - A use count of 1 - An idle scale of 5 - An idle time of 20 minutes - A maximum instance count of 10 When the capacity and use count are both set to `1`, each job is given a secure ephemeral instance that cannot be affected by other jobs. When the job completes, the instance it was executed on is immediately deleted. When the idle scale is set to `5`, the runner keeps 5 instances available for future demand (because the capacity per instance is 1). These instances stay for at least 20 minutes. The runner `concurrent` field is set to 10 (maximum number instances * capacity per instance). ```toml concurrent = 10 [[runners]] name = "instance autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" # Autoscaler config [runners.autoscaler] plugin = "azure" # for >= 16.11, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # for versions < 17.0, manually install the plugin and use: # plugin = "fleeting-plugin-azure" capacity_per_instance = 1 max_use_count = 1 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-linux-scale-set" # Azure scale set name subscription_id = "9b3c4602-cde2-4089-bed8-889e5a3e7102" resource_group_name = "my-resource-group" [runners.autoscaler.connector_config] username = "runner" password = "my-scale-set-static-password" use_static_credentials = true timeout = "10m" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ### Five jobs per instance, unlimited uses, using an Azure scale set Prerequisites: - A custom image with at least `git` and GitLab Runner installed. 
- An Azure scale set where the autoscaling mode is set to `manual` and overprovisioning is turned off. The runner handles the scaling.

This configuration supports:

- A capacity per instance of 5
- An unlimited use count
- An idle scale of 5
- An idle time of 20 minutes
- A maximum instance count of 10

When the capacity is set to `5` and the use count is unlimited, each instance concurrently executes 5 jobs for the lifetime of the instance. Jobs executed in these environments should be **trusted** as there is little isolation between them and each job can affect the performance of another.

When the idle scale is `5`, one idle instance is created whenever the in-use capacity falls below `5`. Idle instances stay for at least 20 minutes.

The runner `concurrent` field is set to 50 (maximum number of instances * capacity per instance).

```toml
concurrent = 50

[[runners]]
  name = "instance autoscaler example"
  url = "https://gitlab.com"
  token = ""
  shell = "sh"
  executor = "instance"

  # Autoscaler config
  [runners.autoscaler]
    plugin = "azure" # for >= 16.11, ensure you run `gitlab-runner fleeting install` to automatically install the plugin
                     # for versions < 17.0, manually install the plugin and use:
                     # plugin = "fleeting-plugin-azure"

    capacity_per_instance = 5
    max_use_count = 0
    max_instances = 10

    [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation)
      name = "my-windows-scale-set" # Azure scale set name
      subscription_id = "9b3c4602-cde2-4089-bed8-889e5a3e7102"
      resource_group_name = "my-resource-group"

    [runners.autoscaler.connector_config]
      username = "Administrator"
      password = "my-scale-set-static-password"
      use_static_credentials = true
      timeout = "10m"
      use_external_addr = true

    [[runners.autoscaler.policy]]
      idle_count = 5
      idle_time = "20m0s"
```

## Slot-based cgroup support

The Instance executor supports slot-based cgroups for improved resource isolation between concurrent jobs.
When enabled, the `GITLAB_RUNNER_SLOT_CGROUP` environment variable is automatically provided to jobs, allowing you to run processes under slot-specific cgroups. For detailed information about slot-based cgroups, including benefits, prerequisites, configuration, and setup instructions, see [slot-based cgroup support](../configuration/slot_based_cgroups.md). ### Using the GitLab Runner slot cgroup environment variable The Instance executor provides the `GITLAB_RUNNER_SLOT_CGROUP` environment variable to your jobs. Use this variable with tools like `systemd-run` or `cgexec` to run processes under the slot-specific cgroup. For usage examples and troubleshooting, see the [Instance executor section](../configuration/slot_based_cgroups.md#instance-executor) in the slot-based cgroup documentation. ## Troubleshooting When working with the Instance executor, you might encounter the following issues: ### `sh: 1: eval: Running on ip-x.x.x.x via runner-host...n: not found` This error typically occurs when the `eval` command in the preparation step fails. To resolve this error, switch to `bash` shell and enable the [feature flag](../configuration/feature-flags.md) `FF_USE_NEW_BASH_EVAL_STRATEGY`. ================================================ FILE: docs/executors/kubernetes/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Kubernetes executor --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} Use the Kubernetes executor to use Kubernetes clusters for your builds. The executor calls the Kubernetes cluster API and creates a pod for each GitLab CI job. The Kubernetes executor divides the build into multiple steps: 1. **Prepare**: Create the Pod against the Kubernetes Cluster. This creates the containers required for the build and services to run. 1. 
**Pre-build**: Clone, restore cache, and download artifacts from previous stages. This step runs on a special container as part of the pod. 1. **Build**: User build. 1. **Post-build**: Create cache, upload artifacts to GitLab. This step also uses the special container as part of the pod. ## How the runner creates Kubernetes pods The following diagram shows the interaction between a GitLab instance and a runner hosted on a Kubernetes cluster. The runner calls the Kubernetes API to create pods on the cluster. The pod consists of the following containers for each `service` defined in the `.gitlab-ci.yml` or `config.toml` files: - A build container defined as `build`. - A helper container defined as `helper`. - A services containers defined as `svc-X`, where `X` is `[0-9]+`. Services and containers run in the same Kubernetes pod and share the same localhost address. The following restrictions apply: - The services are accessible through their DNS names. If you use an older version, you must use `localhost`. - You cannot use several services that use the same port. For example, you cannot have two `mysql` services at the same time. ```mermaid sequenceDiagram participant G as GitLab instance participant R as Runner on Kubernetes cluster participant Kube as Kubernetes API participant P as POD R->>+G: Get a CI job. loop G-->R: ; end Note over R,G: POST /api/v4/jobs/request G->>+R: CI job data. R-->>-Kube: Create a POD to run the CI job. Note over R,Kube: POST to Kube API P->>+P: Execute job. Note over P: CI build job = Prepare + Pre-build + Build + Post-build P->>+G: Job logs ``` The interaction in the diagram is valid for any Kubernetes cluster. For example, turnkey solutions hosted on the major public cloud providers, or self-managed Kubernetes installations. ## Connect to the Kubernetes API Use the following options to connect to the Kubernetes API. The user account provided must have permission to create, list, and attach to Pods in the specified namespace. 
| Option | Description | |-------------|-------------| | `host` | Optional Kubernetes API server host URL (auto-discovery attempted if not specified). | | `context` | Optional Kubernetes context name to use from your `kubectl` configuration. Use this option when you don't specify `host`. | | `cert_file` | Optional Kubernetes API server user auth certificate. | | `key_file` | Optional Kubernetes API server user auth private key. | | `ca_file` | Optional Kubernetes API server ca certificate. | If you're running GitLab Runner in the Kubernetes cluster, omit these fields so that the GitLab Runner auto-discovers the Kubernetes API. If you're running GitLab Runner externally to the Cluster, these settings ensure that GitLab Runner has access to the Kubernetes API on the cluster. You can either specify the `host` with authentication details, or use `context` to reference a specific context from your `kubectl` configuration. ### Set the bearer token for Kubernetes API calls To set the bearer token for API calls to create pods, use the `KUBERNETES_BEARER_TOKEN` variable. This allows project owners to use project secret variables to specify a bearer token. When specifying the bearer token, you must set the `Host` configuration setting. ``` yaml variables: KUBERNETES_BEARER_TOKEN: thebearertokenfromanothernamespace ``` ### Configure runner API permissions To configure permissions for the core API group, update the `values.yml` file for GitLab Runner Helm charts. You can either: - Set `rbac.create` to `true`. - Specify a service account `serviceAccount.name: ` with the following permissions in the `values.yml` file. 
| Resource | Verb (Optional Feature/Config Flags) | |----------|-------------------------------| | apps/deployments | create (`kubernetes.autoscaler`), delete (`kubernetes.autoscaler`), get (`kubernetes.autoscaler`), list (`kubernetes.autoscaler`), update (`kubernetes.autoscaler`) | | events | list (`print_pod_warning_events=true`), watch (`FF_PRINT_POD_EVENTS=true`) | | namespaces | create (`kubernetes.NamespacePerJob=true`), delete (`kubernetes.NamespacePerJob=true`) | | poddisruptionbudgets | create (`pod_disruption_budget=true`), get (`pod_disruption_budget=true`) | | pods | create, delete, get, list ([using Informers](#informers)), watch ([using Informers](#informers), `FF_KUBERNETES_HONOR_ENTRYPOINT=true`, `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`) | | pods/attach | create (`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`), delete (`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`), get (`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`), patch (`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`) | | pods/exec | create, delete, get, patch | | pods/log | get (`FF_KUBERNETES_HONOR_ENTRYPOINT=true`, `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`, `FF_WAIT_FOR_POD_TO_BE_REACHABLE=true`), list (`FF_KUBERNETES_HONOR_ENTRYPOINT=true`, `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`) | | scheduling.k8s.io/priorityclasses | create (`kubernetes.autoscaler`), get (`kubernetes.autoscaler`) | | secrets | create, delete, get, update | | serviceaccounts | get | | services | create, get | You can use the following YAML role definition to create a role with the required permissions. 
```yaml apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: name: gitlab-runner namespace: default rules: - apiGroups: ["apps"] resources: ["deployments"] verbs: - "create" # Required when `kubernetes.autoscaler` - "delete" # Required when `kubernetes.autoscaler` - "get" # Required when `kubernetes.autoscaler` - "list" # Required when `kubernetes.autoscaler` - "update" # Required when `kubernetes.autoscaler` - apiGroups: [""] resources: ["events"] verbs: - "list" # Required when `print_pod_warning_events=true` - "watch" # Required when `FF_PRINT_POD_EVENTS=true` - apiGroups: [""] resources: ["namespaces"] verbs: - "create" # Required when `kubernetes.NamespacePerJob=true` - "delete" # Required when `kubernetes.NamespacePerJob=true` - apiGroups: ["policy"] resources: ["poddisruptionbudgets"] verbs: - "create" # Required when `pod_disruption_budget=true` - "get" # Required when `pod_disruption_budget=true` - apiGroups: [""] resources: ["pods"] verbs: - "create" - "delete" - "get" - "list" # Required when using Informers (https://docs.gitlab.com/runner/executors/kubernetes/#informers) - "watch" # Required when `FF_KUBERNETES_HONOR_ENTRYPOINT=true`, `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`, using Informers (https://docs.gitlab.com/runner/executors/kubernetes/#informers) - apiGroups: [""] resources: ["pods/attach"] verbs: - "create" # Required when `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false` - "delete" # Required when `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false` - "get" # Required when `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false` - "patch" # Required when `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false` - apiGroups: [""] resources: ["pods/exec"] verbs: - "create" - "delete" - "get" - "patch" - apiGroups: [""] resources: ["pods/log"] verbs: - "get" # Required when `FF_KUBERNETES_HONOR_ENTRYPOINT=true`, `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`, `FF_WAIT_FOR_POD_TO_BE_REACHABLE=true` - "list" # Required when 
`FF_KUBERNETES_HONOR_ENTRYPOINT=true`, `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false` - apiGroups: ["scheduling.k8s.io"] resources: ["priorityclasses"] verbs: - "create" # Required when `kubernetes.autoscaler` - "get" # Required when `kubernetes.autoscaler` - apiGroups: [""] resources: ["secrets"] verbs: - "create" - "delete" - "get" - "update" - apiGroups: [""] resources: ["serviceaccounts"] verbs: - "get" - apiGroups: [""] resources: ["services"] verbs: - "create" - "get" ``` Additional details: - The `event` permission is needed only for GitLab 16.2.1 and later. - The `namespace` permission is needed only when enabling namespace isolation by using `namespace_per_job`. - The `pods/log` permission is only needed when one of the following scenarios are true: - The [`FF_KUBERNETES_HONOR_ENTRYPOINT` feature flag](../../configuration/feature-flags.md) is enabled. - The [`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY` feature flag](../../configuration/feature-flags.md) is disabled when the [`CI_DEBUG_SERVICES` variable](https://docs.gitlab.com/ci/services/#capturing-service-container-logs) is set to `true`. - The [`FF_WAIT_FOR_POD_TO_BE_REACHABLE` feature flag](../../configuration/feature-flags.md) is enabled. #### Informers In GitLab Runner 17.9.0 and later, a Kubernetes informer tracks build pod changes. This helps the executor detect the changes more quickly. The informer requires `list` and `watch` permissions for `pods`. When the executor starts the build, it checks the Kubernetes API for the permissions. If all permissions are granted, the executor uses an informer. If any permission is missing, GitLab Runner logs a warning. The build continues and uses the previous mechanism to track the build pod's status and changes. ## Configuration settings Use the following settings in the `config.toml` file to configure the Kubernetes executor. 
### CPU requests and limits | Setting | Description | |---------------------------------------------|-------------| | `cpu_limit` | The CPU allocation given to build containers. | | `cpu_limit_overwrite_max_allowed` | The maximum amount that the CPU allocation can be written to for build containers. When empty, it disables the CPU limit overwrite feature. | | `cpu_request` | The CPU allocation requested for build containers. | | `cpu_request_overwrite_max_allowed` | The maximum amount that the CPU allocation request can be written to for build containers. When empty, it disables the CPU request overwrite feature. | | `helper_cpu_limit` | The CPU allocation given to build helper containers. | | `helper_cpu_limit_overwrite_max_allowed` | The maximum amount that the CPU allocation can be written to for helper containers. When empty, it disables the CPU limit overwrite feature. | | `helper_cpu_request` | The CPU allocation requested for build helper containers. | | `helper_cpu_request_overwrite_max_allowed` | The maximum amount that the CPU allocation request can be written to for helper containers. When empty, it disables the CPU request overwrite feature. | | `service_cpu_limit` | The CPU allocation given to build service containers. | | `service_cpu_limit_overwrite_max_allowed` | The maximum amount that the CPU allocation can be written to for service containers. When empty, it disables the CPU limit overwrite feature. | | `service_cpu_request` | The CPU allocation requested for build service containers. | | `service_cpu_request_overwrite_max_allowed` | The maximum amount that the CPU allocation request can be written to for service containers. When empty, it disables the CPU request overwrite feature. | | `pod_cpu_limit` | The CPU allocation given to build pod. | | `pod_cpu_limit_overwrite_max_allowed` | The maximum amount that the CPU allocation can be written to for build pod. When empty, it disables the CPU limit overwrite feature. 
| | `pod_cpu_request` | The CPU allocation requested for build pod. | | `pod_cpu_request_overwrite_max_allowed` | The maximum amount that the CPU allocation request can be written to for build pod. When empty, it disables the CPU request overwrite feature. | > [!note] > Pod-level resource specifications have been introduced as alpha features in [Kubernetes v1.32](https://v1-32.docs.kubernetes.io/blog/2024/12/11/kubernetes-v1-32-release/#pod-level-resource-specifications) and graduated to beta in [Kubernetes v1.34](https://kubernetes.io/blog/2025/09/22/kubernetes-v1-34-pod-level-resources/). ### Memory requests and limits | Setting | Description | |------------------------------------------------|-------------| | `memory_limit` | The amount of memory allocated to build containers. | | `memory_limit_overwrite_max_allowed` | The maximum amount that the memory allocation can be written to for build containers. When empty, it disables the memory limit overwrite feature. | | `memory_request` | The amount of memory requested from build containers. | | `memory_request_overwrite_max_allowed` | The maximum amount that the memory allocation request can be written to for build containers. When empty, it disables the memory request overwrite feature. | | `helper_memory_limit` | The amount of memory allocated to build helper containers. | | `helper_memory_limit_overwrite_max_allowed` | The maximum amount that the memory allocation can be written to for helper containers. When empty, it disables the memory limit overwrite feature. | | `helper_memory_request` | The amount of memory requested for build helper containers. | | `helper_memory_request_overwrite_max_allowed` | The maximum amount that the memory allocation request can be written to for helper containers. When empty, it disables the memory request overwrite feature. | | `service_memory_limit` | The amount of memory allocated to build service containers. 
| | `service_memory_limit_overwrite_max_allowed` | The maximum amount that the memory allocation can be written to for service containers. When empty, it disables the memory limit overwrite feature. | | `service_memory_request` | The amount of memory requested for build service containers. | | `service_memory_request_overwrite_max_allowed` | The maximum amount that the memory allocation request can be written to for service containers. When empty, it disables the memory request overwrite feature. | | `pod_memory_limit` | The amount of memory allocated to build pod. | | `pod_memory_limit_overwrite_max_allowed` | The maximum amount that the memory allocation can be written to for build pod. When empty, it disables the memory limit overwrite feature. | | `pod_memory_request` | The amount of memory requested for build pod. | | `pod_memory_request_overwrite_max_allowed` | The maximum amount that the memory allocation request can be written to for build pod. When empty, it disables the memory request overwrite feature. | #### Helper container memory sizing recommendations For optimal performance, set helper container memory limits based on your workload requirements: - **Workloads with caching and artifact generation**: Minimum 250 MiB - **Basic workloads without cache/artifacts**: Might work with lower limits (128-200 MiB) **Basic configuration example:** ```toml [[runners]] executor = "kubernetes" [runners.kubernetes] helper_memory_limit = "250Mi" helper_memory_request = "250Mi" helper_memory_limit_overwrite_max_allowed = "1Gi" ``` **Job-specific memory overrides:** Use the `KUBERNETES_HELPER_MEMORY_LIMIT` variable to adjust memory for specific jobs without requiring administrator changes: ```yaml job_with_higher_helper_memory_limit: variables: KUBERNETES_HELPER_MEMORY_LIMIT: "512Mi" script: ``` This approach allows developers to optimize resource usage per job while maintaining cluster-wide limits through `helper_memory_limit_overwrite_max_allowed`. 
### Storage requests and limits | Setting | Description | |-----------------------------------------------------------|-------------| | `ephemeral_storage_limit` | The ephemeral storage limit for build containers. | | `ephemeral_storage_limit_overwrite_max_allowed` | The maximum amount that the ephemeral storage limit for build containers can be overwritten. When empty, it disables the ephemeral storage limit overwrite feature. | | `ephemeral_storage_request` | The ephemeral storage request given to build containers. | | `ephemeral_storage_request_overwrite_max_allowed` | The maximum amount that the ephemeral storage request can be overwritten by for build containers. When empty, it disables the ephemeral storage request overwrite feature. | | `helper_ephemeral_storage_limit` | The ephemeral storage limit given to helper containers. | | `helper_ephemeral_storage_limit_overwrite_max_allowed` | The maximum amount that the ephemeral storage limit can be overwritten by for helper containers. When empty, it disables the ephemeral storage request overwrite feature. | | `helper_ephemeral_storage_request` | The ephemeral storage request given to helper containers. | | `helper_ephemeral_storage_request_overwrite_max_allowed` | The maximum amount that the ephemeral storage request can be overwritten by for helper containers. When empty, it disables the ephemeral storage request overwrite feature. | | `service_ephemeral_storage_limit` | The ephemeral storage limit given to service containers. | | `service_ephemeral_storage_limit_overwrite_max_allowed` | The maximum amount that the ephemeral storage limit can be overwritten by for service containers. When empty, it disables the ephemeral storage request overwrite feature. | | `service_ephemeral_storage_request` | The ephemeral storage request given to service containers. 
| | `service_ephemeral_storage_request_overwrite_max_allowed` | The maximum amount that the ephemeral storage request can be overwritten by for service containers. When empty, it disables the ephemeral storage request overwrite feature. | ### Other `config.toml` settings | Setting | Description | |-----------------------------------------------|-------------| | `affinity` | Specify affinity rules that determine which node runs the build. Read more about [using affinity](#define-a-list-of-node-affinities). | | `allow_privilege_escalation` | Run all containers with the `allowPrivilegeEscalation` flag enabled. When empty, it does not define the `allowPrivilegeEscalation` flag in the container `SecurityContext` and allows Kubernetes to use the default [privilege escalation](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/) behavior. | | `allowed_groups` | Array of group IDs that can be specified for container groups. If not present, all groups are allowed. For more information, see [configure container user and group](#configure-container-user-and-group). | | `allowed_images` | Wildcard list of images that can be specified in `.gitlab-ci.yml`. If not present, all images are allowed (equivalent to `["*/*:*"]`). [View details](#restrict-docker-images-and-services). | | `allowed_pull_policies` | List of pull policies that can be specified in the `.gitlab-ci.yml` file or the `config.toml` file. | | `allowed_services` | Wildcard list of services that can be specified in `.gitlab-ci.yml`. If not present, all services are allowed (equivalent to `["*/*:*"]`). [View details](#restrict-docker-images-and-services). | | `allowed_users` | Array of user IDs that can be specified for container users. If not present, all users are allowed. For more information, see [configure container user and group](#configure-container-user-and-group). 
| | `automount_service_account_token` | Boolean to control whether the service account token automatically mounts in the build pod. | | `bearer_token` | Default bearer token used to launch build pods. | | `bearer_token_overwrite_allowed` | Boolean to allow projects to specify a bearer token used to create the build pod. | | `build_container_security_context` | Sets a container security context for the build container. [Read more about security context](#set-a-security-policy-for-the-pod). | | `cap_add` | Specify Linux capabilities that should be added to the job pod containers. [Read more about capabilities configuration in Kubernetes executor](#specify-container-capabilities). | | `cap_drop` | Specify Linux capabilities that should be dropped from the job pod containers. [Read more about capabilities configuration in Kubernetes executor](#specify-container-capabilities). | | `cleanup_grace_period_seconds` | When a job completes, the duration in seconds that the pod has to terminate gracefully. After this period, the processes are forcibly halted with a kill signal. Ignored if `terminationGracePeriodSeconds` is specified. | | `context` | Kubernetes context name to use from `kubectl` configuration (when `host` is not specified). | | `dns_policy` | Specify the DNS policy that should be used when constructing the pod: `none`, `default`, `cluster-first`, `cluster-first-with-host-net`. The Kubernetes default (`cluster-first`) is used if not set. | | `dns_config` | Specify the DNS configuration that should be used when constructing the pod. [Read more about using pod's DNS config](#configure-pod-dns-settings). | | `helper_container_security_context` | Sets a container security context for the helper container. [Read more about security context](#set-a-security-policy-for-the-pod). | | `helper_image` | (Advanced) [Override the default helper image](../../configuration/advanced-configuration.md#helper-image) used to clone repositories and upload artifacts. 
| | `helper_image_flavor` | Sets the helper image flavor (`alpine`, `alpine3.21`, or `ubuntu`). Defaults to `alpine`. Using `alpine` is the same as `alpine3.21`. | | `host_aliases` | List of additional host name aliases that are added to all containers. [Read more about using extra host aliases](#add-extra-host-aliases). | | `image_pull_secrets` | An array of items containing the Kubernetes `docker-registry` secret names used to authenticate Docker image pulling from private registries. | | `init_permissions_container_security_context` | Sets a container security context for the init-permissions container. [Read more about security context](#set-a-security-policy-for-the-pod). | | `namespace` | Namespace in which to run Kubernetes Pods. | | `namespace_per_job` | Isolate jobs in separate namespaces. If enabled, `namespace` and `namespace_overwrite_allowed` are ignored. | | `namespace_overwrite_allowed` | Regular expression to validate the contents of the namespace overwrite environment variable (documented below). When empty, it disables the namespace overwrite feature. | | `node_selector` | A `table` of `key=value` pairs in the format of `string=string` (`string:string` in the case of environment variables). Setting this limits the creation of pods to Kubernetes nodes matching all the `key=value` pairs. [Read more about using node selectors](#specify-the-node-to-execute-builds). | | `node_tolerations` | A `table` of `"key=value" = "Effect"` pairs in the format of `string=string:string`. Setting this allows pods to schedule to nodes with all or a subset of tolerated taints. Only one toleration can be supplied through environment variable configuration. The `key`, `value`, and `effect` match with the corresponding field names in Kubernetes pod toleration configuration. | | `pod_annotations` | A `table` of `key=value` pairs in the format of `string=string`. The `table` contains a list of annotations to be added to each build pod created by the runner. 
The value of these can include environment variables for expansion. Pod annotations can be overwritten in each build. | | `pod_annotations_overwrite_allowed` | Regular expression to validate the contents of the pod annotations overwrite environment variable. When empty, it disables the pod annotations overwrite feature. | | `pod_labels` | A `table` of `key=value` pairs in the format of `string=string`. The `table` contains a list of labels to be added to each build pod created by the runner. The value of these can include environment variables for expansion. Pod labels can be overwritten in each build by using `pod_labels_overwrite_allowed`. | | `pod_labels_overwrite_allowed` | Regular expression to validate the contents of the pod labels overwrite environment variable. When empty, it disables the pod labels overwrite feature. Note that pod labels in the `runner.gitlab.com` label namespace cannot be overwritten. | | `pod_security_context` | Configured through the configuration file, this sets a pod security context for the build pod. [Read more about security context](#set-a-security-policy-for-the-pod). | | `pod_termination_grace_period_seconds` | Pod-level setting which determines the duration in seconds which the pod has to terminate gracefully. After this, the processes are forcibly halted with a kill signal. Ignored if `terminationGracePeriodSeconds` is specified. | | `poll_interval` | How frequently, in seconds, the runner polls the Kubernetes pod it has just created to check its status (default = 3). | | `poll_timeout` | The amount of time, in seconds, that needs to pass before the runner times out attempting to connect to the container it has just created. Use this setting for queueing more builds than the cluster can handle at a time (default = 180). | | `cleanup_resources_timeout` | The total amount of time for Kubernetes resources to be cleaned up after the job completes. Supported syntax: `1h30m`, `300s`, `10m`. Default is 5 minutes (`5m`). 
| | `priority_class_name` | Specify the Priority Class to be set to the pod. The default one is used if not set. | | `privileged` | Run containers with the privileged flag. | | `pull_policy` | Specify the image pull policy: `never`, `if-not-present`, `always`. If not set, the cluster's image [default pull policy](https://kubernetes.io/docs/concepts/containers/images/#updating-images) is used. For more information and instructions on how to set multiple pull policies, see [using pull policies](#set-a-pull-policy). See also [`if-not-present`, `never` security considerations](../../security/_index.md#usage-of-private-docker-images-with-if-not-present-pull-policy). You can also [restrict pull policies](#restrict-docker-pull-policies). | | `resource_availability_check_max_attempts` | The maximum number of attempts to check if a resource (service account and/or pull secret) set is available before giving up. There is 5 seconds interval between each attempt. [Read more about resources check during prepare step](#resources-check-during-prepare-step). | | `runtime_class_name` | A Runtime class to use for all created pods. If the feature is unsupported by the cluster, jobs exit or fail. | | `service_container_security_context` | Sets a container security context for the service containers. [Read more about security context](#set-a-security-policy-for-the-pod). | | `scheduler_name` | Scheduler to use for scheduling build pods. | | `service_account` | Default service account job/executor pods use to talk to Kubernetes API. | | `service_account_overwrite_allowed` | Regular expression to validate the contents of the service account overwrite environment variable. When empty, it disables the service account overwrite feature. | | `services` | List of [services](https://docs.gitlab.com/ci/services/) attached to the build container using the [sidecar pattern](https://learn.microsoft.com/en-us/azure/architecture/patterns/sidecar). 
Read more about [using services](#define-a-list-of-services). | | `use_service_account_image_pull_secrets` | When enabled, the pod created by the executor lacks `imagePullSecrets`. This causes the pod to be created using the [`imagePullSecrets` from the service account](https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/#add-image-pull-secret-to-service-account), if set. | | `terminationGracePeriodSeconds` | Duration after the processes running in the pod are sent a termination signal and the time when the processes are forcibly halted with a kill signal. [Deprecated in favour of `cleanup_grace_period_seconds` and `pod_termination_grace_period_seconds`](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28165). | | `volumes` | Configured through the configuration file, the list of volumes that is mounted in the build container. [Read more about using volumes](#configure-volume-types). | | `pod_spec` | This setting is an experiment. Overwrites the pod specification generated by the runner manager with a list of configurations set on the pod used to run the CI Job. All the properties listed `Kubernetes Pod Specification` can be set. For more information, see [Overwrite generated pod specifications (experiment)](#overwrite-generated-pod-specifications). | | `retry_limit` | The maximum number of attempts to communicate with Kubernetes API. The retry interval between each attempt is based on a backoff algorithm starting at 500 ms. | | `retry_backoff_max` | Custom maximum backoff value in milliseconds for the retry interval to reach for each attempt. The default value is 2000 ms and it can not be lower than 500 ms. The default maximum retry interval to reach for each attempt is 2 seconds and can be customized with `retry_backoff_max`. | | `retry_limits` | How many times each request error is to be retried. | | `logs_base_dir` | Base directory to be prepended to the generated path to store build logs. 
For more information, see [Change the base directory for build logs and scripts](#change-the-base-directory-for-build-logs-and-scripts). | | `scripts_base_dir` | Base directory to be prepended to the generated path to store build scripts. For more information, see [Change the base directory for build logs and scripts](#change-the-base-directory-for-build-logs-and-scripts). | | `print_pod_warning_events` | When enabled, this feature retrieves all warning events associated with the pod when jobs fail. This functionality is enabled by default and requires a service account with at least [`events: list` permissions](#configure-runner-api-permissions). | | `pod_disruption_budget` | When enabled, a [`PodDisruptionBudget`](https://kubernetes.io/docs/tasks/run-application/configure-pdb/) is created for each job pod to prevent eviction during voluntary disruptions such as node drains and cluster upgrades. Disabled by default. Requires a service account with [`poddisruptionbudgets` permissions](#configure-runner-api-permissions). | ### Configuration example The following sample shows an example configuration of the `config.toml` file for the Kubernetes executor. ```toml concurrent = 4 [[runners]] name = "myRunner" url = "https://gitlab.com/ci" token = "......" 
executor = "kubernetes" [runners.kubernetes] host = "https://45.67.34.123:4892" cert_file = "/etc/ssl/kubernetes/api.crt" key_file = "/etc/ssl/kubernetes/api.key" ca_file = "/etc/ssl/kubernetes/ca.crt" namespace = "gitlab" namespace_overwrite_allowed = "ci-.*" bearer_token_overwrite_allowed = true privileged = true cpu_limit = "1" memory_limit = "1Gi" service_cpu_limit = "1" service_memory_limit = "1Gi" helper_cpu_limit = "500m" helper_memory_limit = "100Mi" poll_interval = 5 poll_timeout = 3600 dns_policy = "cluster-first" priority_class_name = "priority-1" logs_base_dir = "/tmp" scripts_base_dir = "/tmp" [runners.kubernetes.node_selector] gitlab = "true" [runners.kubernetes.node_tolerations] "node-role.kubernetes.io/master" = "NoSchedule" "custom.toleration=value" = "NoSchedule" "empty.value=" = "PreferNoSchedule" "onlyKey" = "" ``` ## Pre-warm cluster capacity with pause pods {{< history >}} - Introduced in GitLab Runner 18.10. {{< /history >}} You can configure the Kubernetes executor to maintain pause pods that pre-warm cluster capacity. When a job starts, the low-priority pause pods are preempted, and the job pod is scheduled immediately on existing nodes. This configuration reduces job startup latency from waiting for the cluster autoscaler to provision new nodes. ### How pause pods work 1. The runner creates a `Deployment` of pause pods based on configured policies. 1. Pause pods use a low priority class, so Kubernetes preempts them when higher-priority job pods need resources. 1. When a pause pod is preempted, the job pod takes its place immediately. 1. The `Deployment` recreates the preempted pause pod, potentially triggering the cluster autoscaler to add a new node. 
### Configure pause pods To enable pause pods, add a `[runners.kubernetes.autoscaler]` section to your `config.toml`: ```toml [[runners]] name = "kubernetes-runner" executor = "kubernetes" [runners.kubernetes] namespace = "gitlab-runner" cpu_request = "500m" memory_request = "1Gi" [runners.kubernetes.autoscaler] max_pause_pods = 10 [[runners.kubernetes.autoscaler.policy]] idle_count = 5 periods = ["* 8-17 * * mon-fri"] timezone = "UTC" [[runners.kubernetes.autoscaler.policy]] idle_count = 0 periods = ["* * * * *"] ``` ### Autoscaler settings | Setting | Description | |---------|-------------| | `max_pause_pods` | Maximum number of pause pods to create. Set to `0` for unlimited. | | `pause_pod_image` | Image for pause pods. Defaults to `registry.k8s.io/pause:3.10`. | | `pause_pod_priority_class_name` | Priority class for pause pods. Defaults to `gitlab-runner-idle-capacity` (auto-created with priority `-1`). If specified, auto-creation is skipped. | ### Priority classes for preemption For pause pods to be preempted by job pods, they must have a lower priority. By default, the runner automatically creates a `PriorityClass` named `gitlab-runner-idle-capacity` with priority `-1`. Because pods without a priority class use priority `0`, job pods will preempt pause pods. To use a custom `PriorityClass` instead, specify it in your configuration: ```toml [runners.kubernetes.autoscaler] pause_pod_priority_class_name = "my-custom-priority-class" ``` If your job pods use a custom priority class, ensure it has a higher value than the pause pod priority class. ### Policy settings You can define multiple policies. The last matching policy based on the current time is used. | Setting | Description | |---------|-------------| | `periods` | Array of cron expressions defining when this policy is active. Defaults to `* * * * *` (always). | | `timezone` | Timezone for evaluating cron expressions. Defaults to system local time. | | `idle_count` | Target number of pause pods to maintain. 
Defaults to `0`. | | `idle_time` | Scale-down cooldown. When desired capacity decreases, pause pods are removed after this wait time. Prevents thrashing when using `scale_factor`. Defaults to `5m`. | | `scale_factor` | Scale pause pods based on active jobs: `max(idle_count, active_jobs * scale_factor)`. Defaults to `0` (disabled). | | `scale_factor_limit` | Maximum pause pods when using `scale_factor`. Defaults to `0` (no limit). | ### Cron syntax The `periods` setting uses standard cron format with five fields: ```plaintext ┌────────── minute (0 - 59) │ ┌──────── hour (0 - 23) │ │ ┌────── day of month (1 - 31) │ │ │ ┌──── month (1 - 12) │ │ │ │ ┌── day of week (0 - 7, where 0 and 7 are Sunday, or MON-SUN) * * * * * ``` Examples: | Period | Description | |--------|-------------| | `* * * * *` | Always active | | `* 8-17 * * mon-fri` | Weekdays 8:00-17:59 | | `* 0-12 * * *` | Midnight to 12:59 daily | ### Create the priority class Pause pods require a priority class with lower priority than job pods. Create the priority class before configuring pause pods: ```yaml apiVersion: scheduling.k8s.io/v1 kind: PriorityClass metadata: name: pause-pods value: -10 globalDefault: false description: "Low priority class for runner pause pods" ``` ### Required RBAC permissions To use pause pods, configure additional permissions for the runner service account to manage `Deployments` and `PriorityClasses`: ```yaml - apiGroups: ["apps"] resources: ["deployments"] verbs: ["get", "list", "create", "update", "delete"] - apiGroups: ["scheduling.k8s.io"] resources: ["priorityclasses"] verbs: ["get", "create"] ``` > [!note] > `PriorityClass` is a cluster-scoped resource. A namespaced `Role` and > `RoleBinding` cannot grant the `scheduling.k8s.io/priorityclasses` permissions. > Use `ClusterRole` and `ClusterRoleBinding` instead. 
## Configure the executor service account To configure the executor service account, you can set the `KUBERNETES_SERVICE_ACCOUNT` environment variable or use the `--kubernetes-service-account` flag. ## Pods and containers You can configure pods and containers to control how jobs are executed. ### Default labels for job pods > [!warning] > You cannot override these labels through runner configuration or `.gitlab-ci.yml` files. > Any attempts to set or modify labels in the `runner.gitlab.com` namespace > are ignored and logged as debug messages. | Key | Description | |--------------------------------------------|-------------| | `project.runner.gitlab.com/id` | The ID of the project, unique across projects in the GitLab instance. | | `project.runner.gitlab.com/name` | The name of the project. | | `project.runner.gitlab.com/namespace-id` | The ID of the project's namespace. | | `project.runner.gitlab.com/namespace` | The name of the project's namespace. | | `project.runner.gitlab.com/root-namespace` | The ID of the project's root namespace. For example, `/gitlab-org/group-a/subgroup-a/project`, where the root namespace is `gitlab-org` | | `manager.runner.gitlab.com/name` | The name of the runner configuration that launched this job. | | `manager.runner.gitlab.com/id-short` | The ID of the runner configuration that launched the job. | | `job.runner.gitlab.com/pod` | Internal label used by the Kubernetes executor. | ### Default annotations for job pods The following annotations are added by default on the Pod running the jobs: | Key | Description | |------------------------------------|-------------| | `job.runner.gitlab.com/id` | The ID of the job, unique across all jobs in the GitLab instance. | | `job.runner.gitlab.com/url` | The URL for the job details. | | `job.runner.gitlab.com/sha` | The commit revision the project is built for. | | `job.runner.gitlab.com/before_sha` | The previous latest commit present on a branch or tag. 
| | `job.runner.gitlab.com/ref` | The branch or tag name for which the project is built. | | `job.runner.gitlab.com/name` | The name of the job. | | `job.runner.gitlab.com/timeout` | The job execution timeout in the time duration format. For example, `2h3m0.5s`. | | `project.runner.gitlab.com/id` | The project ID of the job. | To overwrite default annotations, use the `pod_annotations` in the GitLab Runner configuration. You can also overwrite annotations for each CI/CD job in the [`.gitlab-ci.yml` file](#overwrite-pod-annotations). ### Pod lifecycle A [pod's lifecycle](https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#lifecycle) can be affected by: - Setting the `pod_termination_grace_period_seconds` property in the `TOML` configuration file. The process running on the pod can run for the given duration after the `TERM` signal is sent. A kill signal is sent if the Pod is not successfully terminated after this period of time. - Enabling the [`FF_USE_POD_ACTIVE_DEADLINE_SECONDS` feature flag](../../configuration/feature-flags.md). When enabled and the job times out, the pod running the CI/CD job is marked as failed and all associated containers are killed. To have the job time out on GitLab first, `activeDeadlineSeconds` is set to `configured timeout + 1 second`. > [!note] > If you enable the `FF_USE_POD_ACTIVE_DEADLINE_SECONDS` feature flag and set > `pod_termination_grace_period_seconds` to a non-zero value, the CI/CD job pod > is not terminated immediately. The pod `terminationGracePeriods` > ensures the pod is terminated only when it expired. ### Protect job pods from eviction {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6331) in GitLab Runner 18.10. 
{{< /history >}} To protect job pods from [voluntary disruptions](https://kubernetes.io/docs/concepts/workloads/pods/disruptions/#voluntary-and-involuntary-disruptions) like node drains and cluster upgrades, turn on the `pod_disruption_budget` option. When turned on, this setting creates a [`PodDisruptionBudget`](https://kubernetes.io/docs/tasks/run-application/configure-pdb/) for each job pod with `minAvailable: 1`. This action prevents the Kubernetes eviction API from evicting the pod during voluntary disruptions. ```toml [runners.kubernetes] pod_disruption_budget = true ``` The `PodDisruptionBudget`: - Is automatically deleted when the job pod is deleted through Kubernetes owner references. - Does not protect against involuntary disruptions like node failures or out-of-memory kills. - Requires additional RBAC permissions. For details, see [Configure runner API permissions](#configure-runner-api-permissions). > [!warning] > Turning on `PodDisruptionBudget` may cause node drains to hang if a job is running. Ensure your cluster upgrade > strategy accounts for potential node drain delays, or use job timeouts to limit how long a job can run. ### Overwrite pod tolerations To overwrite Kubernetes pod tolerations: 1. In the `config.toml` or Helm `values.yaml` file, to enable the overwrite of CI job pod tolerations, define a regular expression for `node_tolerations_overwrite_allowed`. This regular expression validates the values of CI variable names that start with `KUBERNETES_NODE_TOLERATIONS_`. ```toml runners: ... config: | [[runners]] [runners.kubernetes] node_tolerations_overwrite_allowed = ".*" ``` 1. In the `.gitlab-ci.yml` file, define one or more CI variables to overwrite CI job pod tolerations. 
```yaml variables: KUBERNETES_NODE_TOLERATIONS_1: 'node-role.kubernetes.io/master:NoSchedule' KUBERNETES_NODE_TOLERATIONS_2: 'custom.toleration=value:NoSchedule' KUBERNETES_NODE_TOLERATIONS_3: 'empty.value=:PreferNoSchedule' KUBERNETES_NODE_TOLERATIONS_4: 'onlyKey' KUBERNETES_NODE_TOLERATIONS_5: '' # tolerate all taints ``` ### Overwrite pod labels To overwrite Kubernetes pod labels for each CI/CD job: 1. In the `config.toml` file, define a regular expression for `pod_labels_overwrite_allowed`. 1. In the `.gitlab-ci.yml` file, set the `KUBERNETES_POD_LABELS_*` variables with values of `key=value`. The pod labels are overwritten to the `key=value`. You can apply multiple values: ```yaml variables: KUBERNETES_POD_LABELS_1: "Key1=Val1" KUBERNETES_POD_LABELS_2: "Key2=Val2" KUBERNETES_POD_LABELS_3: "Key3=Val3" ``` > [!warning] > Labels in the `runner.gitlab.com` namespace are read-only. GitLab ignores any attempts to add, modify, or remove these GitLab-internal labels. ### Overwrite pod annotations To overwrite Kubernetes pod annotations for each CI/CD job: 1. In the `config.toml` file, define a regular expression for `pod_annotations_overwrite_allowed`. 1. In the `.gitlab-ci.yml` file, set the `KUBERNETES_POD_ANNOTATIONS_*` variables and use `key=value` for the value. Pod annotations are overwritten to the `key=value`. You can specify multiple annotations: ```yaml variables: KUBERNETES_POD_ANNOTATIONS_1: "Key1=Val1" KUBERNETES_POD_ANNOTATIONS_2: "Key2=Val2" KUBERNETES_POD_ANNOTATIONS_3: "Key3=Val3" ``` In the example below, the `pod_annotations` and the `pod_annotations_overwrite_allowed` are set. This configuration allows overwrite of any of the `pod_annotations` configured in the `config.toml`. 
```toml [[runners]] # usual configuration executor = "kubernetes" [runners.kubernetes] image = "alpine" pod_annotations_overwrite_allowed = ".*" [runners.kubernetes.pod_annotations] "Key1" = "Val1" "Key2" = "Val2" "Key3" = "Val3" "Key4" = "Val4" ``` ### Overwrite generated pod specifications {{< details >}} - Status: Beta {{< /details >}} This feature is in [beta](https://docs.gitlab.com/policy/development_stages_support/#beta). We strongly recommend that you use this feature on a test Kubernetes cluster before you use it on a production cluster. To use this feature, you must enable the `FF_USE_ADVANCED_POD_SPEC_CONFIGURATION` [feature flag](../../configuration/feature-flags.md). To add feedback before the feature is made generally available, leave a comment on [issue 556286](https://gitlab.com/gitlab-org/gitlab/-/issues/556286). To modify the `PodSpec` generated by the runner manager, use the `pod_spec` setting in the `config.toml` file. For runner operator-specific configuration, see [patch structure](../../configuration/configuring_runner_operator.md#patch-structure). The `pod_spec` setting: - Overwrites and completes fields for the generated pod specification. - Overwrites configuration values that might have been set in your `config.toml` under `[runners.kubernetes]`. You can configure multiple `pod_spec` settings. | Setting | Description | |--------------|-------------| | `name` | Name given to the custom `pod_spec`. | | `patch_path` | Path to the file that defines the changes to apply to the final `PodSpec` object before it is generated. The file must be a JSON or YAML file. | | `patch` | A JSON or YAML format string that describes the changes which must be applied to the final `PodSpec` object before it is generated. | | `patch_type` | The strategy the runner uses to apply the specified changes to the `PodSpec` object generated by GitLab Runner. The accepted values are `merge`, `json`, and `strategic`. 
| You cannot set the `patch_path` and `patch` in the same `pod_spec` configuration, otherwise an error occurs. Example of multiple `pod_spec` configurations in the `config.toml`: ```toml [[runners]] [runners.kubernetes] [[runners.kubernetes.pod_spec]] name = "hostname" patch = ''' hostname: "custom-pod-hostname" ''' patch_type = "merge" [[runners.kubernetes.pod_spec]] name = "subdomain" patch = ''' subdomain: "subdomain" ''' patch_type = "strategic" [[runners.kubernetes.pod_spec]] name = "terminationGracePeriodSeconds" patch = ''' [{"op": "replace", "path": "/terminationGracePeriodSeconds", "value": 60}] ''' patch_type = "json" ``` #### Merge patch strategy The `merge` patch strategy applies [a key-value replacement](https://datatracker.ietf.org/doc/html/rfc7386) on the existing `PodSpec`. If you use this strategy, the `pod_spec` configuration in the `config.toml` **overwrites** the values in the final `PodSpec` object before it is generated. Because the values are completely overwritten, you should use this patch strategy with caution. Example of a `pod_spec` configuration with the `merge` patch strategy: ```toml concurrent = 1 check_interval = 1 log_level = "debug" shutdown_timeout = 0 [session_server] session_timeout = 1800 [[runners]] name = "" url = "https://gitlab.example.com" id = 0 token = "__REDACTED__" token_obtained_at = 0001-01-01T00:00:00Z token_expires_at = 0001-01-01T00:00:00Z executor = "kubernetes" shell = "bash" environment = ["FF_USE_ADVANCED_POD_SPEC_CONFIGURATION=true", "CUSTOM_VAR=value"] [runners.kubernetes] image = "alpine" ... [[runners.kubernetes.pod_spec]] name = "build envvars" patch = ''' containers: - env: - name: env1 value: "value1" - name: env2 value: "value2" name: build ''' patch_type = "merge" ``` With this configuration, the final `PodSpec` has only one container called `build` with two environment variables `env1` and `env2`. The example above makes the related CI job fail because: - The `helper` container specification is removed. 
- The `build` container specification lost all necessary configuration set by GitLab Runner. To prevent the job from failing, in this example, the `pod_spec` must contain the untouched properties generated by GitLab Runner. #### JSON patch strategy The `json` patch strategy uses the [JSON Patch specification](https://datatracker.ietf.org/doc/html/rfc6902) to give control over the `PodSpec` objects and arrays to update. You cannot use this strategy on `array` properties. Example of a `pod_spec` configuration with the `json` patch strategy. In this configuration, a new `key: value pair` is added to the existing `nodeSelector`. The existing values are not overwritten. ```toml concurrent = 1 check_interval = 1 log_level = "debug" shutdown_timeout = 0 [session_server] session_timeout = 1800 [[runners]] name = "" url = "https://gitlab.example.com" id = 0 token = "__REDACTED__" token_obtained_at = 0001-01-01T00:00:00Z token_expires_at = 0001-01-01T00:00:00Z executor = "kubernetes" shell = "bash" environment = ["FF_USE_ADVANCED_POD_SPEC_CONFIGURATION=true", "CUSTOM_VAR=value"] [runners.kubernetes] image = "alpine" ... [[runners.kubernetes.pod_spec]] name = "val1 node" patch = ''' [{ "op": "add", "path": "/nodeSelector", "value": { key1: "val1" } }] ''' patch_type = "json" ``` #### Strategic patch strategy The `strategic` patch strategy uses the existing `patchStrategy` applied to each field of the `PodSpec` object. Example of a `pod_spec` configuration with the `strategic` patch strategy. In this configuration, a `resource request` is set on the build container. 
```toml concurrent = 1 check_interval = 1 log_level = "debug" shutdown_timeout = 0 [session_server] session_timeout = 1800 [[runners]] name = "" url = "https://gitlab.example.com" id = 0 token = "__REDACTED__" token_obtained_at = 0001-01-01T00:00:00Z token_expires_at = 0001-01-01T00:00:00Z executor = "kubernetes" shell = "bash" environment = ["FF_USE_ADVANCED_POD_SPEC_CONFIGURATION=true", "CUSTOM_VAR=value"] [runners.kubernetes] image = "alpine" ... [[runners.kubernetes.pod_spec]] name = "cpu request 500m" patch = ''' containers: - name: build resources: requests: cpu: "500m" ''' patch_type = "strategic" ``` With this configuration, a `resource request` is set on the build container. #### Best practices - Test the added `pod_spec` in a test environment before deployment in a production environment. - Make sure that the `pod_spec` configuration does not negatively impact the GitLab Runner generated specification. - Do not use the `merge` patch strategy for complex pod specification updates. - Where possible, use the `config.toml` when the configuration is available. For example, the following configuration replaces the first environment variable set by GitLab Runner with the one set in the custom `pod_spec` instead of adding the environment variable set to the existing list. ```toml concurrent = 1 check_interval = 1 log_level = "debug" shutdown_timeout = 0 [session_server] session_timeout = 1800 [[runners]] name = "" url = "https://gitlab.example.com" id = 0 token = "__REDACTED__" token_obtained_at = 0001-01-01T00:00:00Z token_expires_at = 0001-01-01T00:00:00Z executor = "kubernetes" shell = "bash" environment = ["FF_USE_ADVANCED_POD_SPEC_CONFIGURATION=true", "CUSTOM_VAR=value"] [runners.kubernetes] image = "alpine" ... 
[[runners.kubernetes.pod_spec]] name = "build envvars" patch = ''' containers: - env: - name: env1 value: "value1" name: build ''' patch_type = "strategic" ``` #### Create a `PVC` for each build job by modifying the Pod Spec To create a [PersistentVolumeClaim](https://kubernetes.io/docs/concepts/storage/persistent-volumes/) for each build job make sure to check out how to enable the [Pod Spec functionality](#overwrite-generated-pod-specifications). Kubernetes allows you to create an ephemeral [PersistentVolumeClaim](https://kubernetes.io/docs/concepts/storage/persistent-volumes/) attached to a pod's lifecycle. This approach works if [dynamic provisioning](https://kubernetes.io/docs/concepts/storage/dynamic-provisioning/) is enabled on your Kubernetes cluster. Each `PVC` can request a new [Volume](https://kubernetes.io/docs/concepts/storage/volumes/). The volume is also tied to the pod's lifecycle. After [dynamic provisioning](https://kubernetes.io/docs/concepts/storage/dynamic-provisioning/) is enabled, the `config.toml` can be modified as follows to create an ephemeral `PVC`: ```toml [[runners.kubernetes.pod_spec]] name = "ephemeral-pvc" patch = ''' containers: - name: build volumeMounts: - name: builds mountPath: /builds - name: helper volumeMounts: - name: builds mountPath: /builds volumes: - name: builds ephemeral: volumeClaimTemplate: spec: storageClassName: accessModes: [ ReadWriteOnce ] resources: requests: storage: 1Gi ''' ``` ### Set a security policy for the pod Configure the [security context](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/) in the `config.toml` to set a security policy for the build pod. Use the following options: | Option | Type | Required | Description | |-----------------------|------------|----------|-------------| | `fs_group` | `int` | No | A special supplemental group that applies to all containers in a pod. | | `run_as_group` | `int` | No | The GID to run the entry point of the container process. 
| | `run_as_non_root` | boolean | No | Indicates that the container must run as a non-root user. | | `run_as_user` | `int` | No | The UID to run the entry point of the container process. | | `supplemental_groups` | `int` list | No | A list of groups applied to the first process run in each container, in addition to the container's primary GID. | | `selinux_type` | `string` | No | The SELinux type label that applies to all containers in a pod. | | `seccomp_profile.type` | string | No | The seccomp profile type. Valid values: `RuntimeDefault`, `Localhost`, `Unconfined`. | | `seccomp_profile.localhost_profile` | string | No | Path to a seccomp profile on the node. Required when type is `Localhost`. | | `app_armor_profile.type` | string | No | The AppArmor profile type. Valid values: `RuntimeDefault`, `Localhost`, `Unconfined`. Requires Kubernetes 1.30 or later. | | `app_armor_profile.localhost_profile` | string | No | The name of an AppArmor profile on the node. Required when type is `Localhost`. | Example of a pod security context in the `config.toml`: ```toml concurrent = %(concurrent)s check_interval = 30 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] helper_image = "gitlab-registry.example.com/helper:latest" [runners.kubernetes.pod_security_context] run_as_non_root = true run_as_user = 59417 run_as_group = 59417 fs_group = 59417 ``` ### Remove old runner pods Sometimes old runner pods are not cleared. This can happen when the runner manager is incorrectly shut down. To handle this situation, you can use the GitLab Runner Pod Cleanup application to schedule cleanup of old pods. For more information, see: - The GitLab Runner Pod Cleanup project [README](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-pod-cleanup/-/blob/main/readme.md). - GitLab Runner Pod Cleanup [documentation](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-pod-cleanup/-/blob/main/docs/README.md). 
### Set a security policy for the container Configure the [container security context](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/) in the `config.toml` executor to set a container security policy for the build, helper, or service pods. Use the following options: | Option | Type | Required | Description | |---------------------|-------------|----------|-------------| | `run_as_group` | int | No | The GID to run the entry point of the container process. | | `run_as_non_root` | boolean | No | Indicates that the container must run as a non-root user. | | `run_as_user` | int | No | The UID to run the entry point of the container process. | | `capabilities.add` | string list | No | The capabilities to add when running the container. | | `capabilities.drop` | string list | No | The capabilities to drop when running the container. | | `selinux_type` | string | No | The SELinux type label that is associated with the container process. | | `seccomp_profile.type` | string | No | The seccomp profile type. Valid values: `RuntimeDefault`, `Localhost`, `Unconfined`. | | `seccomp_profile.localhost_profile` | string | No | Path to a seccomp profile on the node. Required when type is `Localhost`. | | `app_armor_profile.type` | string | No | The AppArmor profile type. Valid values: `RuntimeDefault`, `Localhost`, `Unconfined`. Requires Kubernetes 1.30 or later. | | `app_armor_profile.localhost_profile` | string | No | The name of an AppArmor profile on the node. Required when type is `Localhost`. | In the following example in the `config.toml`, the security context configuration: - Sets a pod security context. - Overrides `run_as_user` and `run_as_group` for the build and helper containers. - Specifies that all service containers inherit `run_as_user` and `run_as_group` from the pod security context. 
```toml concurrent = 4 check_interval = 30 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] helper_image = "gitlab-registry.example.com/helper:latest" [runners.kubernetes.pod_security_context] run_as_non_root = true run_as_user = 59417 run_as_group = 59417 fs_group = 59417 [runners.kubernetes.init_permissions_container_security_context] run_as_user = 1000 run_as_group = 1000 [runners.kubernetes.build_container_security_context] run_as_user = 65534 run_as_group = 65534 [runners.kubernetes.build_container_security_context.capabilities] add = ["NET_ADMIN"] [runners.kubernetes.helper_container_security_context] run_as_user = 1000 run_as_group = 1000 [runners.kubernetes.service_container_security_context] run_as_user = 1000 run_as_group = 1000 ``` ### Set seccomp and AppArmor profiles You can configure [seccomp](https://kubernetes.io/docs/tutorials/security/seccomp/) and [AppArmor](https://kubernetes.io/docs/tutorials/security/apparmor/) profiles for build pods using the nested `seccomp_profile` and `app_armor_profile` configuration sections. These fields replace the deprecated annotation-based approach (`container.apparmor.security.beta.kubernetes.io` and `seccomp.security.alpha.kubernetes.io` annotations) with native Kubernetes API fields. 
| Field | Minimum Kubernetes Version | |-------|---------------------------| | `seccomp_profile` | 1.19 (GA) | | `app_armor_profile` | 1.30 (GA) | In the following example, seccomp and AppArmor profiles are set to `Unconfined` for the build container to enable rootless image building (for example, with BuildKit): ```toml concurrent = 4 check_interval = 30 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] [runners.kubernetes.pod_security_context] run_as_non_root = true run_as_user = 1001 [runners.kubernetes.pod_security_context.seccomp_profile] type = "RuntimeDefault" [runners.kubernetes.build_container_security_context] run_as_user = 1001 run_as_group = 1001 [runners.kubernetes.build_container_security_context.seccomp_profile] type = "Unconfined" [runners.kubernetes.build_container_security_context.app_armor_profile] type = "Unconfined" ``` The `seccomp_profile` and `app_armor_profile` sections are available in both `pod_security_context` and all container security contexts (`build_container_security_context`, `helper_container_security_context`, `service_container_security_context`, `init_permissions_container_security_context`). For `Localhost` type profiles, specify the profile path: ```toml [runners.kubernetes.build_container_security_context.seccomp_profile] type = "Localhost" localhost_profile = "profiles/my-seccomp-profile.json" [runners.kubernetes.build_container_security_context.app_armor_profile] type = "Localhost" localhost_profile = "my-apparmor-profile" ``` ### Set a pull policy Use the `pull_policy` parameter in the `config.toml` file to specify a single or multiple pull policies. The policy controls how an image is fetched and updated, and applies to the build image, helper image, and any services. To determine which policy to use, see [the Kubernetes documentation about pull policies](https://kubernetes.io/docs/concepts/containers/images/#image-pull-policy). 
For a single pull policy: ```toml [runners.kubernetes] pull_policy = "never" ``` For multiple pull policies: ```toml [runners.kubernetes] # use multiple pull policies pull_policy = ["always", "if-not-present"] ``` When you define multiple policies, each policy is attempted until the image is obtained successfully. For example, when you use `[ always, if-not-present ]`, the policy `if-not-present` is used if the `always` policy fails due to a temporary registry problem. To retry a failed pull: ```toml [runners.kubernetes] pull_policy = ["always", "always"] ``` The GitLab naming convention is different from the Kubernetes one. | Runner pull policy | Kubernetes pull policy | Description | |--------------------|------------------------|-------------| | none | none | Uses the default policy, as specified by Kubernetes. | | `if-not-present` | `IfNotPresent` | The image is pulled only if it is not already present on the node that executes the job. Review the [security considerations](../../security/_index.md#usage-of-private-docker-images-with-if-not-present-pull-policy) before you use this pull policy. | | `always` | `Always` | The image is pulled every time the job is executed. | | `never` | `Never` | The image is never pulled and requires the node to already have it. | ### Specify container capabilities You can specify the [Kubernetes capabilities](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-capabilities-for-a-container) to use in the container. To specify the container capabilities, use the `cap_add` and `cap_drop` options in the `config.toml`. Container runtimes can also define a default list of capabilities, like those in [Docker](https://github.com/moby/moby/blob/19.03/oci/defaults.go#L14-L32) or [containerd](https://github.com/containerd/containerd/blob/v1.4.0/oci/spec.go#L93-L110). There is a [list of capabilities](#default-list-of-dropped-capabilities) that the runner drops by default. 
Capabilities that you list in `cap_add` option are excluded from being dropped. Example configuration in the `config.toml` file: ```toml concurrent = 1 check_interval = 30 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] # ... cap_add = ["SYS_TIME", "IPC_LOCK"] cap_drop = ["SYS_ADMIN"] # ... ``` When you specify the capabilities: - User-defined `cap_drop` has priority over user-defined `cap_add`. If you define the same capability in both settings, only the capability from `cap_drop` is passed to the container. - Remove the `CAP_` prefix from capability identifiers passed to the container configuration. For example, if you want to add or drop the `CAP_SYS_TIME` capability, in the configuration file, enter the string, `SYS_TIME`. - The owner of the Kubernetes cluster [can define a PodSecurityPolicy](https://kubernetes.io/docs/concepts/security/pod-security-policy/#capabilities), where specific capabilities are allowed, restricted, or added by default. These rules take precedence over any user-defined configuration. ### Configure container user and group {{< history >}} - Support for security context-based user configuration [introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/38894) in GitLab Runner 18.4. {{< /history >}} Configure users and groups run by containers with the Kubernetes security context configuration. Administrators can control container security and allow jobs to specify users for specific container types. > [!note] > Setting `runAsUser`, `runAsGroup` or `image:user` in job definition for Windows is not supported. > Setting [runAsUserName](https://kubernetes.io/docs/tasks/configure-pod-container/configure-runasusername/) through [FF_USE_ADVANCED_POD_SPEC_CONFIGURATION](#overwrite-generated-pod-specifications) is recommended instead. #### Configuration precedence Runner applies user configuration in the following order: For build and service containers: 1. 
Container security context (`run_as_user`/`run_as_group`): Administrators control this configuration 1. Pod security context (`run_as_user`/`run_as_group`): Administrators control pod-level defaults 1. Job configuration (`.gitlab-ci.yml`): Users control this configuration For helper containers: 1. Helper container security context (`run_as_user`/`run_as_group`): Administrators control this configuration 1. Pod security context (`run_as_user`/`run_as_group`): Administrators control pod-level defaults Job configuration does not apply to helper containers for security isolation. Administrators can override user-specified values for security compliance. Helper containers remain isolated from job specifications. #### Requirements for Kubernetes Kubernetes requires numeric values for user and group IDs: - User and Group IDs must be integers - `SecurityContext` uses `run_as_user` and `run_as_group` and accepts only numeric values - In job configuration, use "1000" for only user, or "1000:1001" for user and group #### Override user and group settings Use pod and container-specific security contexts to override user and group settings: ```toml [[runners]] name = "k8s-runner" url = "https://gitlab.example.com" executor = "kubernetes" [runners.kubernetes] allowed_users = ["1000", "1001", "65534"] allowed_groups = ["1001", "65534"] # Pod security context - provides defaults for all containers [runners.kubernetes.pod_security_context] run_as_user = 1500 run_as_group = 1500 # Build container security context - overrides pod context [runners.kubernetes.build_container_security_context] run_as_user = 2000 run_as_group = 2001 # Helper container security context - overrides pod context [runners.kubernetes.helper_container_security_context] run_as_user = 3000 run_as_group = 3001 # Service container security context - overrides pod context [runners.kubernetes.service_container_security_context] run_as_user = 4000 run_as_group = 4001 ``` In this example: - Pod security context sets 
defaults (1500:1500) for containers without specific configuration - Container security contexts override the pod defaults - Users 1500, 2000, 3000, and 4000 are not in the `allowed_users` list, but security context can use them because these values bypass allowlist validation - This capability gives administrators unrestricted override control at both pod and container levels You can configure each container type independently. Security context configuration takes precedence over any user specification in job configurations. #### Specify users in job configuration Jobs can specify a user in the image configuration: ```yaml # Job with custom user job: image: name: alpine:latest kubernetes: user: "1000" script: - whoami - id # Job with user and group job_with_group: image: name: alpine:latest kubernetes: user: "1000:1001" script: - whoami - id # Job using environment variable job_dynamic: image: name: alpine:latest kubernetes: user: "${CUSTOM_USER_ID}" variables: CUSTOM_USER_ID: "1000" script: - whoami ``` #### Security validation The runner validates user and group IDs against allowlists for job-level configuration only: - Root user/group (UID/GID 0): Always requires explicit allowlist permission for job configuration - Empty `allowed_users`: Any non-root job user is allowed - Specified `allowed_users`: Only listed job users are allowed - Empty `allowed_groups`: Any non-root job group is allowed - Specified `allowed_groups`: Only listed job groups are allowed - Security context configuration: Not validated against allowlists (administrator override) ```toml [runners.kubernetes] allowed_users = ["1000", "65534"] allowed_groups = ["1001", "65534"] ``` #### Container behavior and precedence Security context configuration follows this precedence order (highest to lowest): 1. Container security context 1. Pod security context 1. 
Job configuration ```toml [runners.kubernetes] # Pod-level defaults [runners.kubernetes.pod_security_context] run_as_user = 1500 run_as_group = 1500 # Container-specific overrides [runners.kubernetes.build_container_security_context] run_as_user = 1000 run_as_group = 1001 [runners.kubernetes.helper_container_security_context] run_as_user = 1000 run_as_group = 1001 ``` ```yaml job: image: name: alpine:latest kubernetes: user: "2000:2001" # Ignored - container security context uses 1000:1001 ``` Each container type uses its security context configuration with pod-level fallback: - Build container: Uses `build_container_security_context` first, then `pod_security_context`, then job-level user configuration from `.gitlab-ci.yml`. - Helper container: Uses `helper_container_security_context` first, then `pod_security_context`. Does not inherit job-level user configuration. - Service containers: Use `service_container_security_context` first, then `pod_security_context`, then job-level user configuration. This approach gives you granular control over each container type's security configuration while keeping helper containers isolated from job specifications. 
#### Comparison with Docker executor | Feature | Docker executor | Kubernetes executor | |-------------------------------|------------------------------------|----------------------------------------------| | User format | Username or UID (`root` or `1000`) | Numeric UID only (`1000`) | | Group format | Not supported in user field | Numeric GID (`1000:1001`) | | Administrator override method | Runner `user` field | Container and pod security contexts | | Precedence | Runner > Job | Container context > Pod context > Job | | Security validation | Username allowlists | Numeric UID/GID allowlists | | Administrator override | Supported | Supported (pod and container levels) | | Helper container user | Same as build container | Uses own `helper_container_security_context` | | Pod-level defaults | Not available | `pod_security_context` | #### Troubleshoot user and group configuration ##### Error: `failed to parse UID` or `failed to parse GID` - Ensure the user ID is numeric: `"1000"` not `"user"` - Check the format: `"1000:1001"` for user and group - Negative values are not allowed ##### Error: `user "1000" is not in the allowed list` This error occurs only for job-level user configuration (`.gitlab-ci.yml`). Add the user to `allowed_users` in the runner configuration or remove `allowed_users` to allow any non-root job user. Security context and pod security context users are not validated against allowlists. ##### Error: `group "1001" is not in the allowed list` This error occurs only for job-level group configuration (`.gitlab-ci.yml`). Add the group to `allowed_groups` in the runner configuration or remove `allowed_groups` to allow any non-root job group. Security context and pod security context groups are not validated against allowlists. ##### Error: `user "0" is not in the allowed list` (Root user blocked) This error occurs only when root is specified in job configuration (`.gitlab-ci.yml`). 
Root user (UID 0) from job configuration requires explicit permission: add `"0"` to `allowed_users`. Alternatively, use security context or pod security context to set root user: `run_as_user = 0` (bypasses allowlist validation). ##### Container runs as different user than expected Check if the runner configuration overrides job configuration with security context (security context always wins). If using job configuration only, then verify if `allowed_users` contains the desired user ID. Security context values are not validated against allowlists and provide administrator override capability. ### Overwrite container resources You can overwrite Kubernetes CPU and memory allocations for each CI/CD job. You can apply settings for requests and limits for the build, helper, and service containers. To overwrite container resources, use the following variables in the `.gitlab-ci.yml` file. The values for the variables are restricted to the [maximum overwrite](#configuration-settings) setting for that resource. If the maximum overwrite has not been set for a resource, the variable is not used. 
``` yaml variables: KUBERNETES_CPU_REQUEST: "3" KUBERNETES_CPU_LIMIT: "5" KUBERNETES_MEMORY_REQUEST: "2Gi" KUBERNETES_MEMORY_LIMIT: "4Gi" KUBERNETES_EPHEMERAL_STORAGE_REQUEST: "512Mi" KUBERNETES_EPHEMERAL_STORAGE_LIMIT: "1Gi" KUBERNETES_HELPER_CPU_REQUEST: "3" KUBERNETES_HELPER_CPU_LIMIT: "5" KUBERNETES_HELPER_MEMORY_REQUEST: "2Gi" KUBERNETES_HELPER_MEMORY_LIMIT: "4Gi" KUBERNETES_HELPER_EPHEMERAL_STORAGE_REQUEST: "512Mi" KUBERNETES_HELPER_EPHEMERAL_STORAGE_LIMIT: "1Gi" KUBERNETES_SERVICE_CPU_REQUEST: "3" KUBERNETES_SERVICE_CPU_LIMIT: "5" KUBERNETES_SERVICE_MEMORY_REQUEST: "2Gi" KUBERNETES_SERVICE_MEMORY_LIMIT: "4Gi" KUBERNETES_SERVICE_EPHEMERAL_STORAGE_REQUEST: "512Mi" KUBERNETES_SERVICE_EPHEMERAL_STORAGE_LIMIT: "1Gi" ``` ### Define a list of services {{< history >}} - [Introduced support for `HEALTHCHECK_TCP_SERVICES`](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27215) in GitLab Runner 16.9. {{< /history >}} Define a list of [services](https://docs.gitlab.com/ci/services/) in the `config.toml`. ```toml concurrent = 1 check_interval = 30 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] helper_image = "gitlab-registry.example.com/helper:latest" [[runners.kubernetes.services]] name = "postgres:12-alpine" alias = "db1" [[runners.kubernetes.services]] name = "registry.example.com/svc1" alias = "svc1" entrypoint = ["entrypoint.sh"] command = ["executable","param1","param2"] environment = ["ENV=value1", "ENV2=value2"] ``` If the service environment includes `HEALTHCHECK_TCP_PORT`, GitLab Runner waits until the service responds on that port before starting user CI scripts. You can also configure the `HEALTHCHECK_TCP_PORT` environment variable in a `services` section of `.gitlab-ci.yml`. ### Overwrite service containers resources If a job has multiple service containers, you can set explicit resource requests and limits to each service container. 
Use the variables attribute in each service to overwrite container resources specified in `.gitlab-ci.yml`. ```yaml services: - name: redis:5 alias: redis5 variables: KUBERNETES_SERVICE_CPU_REQUEST: "3" KUBERNETES_SERVICE_CPU_LIMIT: "6" KUBERNETES_SERVICE_MEMORY_REQUEST: "3Gi" KUBERNETES_SERVICE_MEMORY_LIMIT: "6Gi" KUBERNETES_EPHEMERAL_STORAGE_REQUEST: "2Gi" KUBERNETES_EPHEMERAL_STORAGE_LIMIT: "3Gi" - name: postgres:12 alias: MY_relational-database.12 variables: KUBERNETES_CPU_REQUEST: "2" KUBERNETES_CPU_LIMIT: "4" KUBERNETES_MEMORY_REQUEST: "1Gi" KUBERNETES_MEMORY_LIMIT: "2Gi" KUBERNETES_EPHEMERAL_STORAGE_REQUEST: "1Gi" KUBERNETES_EPHEMERAL_STORAGE_LIMIT: "2Gi" ``` These specific settings take precedence over the general settings for the job. The values are still restricted to the [maximum overwrite setting](#configuration-settings) for that resource. ### Overwrite the Kubernetes default service account To overwrite the Kubernetes service account for each CI/CD job in the `.gitlab-ci.yml` file, set the variable `KUBERNETES_SERVICE_ACCOUNT_OVERWRITE`. You can use this variable to specify a service account attached to the namespace, which you may need for complex RBAC configurations. ``` yaml variables: KUBERNETES_SERVICE_ACCOUNT_OVERWRITE: ci-service-account ``` To ensure only designated service accounts are used during CI runs, define a regular expression for either: - The `service_account_overwrite_allowed` setting. - The `KUBERNETES_SERVICE_ACCOUNT_OVERWRITE_ALLOWED` environment variable. If you don't set either, the overwrite is disabled. ### Set the `RuntimeClass` Use `runtime_class_name` to set the [`RuntimeClass`](https://kubernetes.io/docs/concepts/containers/runtime-class/) for each job container. If you specify a `RuntimeClass` name but did not configure it in the cluster, or the feature is not supported, the executor fails to create jobs. 
```toml concurrent = 1 check_interval = 30 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] runtime_class_name = "myclass" ``` ### Change the base directory for build logs and scripts {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/37760) in GitLab Runner 17.2. {{< /history >}} You can change the directory where `emptyDir` volumes are mounted to the pod for build logs and scripts. You can use the directory to: - Run job pods with a modified image. - Run as an unprivileged user. - Customize `SecurityContext` settings. To change the directory: - For build logs, set `logs_base_dir`. - For build scripts, set `scripts_base_dir`. The expected value is a string that represents a base directory without the trailing slash (for example, `/tmp` or `/mydir/example`). **The directory must already exist**. This value is prepended to the generated path for build logs and scripts. For example: ```toml [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] logs_base_dir = "/tmp" scripts_base_dir = "/tmp" ``` This configuration would result in an `emptyDir` volume mounted in: - `/tmp/logs-${CI_PROJECT_ID}-${CI_JOB_ID}` for build logs instead of the default `/logs-${CI_PROJECT_ID}-${CI_JOB_ID}`. - `/tmp/scripts-${CI_PROJECT_ID}-${CI_JOB_ID}` for build scripts. ### User namespaces In Kubernetes 1.30 and later, you can isolate the user running in the container from the one on the host with [user namespaces](https://kubernetes.io/docs/concepts/workloads/pods/user-namespaces/). A process running as root in the container can run as a different unprivileged user on the host. With user namespaces, you can have more control over which images are used to run your CI/CD jobs. Operations that require additional settings (such as running as root) can also function without opening up additional attack surface on the host. 
To use this feature, ensure your cluster has been [properly configured](https://kubernetes.io/docs/concepts/workloads/pods/user-namespaces/#introduction). The following example adds `pod_spec` for the `hostUsers` key and disables both privileged pods and privilege escalation: ```toml [[runners]] environment = ["FF_USE_ADVANCED_POD_SPEC_CONFIGURATION=true"] builds_dir = "/tmp/builds" [runners.kubernetes] logs_base_dir = "/tmp" scripts_base_dir = "/tmp" privileged = false allowPrivilegeEscalation = false [[runners.kubernetes.pod_spec]] name = "hostUsers" patch = ''' [{"op": "add", "path": "/hostUsers", "value": false}] ''' patch_type = "json" ``` With user namespaces, you cannot use the default path for the build directory (`builds_dir`), build logs (`logs_base_dir`), or build scripts (`scripts_base_dir`). Even the container's root user does not have the permission to mount volumes. They also cannot create directories in the root of the container's file system. Instead, you can [change the base directory for build logs and scripts](#change-the-base-directory-for-build-logs-and-scripts). You can also change the build directory by setting `[[runners]].builds_dir`. ## Operating system, architecture, and Windows kernel version GitLab Runner with the Kubernetes executor can run builds on different operating systems if the configured cluster has nodes running those operating systems. The system determines the helper image's operating system, architecture, and Windows kernel version (if applicable). It then uses those parameters for other aspects of the build, for example the containers or images to use. The following diagram explains how the system detects these details: ```mermaid %%|fig-align: center flowchart TB init[Initial defaults:
OS: linux
Arch: amd64] hasAutoset{Configuration
helper_image_autoset_arch_and_os == true?} setArch[Update:
Arch: same as runner] isWin{GitLab Runner runs on Windows?} setWin[Update:
OS: windows
KernelVersion: same as runner] hasNodeSel{node_selector configured
in runners.kubernetes section?} hasNodeSelOverride{node_selector configured
as overwrite?} updateNodeSel[Update from node_selector if set:
OS: from kubernetes.io/os
Arch: from kubernetes.io/arch
KernelVersion: from node.kubernetes.io/windows-build] updateNodeSelOverride[Update from node_selector overwrites if set:
OS: from kubernetes.io/os
Arch: from kubernetes.io/arch
KernelVersion: from node.kubernetes.io/windows-build] result[final OS, Arch, kernelVersion] init --> hasAutoset hasAutoset -->|false | hasNodeSel hasAutoset -->|true | setArch setArch --> isWin isWin -->|false | hasNodeSel isWin -->|true | setWin setWin --> hasNodeSel hasNodeSel -->|false | hasNodeSelOverride hasNodeSel -->|true | updateNodeSel updateNodeSel --> hasNodeSelOverride hasNodeSelOverride -->|false | result hasNodeSelOverride -->|true | updateNodeSelOverride updateNodeSelOverride --> result ``` The following are the only parameters that influence the operating system, architecture, and Windows kernel version selection of the build. - The `helper_image_autoset_arch_and_os` configuration - The `kubernetes.io/os`, `kubernetes.io/arch`, and `node.kubernetes.io/windows-build` label selectors from: - `node_selector` configuration - `node_selector` overwrites Other parameters don't influence the selection process described above. However, you can use parameters like `affinity` to further limit the nodes on which builds are scheduled. ## Nodes ### Specify the node to execute builds Use the `node_selector` option to specify which node in a Kubernetes cluster can be used to execute the builds. It is a [`key=value`](https://toml.io/en/v1.0.0#keyvalue-pair) pair in `string=string` format (`string:string` in the case of environment variables). Runner uses the information provided to determine the operating system and architecture for the build. This ensures that the correct [helper image](../../configuration/advanced-configuration.md#helper-image) is used. The default operating system and architecture is `linux/amd64`. You can use specific labels to schedule nodes with different operating systems and architectures. 
#### Example for `linux/arm64` ```toml [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes.node_selector] "kubernetes.io/arch" = "arm64" "kubernetes.io/os" = "linux" ``` #### Example for `windows/amd64` Kubernetes for Windows has certain [limitations](https://kubernetes.io/docs/concepts/windows/intro/#windows-os-version-support). If you are using process isolation, you must also provide the specific Windows build version with the [`node.kubernetes.io/windows-build`](https://kubernetes.io/docs/reference/labels-annotations-taints/#nodekubernetesiowindows-build) label. ```toml [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" # The FF_USE_POWERSHELL_PATH_RESOLVER feature flag has to be enabled for PowerShell # to resolve paths for Windows correctly when Runner is operating in a Linux environment # but targeting Windows nodes. environment = ["FF_USE_POWERSHELL_PATH_RESOLVER=true"] [runners.kubernetes.node_selector] "kubernetes.io/arch" = "amd64" "kubernetes.io/os" = "windows" "node.kubernetes.io/windows-build" = "10.0.20348" ``` ### Overwrite the node selector To overwrite the node selector: 1. In the `config.toml` or Helm `values.yaml` file, enable overwriting of the node selector: ```toml runners: ... config: | [[runners]] [runners.kubernetes] node_selector_overwrite_allowed = ".*" ``` 1. 
In the `.gitlab-ci.yml` file, define the variable to overwrite the node selector: ```yaml variables: KUBERNETES_NODE_SELECTOR_*: '' ``` In the following example, to overwrite the Kubernetes node architecture, the settings are configured in the `config.toml` and `.gitlab-ci.yml` file: {{< tabs >}} {{< tab title="`config.toml`" >}} ```toml concurrent = 1 check_interval = 1 log_level = "debug" shutdown_timeout = 0 listen_address = ':9252' [session_server] session_timeout = 1800 [[runners]] name = "" url = "https://gitlab.com/" id = 0 token = "__REDACTED__" token_obtained_at = "0001-01-01T00:00:00Z" token_expires_at = "0001-01-01T00:00:00Z" executor = "kubernetes" shell = "bash" [runners.kubernetes] host = "" bearer_token_overwrite_allowed = false image = "alpine" namespace = "" namespace_overwrite_allowed = "" pod_labels_overwrite_allowed = "" service_account_overwrite_allowed = "" pod_annotations_overwrite_allowed = "" node_selector_overwrite_allowed = "kubernetes.io/arch=.*" # <--- allows overwrite of the architecture ``` {{< /tab >}} {{< tab title="`.gitlab-ci.yml`" >}} ```yaml job: image: IMAGE_NAME variables: KUBERNETES_NODE_SELECTOR_ARCH: 'kubernetes.io/arch=amd64' # <--- select the architecture ``` {{< /tab >}} {{< /tabs >}} ### Define a list of node affinities Define a list of [node affinities](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#node-affinity) to add to a pod specification at build time. > [!note] > `node_affinities` does not determine which operating system a build should run with, only `node_selectors`. For more information, see [Operating system, architecture, and Windows kernel version](#operating-system-architecture-and-windows-kernel-version). 
> Example configuration in the `config.toml`: ```toml concurrent = 1 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] [runners.kubernetes.affinity] [runners.kubernetes.affinity.node_affinity] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution]] weight = 100 [runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference.match_expressions]] key = "cpu_speed" operator = "In" values = ["fast"] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference.match_expressions]] key = "mem_speed" operator = "In" values = ["fast"] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution]] weight = 50 [runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference.match_expressions]] key = "core_count" operator = "In" values = ["high", "32"] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference.match_fields]] key = "cpu_type" operator = "In" values = ["arm64"] [runners.kubernetes.affinity.node_affinity.required_during_scheduling_ignored_during_execution] [[runners.kubernetes.affinity.node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms]] [[runners.kubernetes.affinity.node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms.match_expressions]] key = "kubernetes.io/e2e-az-name" operator = "In" values = [ "e2e-az1", "e2e-az2" ] ``` ### Define nodes where pods are scheduled Use pod affinity and anti-affinity to constrain the nodes [your pod is 
eligible](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#inter-pod-affinity-and-anti-affinity) to be scheduled on, based on labels on other pods. Example configuration in the `config.toml`: ```toml concurrent = 1 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] [runners.kubernetes.affinity] [runners.kubernetes.affinity.pod_affinity] [[runners.kubernetes.affinity.pod_affinity.required_during_scheduling_ignored_during_execution]] topology_key = "failure-domain.beta.kubernetes.io/zone" namespaces = ["namespace_1", "namespace_2"] [runners.kubernetes.affinity.pod_affinity.required_during_scheduling_ignored_during_execution.label_selector] [[runners.kubernetes.affinity.pod_affinity.required_during_scheduling_ignored_during_execution.label_selector.match_expressions]] key = "security" operator = "In" values = ["S1"] [[runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution]] weight = 100 [runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term] topology_key = "failure-domain.beta.kubernetes.io/zone" [runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.label_selector] [[runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.label_selector.match_expressions]] key = "security_2" operator = "In" values = ["S2"] [runners.kubernetes.affinity.pod_anti_affinity] [[runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution]] topology_key = "failure-domain.beta.kubernetes.io/zone" namespaces = ["namespace_1", "namespace_2"] [runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution.label_selector] 
[[runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution.label_selector.match_expressions]] key = "security" operator = "In" values = ["S1"] [runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution.namespace_selector] [[runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution.namespace_selector.match_expressions]] key = "security" operator = "In" values = ["S1"] [[runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution]] weight = 100 [runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term] topology_key = "failure-domain.beta.kubernetes.io/zone" [runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.label_selector] [[runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.label_selector.match_expressions]] key = "security_2" operator = "In" values = ["S2"] [runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.namespace_selector] [[runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.namespace_selector.match_expressions]] key = "security_2" operator = "In" values = ["S2"] ``` ## Networking ### Configure a container lifecycle hook Use [container lifecycle hooks](https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/) to run code configured for a handler when the corresponding lifecycle hook is executed. You can configure two types of hooks: `PreStop` and `PostStart`. Each of them allows only one type of handler to be set. 
Example configuration in the `config.toml` file: ```toml [[runners]] name = "kubernetes" url = "https://gitlab.example.com/" executor = "kubernetes" token = "yrnZW46BrtBFqM7xDzE7dddd" [runners.kubernetes] image = "alpine:3.11" privileged = true namespace = "default" [runners.kubernetes.container_lifecycle.post_start.exec] command = ["touch", "/builds/postStart.txt"] [runners.kubernetes.container_lifecycle.pre_stop.http_get] port = 8080 host = "localhost" path = "/test" [[runners.kubernetes.container_lifecycle.pre_stop.http_get.http_headers]] name = "header_name_1" value = "header_value_1" [[runners.kubernetes.container_lifecycle.pre_stop.http_get.http_headers]] name = "header_name_2" value = "header_value_2" ``` Use the following settings to configure each lifecycle hook: | Option | Type | Required | Description | |--------------|---------------------------------|----------|-------------| | `exec` | `KubernetesLifecycleExecAction` | No | `Exec` specifies the action to take. | | `http_get` | `KubernetesLifecycleHTTPGet` | No | `HTTPGet` specifies the http request to perform. | | `tcp_socket` | `KubernetesLifecycleTcpSocket` | No | `TCPsocket` specifies an action involving a TCP port. | #### `KubernetesLifecycleExecAction` | Option | Type | Required | Description | |-----------|---------------|----------|-------------| | `command` | `string` list | Yes | The command line to execute inside the container. | #### `KubernetesLifecycleHTTPGet` | Option | Type | Required | Description | |----------------|-----------------------------------------|----------|-------------| | `port` | `int` | Yes | The number of the port to access on the container. | | `host` | string | No | The host name to connect to, defaults to the pod IP (optional). | | `path` | string | No | The path to access on the HTTP server (optional). | | `scheme` | string | No | The scheme used for connecting to the host. Defaults to HTTP (optional). 
| | `http_headers` | `KubernetesLifecycleHTTPGetHeader` list | No | Custom headers to set in the request (optional). | #### `KubernetesLifecycleHTTPGetHeader` | Option | Type | Required | Description | |---------|--------|----------|-------------| | `name` | string | Yes | HTTP header name. | | `value` | string | Yes | HTTP header value. | #### `KubernetesLifecycleTcpSocket` | Option | Type | Required | Description | |--------|--------|----------|-------------| | `port` | `int` | Yes | The number of the port to access on the container. | | `host` | string | No | The host name to connect to, defaults to the pod IP (optional). | ### Configure pod DNS settings Use the following options to configure the [DNS settings](https://kubernetes.io/docs/concepts/services-networking/dns-pod-service/#pod-dns-config) of the pods. | Option | Type | Required | Description | |---------------|-----------------------------|----------|-------------| | `nameservers` | `string` list | No | A list of IP addresses that are used as DNS servers for the pod. | | `options` | `KubernetesDNSConfigOption` | No | An optional list of objects where each object may have a name property (required) and a value property (optional). | | `searches` | `string` list | No | A list of DNS search domains for hostname lookup in the pod. 
| Example configuration in the `config.toml` file: ```toml concurrent = 1 check_interval = 30 [[runners]] name = "myRunner" url = "https://gitlab.example.com" token = "__REDACTED__" executor = "kubernetes" [runners.kubernetes] image = "alpine:latest" [runners.kubernetes.dns_config] nameservers = [ "1.2.3.4", ] searches = [ "ns1.svc.cluster-domain.example", "my.dns.search.suffix", ] [[runners.kubernetes.dns_config.options]] name = "ndots" value = "2" [[runners.kubernetes.dns_config.options]] name = "edns0" ``` #### `KubernetesDNSConfigOption` | Option | Type | Required | Description | |---------|-----------|----------|-------------| | `name` | string | Yes | Configuration option name. | | `value` | `*string` | No | Configuration option value. | #### Default list of dropped capabilities GitLab Runner drops the following capabilities by default. User-defined `cap_add` has priority over the default list of dropped capabilities. If you want to add the capability that is dropped by default, add it to `cap_add`. - `NET_RAW` ### Add extra host aliases This feature is available in Kubernetes 1.7 and higher. Configure [host aliases](https://kubernetes.io/docs/tasks/network/customize-hosts-file-for-pods/) to instruct Kubernetes to add entries to the `/etc/hosts` file in the container. Use the following options: | Option | Type | Required | Description | |-------------|---------------|----------|-------------| | `IP` | string | Yes | The IP address you want to attach hosts to. | | `Hostnames` | `string` list | Yes | A list of host name aliases that are attached to the IP. 
| Example configuration in the `config.toml` file: ```toml concurrent = 4 [[runners]] # usual configuration executor = "kubernetes" [runners.kubernetes] [[runners.kubernetes.host_aliases]] ip = "127.0.0.1" hostnames = ["web1", "web2"] [[runners.kubernetes.host_aliases]] ip = "192.168.1.1" hostnames = ["web14", "web15"] ``` You can also configure host aliases by using the command-line parameter `--kubernetes-host_aliases` with JSON input. For example: ```shell gitlab-runner register --kubernetes-host_aliases '[{"ip":"192.168.1.100","hostnames":["myservice.local"]},{"ip":"192.168.1.101","hostnames":["otherservice.local"]}]' ``` ## Volumes ### Using the cache with the Kubernetes executor When the cache is used with the Kubernetes executor, a volume called `/cache` is mounted on the pod. During job execution, if cached data is needed, the runner checks if cached data is available. Cached data is available if a compressed file is available on the cache volume. To set the cache volume, use the [`cache_dir`](../../configuration/advanced-configuration.md#the-runners-section) setting in the `config.toml` file. - If available, the compressed file is extracted into the build folder and can then be used in the job. - If not available, the cached data is downloaded from the configured storage and saved into the `cache dir` as a compressed file. The compressed file is then extracted into the `build` folder. 
### Configure volume types You can mount the following volume types: - `hostPath` - `persistentVolumeClaim` - `configMap` - `secret` - `emptyDir` - `csi` Example of a configuration with multiple volume types: ```toml concurrent = 4 [[runners]] # usual configuration executor = "kubernetes" [runners.kubernetes] [[runners.kubernetes.volumes.host_path]] name = "hostpath-1" mount_path = "/path/to/mount/point" read_only = true host_path = "/path/on/host" [[runners.kubernetes.volumes.host_path]] name = "hostpath-2" mount_path = "/path/to/mount/point_2" read_only = true [[runners.kubernetes.volumes.pvc]] name = "pvc-1" mount_path = "/path/to/mount/point1" [[runners.kubernetes.volumes.config_map]] name = "config-map-1" mount_path = "/path/to/directory" [runners.kubernetes.volumes.config_map.items] "key_1" = "relative/path/to/key_1_file" "key_2" = "key_2" [[runners.kubernetes.volumes.secret]] name = "secrets" mount_path = "/path/to/directory1" read_only = true [runners.kubernetes.volumes.secret.items] "secret_1" = "relative/path/to/secret_1_file" [[runners.kubernetes.volumes.empty_dir]] name = "empty-dir" mount_path = "/path/to/empty_dir" medium = "Memory" [[runners.kubernetes.volumes.csi]] name = "csi-volume" mount_path = "/path/to/csi/volume" driver = "my-csi-driver" [runners.kubernetes.volumes.csi.volume_attributes] size = "2Gi" [[runners.kubernetes.volumes.nfs]] name = "nfs" mount_path = "/path/to/mount/point" read_only = false server = "foo.bar.com" path = "/path/on/nfs-share" ``` #### `hostPath` volume Configure the [`hostPath` volume](https://kubernetes.io/docs/concepts/storage/volumes/#hostpath) to instruct Kubernetes to mount a specified host path in the container. Use the following options in the `config.toml` file: | Option | Type | Required | Description | |---------------------|---------|----------|-------------| | `name` | string | Yes | The name of the volume. | | `mount_path` | string | Yes | The path where the volume is mounted in the container. 
| | `sub_path` | string | No | The [sub-path](https://kubernetes.io/docs/concepts/storage/volumes/#using-subpath) inside the mounted volume instead of its root. | | `host_path` | string | No | The path on the host mounted as a volume. If you don't specify a value, it defaults to the same path as `mount_path`. | | `read_only` | boolean | No | Sets the volume in read-only mode. Defaults to `false`. | | `mount_propagation` | string | No | Share mounted volumes between containers. For more information, see [Mount Propagation](https://kubernetes.io/docs/concepts/storage/volumes/#mount-propagation). | #### `persistentVolumeClaim` volume Configure the [`persistentVolumeClaim` volume](https://kubernetes.io/docs/concepts/storage/volumes/#persistentvolumeclaim) to instruct Kubernetes to use a `persistentVolumeClaim` defined in a Kubernetes cluster and mount it in the container. Use the following options in the `config.toml` file: | Option | Type | Required | Description | |---------------------|---------|----------|-------------| | `name` | string | Yes | The name of the volume and at the same time the name of `PersistentVolumeClaim` that should be used. Supports variables. For more information, see [Persistent per-concurrency build volumes](#persistent-per-concurrency-build-volumes). | | `mount_path` | string | Yes | Path in the container where the volume is mounted. | | `read_only` | boolean | No | Sets the volume to read-only mode (defaults to false). | | `sub_path` | string | No | Mount a [sub-path](https://kubernetes.io/docs/concepts/storage/volumes/#using-subpath) in the volume instead of the root. | | `mount_propagation` | string | No | Set the mount propagation mode for the volume. For more details, see [Kubernetes mount propagation](https://kubernetes.io/docs/concepts/storage/volumes/#mount-propagation). 
| #### `configMap` volume Configure the `configMap` volume to instruct Kubernetes to use a [`configMap`](https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/) defined in a Kubernetes cluster and mount it in the container. Use the following options in the `config.toml`: | Option | Type | Required | Description | |--------------|---------------------|----------|-------------| | `name` | string | Yes | The name of the volume and at the same time the name of `configMap` that should be used. | | `mount_path` | string | Yes | Path in the container where the volume is mounted. | | `read_only` | boolean | No | Sets the volume to read-only mode (defaults to false). | | `sub_path` | string | No | Mount a [sub-path](https://kubernetes.io/docs/concepts/storage/volumes/#using-subpath) in the volume instead of the root. | | `items` | `map[string]string` | no | Key-to-path mapping for keys from the `configMap` that should be used. | Each key from the `configMap` is changed into a file and stored in the mount path. By default: - All keys are included. - The `configMap` key is used as the filename. - The value is stored in the file contents. To change the default key and value storage, use the `items` option. If you use the `items` option, **only specified keys** are added to the volumes and all other keys are skipped. > [!note] > If you use a key that doesn't exist, the job fails on the pod creation stage. #### `secret` volume Configure a [`secret` volume](https://kubernetes.io/docs/concepts/storage/volumes/#secret) to instruct Kubernetes to use a `secret` defined in a Kubernetes cluster and mount it in the container. Use the following options in the `config.toml` file: | Option | Type | Required | Description | |--------------|---------------------|----------|-------------| | `name` | string | Yes | The name of the volume and at the same time the name of _secret_ that should be used. 
| | `mount_path` | string | Yes | Path inside of container where the volume should be mounted. | | `read_only` | boolean | No | Sets the volume in read-only mode (defaults to false). | | `sub_path` | string | No | Mount a [sub-path](https://kubernetes.io/docs/concepts/storage/volumes/#using-subpath) in the volume instead of the root. | | `items` | `map[string]string` | No | Key-to-path mapping for keys from the `secret` that should be used. | Each key from the selected `secret` is changed into a file stored in the selected mount path. By default: - All keys are included. - The `secret` key is used as the filename. - The value is stored in the file contents. To change the default key and value storage, use the `items` option. If you use the `items` option, **only specified keys** are added to the volumes and all other keys are skipped. > [!note] > If you use a key that doesn't exist, the job fails on the pod creation stage. #### `emptyDir` volume Configure an [`emptyDir` volume](https://kubernetes.io/docs/concepts/storage/volumes/#emptydir) to instruct Kubernetes to mount an empty directory in the container. Use the following options in the `config.toml` file: | Option | Type | Required | Description | |---------------------|--------|----------|-------------| | `name` | string | Yes | The name of the volume. | | `mount_path` | string | Yes | Path inside of container where the volume should be mounted. | | `sub_path` | string | No | Mount a [sub-path](https://kubernetes.io/docs/concepts/storage/volumes/#using-subpath) in the volume instead of the root. | | `medium` | string | No | "Memory" provides a `tmpfs`, otherwise it defaults to the node disk storage (defaults to ""). | | `size_limit` | string | No | The total amount of local storage required for the `emptyDir` volume. | | `mount_propagation` | string | No | Set the mount propagation mode for the volume. 
For more details, see [Kubernetes mount propagation](https://kubernetes.io/docs/concepts/storage/volumes/#mount-propagation). | #### `csi` volume Configure a [Container Storage Interface (`csi`) volume](https://kubernetes.io/docs/concepts/storage/volumes/#csi) to instruct Kubernetes to use a custom `csi` driver to mount an arbitrary storage system in the container. Use the following options in the `config.toml`: | Option | Type | Required | Description | |---------------------|---------------------|----------|-------------| | `name` | string | Yes | The name of the volume. | | `mount_path` | string | Yes | Path inside of container where the volume should be mounted. | | `driver` | string | Yes | A string value that specifies the name of the volume driver to use. | | `fs_type` | string | No | A string value that specifies the name of the file system type (for example, `ext4`, `xfs`, `ntfs`). | | `volume_attributes` | `map[string]string` | No | Key-value pair mapping for attributes of the `csi` volume. | | `sub_path` | string | No | Mount a [sub-path](https://kubernetes.io/docs/concepts/storage/volumes/#using-subpath) in the volume instead of the root. | | `read_only` | boolean | No | Sets the volume in read-only mode (defaults to false). | ### Mount volumes on service containers Volumes defined for the build container are also automatically mounted for all services containers. You can use this functionality as an alternative to [`services_tmpfs`](../docker.md#mount-a-directory-in-ram) (available only to Docker executor), to mount database storage in RAM to speed up tests. Example configuration in the `config.toml` file: ```toml [[runners]] # usual configuration executor = "kubernetes" [runners.kubernetes] [[runners.kubernetes.volumes.empty_dir]] name = "mysql-tmpfs" mount_path = "/var/lib/mysql" medium = "Memory" ``` ### Custom volume mount To store the builds directory for the job, define custom volume mounts to the configured `builds_dir` (`/builds` by default). 
If you use [`pvc` volumes](https://kubernetes.io/docs/concepts/storage/persistent-volumes/), based on the [access mode](https://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes), you might be limited to running jobs on one node. Example configuration in the `config.toml` file: ```toml concurrent = 4 [[runners]] # usual configuration executor = "kubernetes" builds_dir = "/builds" [runners.kubernetes] [[runners.kubernetes.volumes.empty_dir]] name = "repo" mount_path = "/builds" medium = "Memory" ``` ### Persistent per-concurrency build volumes {{< history >}} - Support for variable injection to `pvc.name` [introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4256) in GitLab 16.3. {{< /history >}} The build directories in Kubernetes CI jobs are ephemeral by default. If you want to persist your Git clone across jobs (to make `GIT_STRATEGY=fetch` work), you must mount a persistent volume claim for your build folder. Because multiple jobs can run concurrently, you must either use a `ReadWriteMany` volume, or have one volume for each potential concurrent job on the same runner. The latter is likely to be more performant. Here is an example of such a configuration: ```toml concurrent = 4 [[runners]] executor = "kubernetes" builds_dir = "/mnt/builds" [runners.kubernetes] [[runners.kubernetes.volumes.pvc]] # CI_CONCURRENT_ID identifies parallel jobs of the same runner. name = "build-pvc-$CI_CONCURRENT_ID" mount_path = "/mnt/builds" ``` In this example, create the persistent volume claims named `build-pvc-0` to `build-pvc-3` yourself. Create as many as the runner's `concurrent` setting dictates. ### Use a helper image After you set the security policy, the [helper image](../../configuration/advanced-configuration.md#helper-image) must conform to the policy. The image does not receive privileges from the root group, so you must ensure that the user ID is part of the root group. 
> [!note] > If you only need the `nonroot` environment, you can use the [GitLab Runner UBI](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/container_registry/1766421) > OpenShift Container Platform images instead of a helper image. You can also use the [GitLab Runner Helper UBI](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/container_registry/1766433) > OpenShift Container Platform images. The following example creates a user and group called `nonroot` and sets the helper image to run as that user. ```Dockerfile ARG tag FROM registry.gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/gitlab-runner-helper-ocp:${tag} USER root RUN groupadd -g 59417 nonroot && \ useradd -u 59417 nonroot -g nonroot WORKDIR /home/nonroot USER 59417:59417 ``` ## Using Docker in builds When you use Docker in your builds, there are several considerations you should be aware of. ### Exposed `/var/run/docker.sock` There is risk involved if you use the `runners.kubernetes.volumes.host_path` option to expose `/var/run/docker.sock` of your host into your build container. Be careful when you run builds in the same cluster as your production containers. The node's containers are accessible from the build container. ### Using `docker:dind` If you run the `docker:dind`, also called the `docker-in-docker` image, containers must run in privileged mode. This may have potential risks and cause additional issues. The Docker daemon runs as a separate container in the pod because it is started as a `service`, typically in the `.gitlab-ci.yml`. Containers in pods only share volumes assigned to them and an IP address, that they use to communicate with each other with `localhost`. The `docker:dind` container does not share `/var/run/docker.sock` and the `docker` binary tries to use it by default. 
To configure the client to use TCP to contact the Docker daemon, in the other container, include the environment variables of the build container: - `DOCKER_HOST=tcp://docker:2375` for no TLS connection. - `DOCKER_HOST=tcp://docker:2376` for TLS connection. In Docker 19.03 and later, TLS is enabled by default but you must map certificates to your client. You can enable non-TLS connection for Docker-in-Docker or mount certificates. For more information, see [Use the Docker executor with Docker-in-Docker](https://docs.gitlab.com/ci/docker/using_docker_build/#use-the-docker-executor-with-docker-in-docker). ### Prevent host kernel exposure If you use `docker:dind` or `/var/run/docker.sock`, the Docker daemon has access to the underlying kernel of the host machine. This means that any `limits` set in the pod do not work when Docker images are built. The Docker daemon reports the full capacity of the node, regardless of limits imposed on the Docker build containers spawned by Kubernetes. If you run build containers in privileged mode, or if `/var/run/docker.sock` is exposed, the host kernel may become exposed to build containers. To minimize exposure, specify a label in the `node_selector` option. This ensures that the node matches the labels before any containers can be deployed to the node. For example, if you specify the label `role=ci`, the build containers only run on nodes labeled `role=ci`, and all other production services run on other nodes. To further separate build containers, you can use node [taints](https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/). Taints prevent other pods from scheduling on the same nodes as the build pods, without extra configuration for the other pods. ### Restrict Docker images and services You can restrict the Docker images that are used to run your jobs. To do this, you specify wildcard patterns. For example, to allow images from your private Docker registry only: ```toml [[runners]] (...) 
executor = "kubernetes" [runners.kubernetes] (...) allowed_images = ["my.registry.tld:5000/*:*"] allowed_services = ["my.registry.tld:5000/*:*"] ``` Or, to restrict to a specific list of images from this registry: ```toml [[runners]] (...) executor = "kubernetes" [runners.kubernetes] (...) allowed_images = ["my.registry.tld:5000/ruby:*", "my.registry.tld:5000/node:*"] allowed_services = ["postgres:9.4", "postgres:latest"] ``` ### Restrict Docker pull policies In the `.gitlab-ci.yml` file, you can specify a pull policy. This policy determines how a CI/CD job should fetch images. To restrict which pull policies can be used from those specified in the `.gitlab-ci.yml` file, use `allowed_pull_policies`. For example, to allow only the `always` and `if-not-present` pull policies: ```toml [[runners]] (...) executor = "kubernetes" [runners.kubernetes] (...) allowed_pull_policies = ["always", "if-not-present"] ``` - If you don't specify `allowed_pull_policies`, the default is the value in the `pull_policy` keyword. - If you don't specify `pull_policy`, the cluster's image [default pull policy](https://kubernetes.io/docs/concepts/containers/images/#updating-images) is used. - The job uses only the pull policies that are listed in both `pull_policy` and `allowed_pull_policies`. The effective pull policy is determined by comparing the policies in [`pull_policy` keyword](../docker.md#configure-how-runners-pull-images) and `allowed_pull_policies`. GitLab uses the [intersection](https://en.wikipedia.org/wiki/Intersection_(set_theory)) of these two policy lists. For example, if `pull_policy` is `["always", "if-not-present"]` and `allowed_pull_policies` is `["if-not-present"]`, then the job uses only `if-not-present` because it's the only pull policy defined in both lists. - The existing `pull_policy` keyword must include at least one pull policy specified in `allowed_pull_policies`. The job fails if none of the `pull_policy` values match `allowed_pull_policies`. 
## Job execution GitLab Runner uses `kube attach` instead of `kube exec` by default. This should avoid problems like when a [job is marked successful midway](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4119) in environments with an unstable network. Follow [issue #27976](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27976) for progress on legacy execution strategy removal. ### Configure the number of request attempts to the Kubernetes API By default, the Kubernetes executor retries specific requests to the Kubernetes API after five failed attempts. The delay is controlled by a backoff algorithm with a 500 millisecond floor and a customizable ceiling with default value of two seconds. To configure the number of retries, use the `retry_limit` option in the `config.toml` file. Similarly, for backoff ceiling, use the `retry_backoff_max` option. The following failures are automatically retried: - `error dialing backend` - `TLS handshake timeout` - `read: connection timed out` - `connect: connection timed out` - `Timeout occurred` - `http2: client connection lost` - `connection refused` - `tls: internal error` - [`io.unexpected EOF`](https://pkg.go.dev/io#ErrUnexpectedEOF) - [`syscall.ECONNRESET`](https://pkg.go.dev/syscall#pkg-constants) - [`syscall.ECONNREFUSED`](https://pkg.go.dev/syscall#pkg-constants) - [`syscall.ECONNABORTED`](https://pkg.go.dev/syscall#pkg-constants) - [`syscall.EPIPE`](https://pkg.go.dev/syscall#pkg-constants) To control the number of retries for each error, use the `retry_limits` option. The `retry_limits` option specifies the number of retries for each error separately, and is a map of error messages to the number of retries. The error message can be a substring of the error message returned by the Kubernetes API. The `retry_limits` option has precedence over the `retry_limit` option. 
For example, configure the `retry_limits` option to retry the TLS related errors in your environment 10 times instead of the default five times: ```toml [[runners]] name = "myRunner" url = "https://gitlab.example.com/" executor = "kubernetes" [runners.kubernetes] retry_limit = 5 [runners.kubernetes.retry_limits] "TLS handshake timeout" = 10 "tls: internal error" = 10 ``` To retry an entirely different error, such as `exceeded quota` 20 times: ```toml [[runners]] name = "myRunner" url = "https://gitlab.example.com/" executor = "kubernetes" [runners.kubernetes] retry_limit = 5 [runners.kubernetes.retry_limits] "exceeded quota" = 20 ``` ### Container entrypoint known issues > [!note] > In GitLab 15.1 and later, the entrypoint defined in a Docker image is used with the Kubernetes executor when `FF_KUBERNETES_HONOR_ENTRYPOINT` is set. The container entry point has the following known issues: - If an entrypoint is defined in the Dockerfile for an image, it must open a valid shell. Otherwise, the job hangs. - To open a shell, the system passes the command as [`args`](https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#entrypoint) for the build container. - [File type CI/CD variables](https://docs.gitlab.com/ci/variables/#use-file-type-cicd-variables) are not written to disk when the entrypoint is executed. The file is only accessible in the job during script execution. - The following CI/CD variables are not accessible in the entrypoint. You can use [`before_script`](https://docs.gitlab.com/ci/yaml/#beforescript) to make any setup changes before running script commands: - [CI/CD variables defined in the settings](https://docs.gitlab.com/ci/variables/#define-a-cicd-variable-in-the-ui). - [Masked CI/CD variables](https://docs.gitlab.com/ci/variables/#mask-a-cicd-variable). Before GitLab Runner 17.4: - The entrypoint logs were not forwarded to the build's log. 
- With the Kubernetes executor with `kube exec`, GitLab Runner did not wait for the entrypoint to open a shell (see earlier in this section). Starting with GitLab Runner 17.4, the entrypoint logs are now forwarded. The system waits for the entrypoint to run and spawn the shell. This has the following implications: - If `FF_KUBERNETES_HONOR_ENTRYPOINT` is set, and the image's entrypoint takes longer than `poll_timeout` (default: 180 s), the build fails. The `poll_timeout` value (and potentially `poll_interval`) must be adapted if the entrypoint is expected to run longer. - When `FF_KUBERNETES_HONOR_ENTRYPOINT` and `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY` are set, the system adds a [startup probe](https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/#define-startup-probes) to the build container, so that it knows when the entrypoint is spawning the shell. If a custom entrypoint uses the provided `args` to spawn the expected shell, then the startup probe is resolved automatically. However, if the container image is spawning the shell without using the command passed in through `args`, the entrypoint must resolve the startup probe itself by creating a file named `.gitlab-startup-marker` inside the root of the build directory. The startup probe checks every `poll_interval` for the `.gitlab-startup-marker` file. If the file is not present within `poll_timeout`, the pod is considered unhealthy, and the system aborts the build. ### Restrict access to job variables When using Kubernetes executor, users with access to the Kubernetes cluster can read variables used in the job. By default, job variables are stored in: - Pod's environment section To restrict access to job variable data, you should use role-based access control (RBAC). When you use RBAC, only GitLab administrators have access to the namespace used by the GitLab Runner. 
If you need other users to access the GitLab Runner namespace, set the following `verbs` to restrict the user access in the GitLab Runner namespace: - For `pods` and `configmaps`: - `get` - `watch` - `list` - For `pods/exec` and `pods/attach`, use `create`. Example RBAC definition for authorized users: ```yaml kind: Role apiVersion: rbac.authorization.k8s.io/v1 metadata: name: gitlab-runner-authorized-users rules: - apiGroups: [""] resources: ["configmaps", "pods"] verbs: ["get", "watch", "list"] - apiGroups: [""] resources: ["pods/exec", "pods/attach"] verbs: ["create"] ``` ## Resources check during prepare step Prerequisites: - `image_pull_secrets` or `service_account` is set. - `resource_availability_check_max_attempts` is set to a number greater than zero. - Kubernetes `serviceAccount` used with the `get` and `list` permissions. GitLab Runner checks if the new service accounts or secrets are available with a 5-second interval between each try. - This feature is disabled by default. To enable this feature, set `resource_availability_check_max_attempts` to any value other than `0`. The value you set defines the amount of times the runner checks for service accounts or secrets. ### Overwrite the Kubernetes namespace Prerequisites: - In the `values.yml` file for GitLab Runner Helm charts, `rbac.clusterWideAccess` is set to `true`. - The runner has [permissions](#configure-runner-api-permissions) configured in the core API group. You can overwrite Kubernetes namespaces to designate a namespace for CI purposes, and deploy a custom set of pods to it. The pods spawned by the runner are in the overwritten namespace to enable access between containers during the CI stages. To overwrite the Kubernetes namespace for each CI/CD job, set the `KUBERNETES_NAMESPACE_OVERWRITE` variable in the `.gitlab-ci.yml` file. ``` yaml variables: KUBERNETES_NAMESPACE_OVERWRITE: ci-${CI_COMMIT_REF_SLUG} ``` > [!note] > This variable does not create a namespace on your cluster. 
Ensure that the namespace exists before you run the job. To use only designated namespaces during CI runs, in the `config.toml` file, define a regular expression for `namespace_overwrite_allowed`: ```toml [runners.kubernetes] ... namespace_overwrite_allowed = "ci-.*" ``` ================================================ FILE: docs/executors/kubernetes/troubleshooting.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Troubleshooting the Kubernetes executor --- The following errors are commonly encountered when using the Kubernetes executor. ## `Job failed (system failure): timed out waiting for pod to start` If the cluster cannot schedule the build pod before the timeout defined by `poll_timeout`, the build pod returns an error. The [Kubernetes Scheduler](https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#pod-lifetime) should be able to delete it. To fix this issue, increase the `poll_timeout` value in your `config.toml` file. ## `context deadline exceeded` The `context deadline exceeded` errors in job logs usually indicate that the Kubernetes API client hit a timeout for a given cluster API request. Check the [metrics of the `kube-apiserver` cluster component](https://kubernetes.io/docs/concepts/cluster-administration/system-metrics/) for any signs of: - Increased response latencies. - Error rates for common create or delete operations over pods, secrets, ConfigMaps, and other core (v1) resources. 
Logs for timeout-driven errors from the `kube-apiserver` operations may appear as: ```plaintext Job failed (system failure): prepare environment: context deadline exceeded Job failed (system failure): prepare environment: setting up build pod: context deadline exceeded ``` In some cases, the `kube-apiserver` error response might provide additional details of its sub-components failing (such as the Kubernetes cluster's `etcdserver`): ```plaintext Job failed (system failure): prepare environment: etcdserver: request timed out Job failed (system failure): prepare environment: etcdserver: leader changed Job failed (system failure): prepare environment: Internal error occurred: resource quota evaluates timeout ``` These `kube-apiserver` service failures can occur during the creation of the build pod and also during cleanup attempts after completion: ```plaintext Error cleaning up secrets: etcdserver: request timed out Error cleaning up secrets: etcdserver: leader changed Error cleaning up pod: etcdserver: request timed out, possibly due to previous leader failure Error cleaning up pod: etcdserver: request timed out Error cleaning up pod: context deadline exceeded ``` ## `Dial tcp xxx.xx.x.x:xxx: i/o timeout` This is a Kubernetes error that generally indicates the Kubernetes API server is unreachable by the runner manager. To resolve this issue: - If you use network security policies, grant access to the Kubernetes API, typically on port 443 or port 6443, or both. - Ensure that the Kubernetes API is running. ## Connection refused when attempting to communicate with the Kubernetes API When GitLab Runner makes a request to the Kubernetes API and it fails, it is likely because [`kube-apiserver`](https://kubernetes.io/docs/concepts/overview/components/#kube-apiserver) is overloaded and can't accept or process API requests. 
## `Error cleaning up pod` and `Job failed (system failure): prepare environment: waiting for pod running` The following errors occur when Kubernetes fails to schedule the job pod in a timely manner. GitLab Runner waits for the pod to be ready, but it fails and then tries to clean up the pod, which can also fail. ```plaintext Error: Error cleaning up pod: Delete "https://xx.xx.xx.x:443/api/v1/namespaces/gitlab-runner/runner-0001": dial tcp xx.xx.xx.x:443 connect: connection refused Error: Job failed (system failure): prepare environment: waiting for pod running: Get "https://xx.xx.xx.x:443/api/v1/namespaces/gitlab-runner/runner-0001": dial tcp xx.xx.xx.x:443 connect: connection refused ``` To troubleshoot, check the Kubernetes primary node and all nodes that run a [`kube-apiserver`](https://kubernetes.io/docs/concepts/overview/components/#kube-apiserver) instance. Ensure they have all of the resources needed to manage the target number of pods that you hope to scale up to on the cluster. To change the time GitLab Runner waits for a pod to reach its `Ready` status, use the [`poll_timeout`](_index.md#other-configtoml-settings) setting. To better understand how pods are scheduled or why they might not get scheduled on time, [read about the Kubernetes Scheduler](https://kubernetes.io/docs/concepts/scheduling-eviction/kube-scheduler/). ## `request did not complete within requested timeout` The message `request did not complete within requested timeout` observed during build pod creation indicates that a configured [admission control webhook](https://kubernetes.io/docs/reference/access-authn-authz/extensible-admission-controllers/) on the Kubernetes cluster is timing out. Admission control webhooks are a cluster-level administrative control intercept for all API requests they're scoped for, and can cause failures if they do not execute in time. Admission control webhooks support filters that can finely control which API requests and namespace sources it intercepts. 
If the Kubernetes API calls from GitLab Runner do not need to pass through an admission control webhook then you may alter the [webhook's selector/filter configuration](https://kubernetes.io/docs/reference/access-authn-authz/extensible-admission-controllers/#matching-requests-objectselector) to ignore the GitLab Runner namespace, or apply exclusion labels/annotations over the GitLab Runner pod by configuring `podAnnotations` or `podLabels` in the [GitLab Runner Helm Chart `values.yaml`](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/57e026d7f43f63adc32cdd2b21e6d450abcf0686/values.yaml#L490-500). For example, to avoid [DataDog Admission Controller webhook](https://docs.datadoghq.com/containers/cluster_agent/admission_controller/?tab=operator) from intercepting API requests made by the GitLab Runner manager pod, the following can be added: ```yaml podLabels: admission.datadoghq.com/enabled: false ``` To list a Kubernetes cluster's admission control webhooks, run: ```shell kubectl get validatingwebhookconfiguration -o yaml kubectl get mutatingwebhookconfiguration -o yaml ``` The following forms of logs can be observed when an admission control webhook times out: ```plaintext Job failed (system failure): prepare environment: Timeout: request did not complete within requested timeout Job failed (system failure): prepare environment: setting up credentials: Timeout: request did not complete within requested timeout ``` A failure from an admission control webhook may instead appear as: ```plaintext Job failed (system failure): prepare environment: setting up credentials: Internal error occurred: failed calling webhook "example.webhook.service" ``` ## Error `Could not resolve host: example.com` If using the `alpine` flavor of the [helper image](../../configuration/advanced-configuration.md#helper-image), there can be [DNS issues](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4129) related to Alpine's `musl`'s DNS resolver. 
The error might look similar to: - `fatal: unable to access 'https://gitlab-ci-token:token@example.com/repo/proj.git/': Could not resolve host: example.com` Use the `helper_image_flavor = "ubuntu"` option to resolve this issue. ## `docker: Cannot connect to the Docker daemon at tcp://docker:2375. Is the docker daemon running?` This error can occur when [using Docker-in-Docker](_index.md#using-dockerdind) if attempts are made to access the DIND service before it has had time to fully start up. For a more detailed explanation, see [this issue](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27215). ## `curl: (35) OpenSSL SSL_connect: SSL_ERROR_SYSCALL in connection to github.com:443` This error can happen when [using Docker-in-Docker](_index.md#using-dockerdind) if the DIND Maximum Transmission Unit (MTU) is larger than the Kubernetes overlay network. DIND uses a default MTU of 1500, which is too large to route across the default overlay network. The DIND MTU can be changed within the service definition: ```yaml services: - name: docker:dind command: ["--mtu=1450"] ``` ## `MountVolume.SetUp failed for volume "kube-api-access-xxxxx" : chown is not supported by windows` When you run your CI/CD job, you might receive an error like the following: ```plaintext MountVolume.SetUp failed for volume "kube-api-access-xxxxx" : chown c:\var\lib\kubelet\pods\xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx\volumes\kubernetes.io~projected\kube-api-access-xxxxx\..2022_07_07_20_52_19.102630072\token: not supported by windows ``` This issue occurs when you [use node selectors](_index.md#specify-the-node-to-execute-builds) to run builds on nodes with different operating systems and architectures. To fix the issue, configure `nodeSelector` so that the runner manager pod is always scheduled on a Linux node. 
For example, your [`values.yaml` file](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/main/values.yaml) should contain the following: ```yaml nodeSelector: kubernetes.io/os: linux ``` ## Build pods are assigned the worker node's IAM role instead of Runner IAM role This issue happens when the worker node IAM role does not have the permission to assume the correct role. To fix this, add the `sts:AssumeRole` permission to the trust relationship of the worker node's IAM role: ```json { "Effect": "Allow", "Principal": { "AWS": "arn:aws:iam:::role/" }, "Action": "sts:AssumeRole" } ``` ## Error: `pull_policy ([Always]) defined in GitLab pipeline config is not one of the allowed_pull_policies` This issue happens if you specified a `pull_policy` in your `.gitlab-ci.yml` but there is no policy configured in the Runner's configuration file. The error might look similar to: - `Preparation failed: invalid pull policy for image 'image-name:latest': pull_policy ([Always]) defined in GitLab pipeline config is not one of the allowed_pull_policies ([])` To fix this issue, add `allowed_pull_policies` to your configuration according to [restrict Docker pull policies](_index.md#restrict-docker-pull-policies). ## Background processes cause jobs to hang and timeout Background processes started during job execution can [prevent the build job from exiting](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2880). To avoid this you can: - Double fork the process. For example, `command_to_run < /dev/null &> /dev/null &`. - Kill the process before exiting the job script. ## Cache-related `permission denied` errors Files and folders that are generated in your job have certain UNIX ownerships and permissions. When your files and folders are archived or extracted, UNIX details are retained. However, the files and folders may mismatch with the `USER` configurations of [helper images](../../configuration/advanced-configuration.md#helper-image). 
If you encounter permission-related errors in the `Creating cache ...` step, you can: - As a solution, investigate whether the source data is modified, for example in the job script that creates the cached files. - As a workaround, add matching [chown](https://linux.die.net/man/1/chown) and [chmod](https://linux.die.net/man/1/chmod) commands to your [(`before_`/`after_`)`script:` directives](https://docs.gitlab.com/ci/yaml/#default). ## Apparently redundant shell process in build container with init system The process tree might include a shell process when either: - `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY` is `false` and `FF_USE_DUMB_INIT_WITH_KUBERNETES_EXECUTOR` is `true`. - The `ENTRYPOINT` of the build image is an init system (like `tini-init` or `dumb-init`). ```shell UID PID PPID C STIME TTY TIME CMD root 1 0 0 21:58 ? 00:00:00 /scripts-37474587-5556589047/dumb-init -- sh -c if [ -x /usr/local/bin/bash ]; then .exec /usr/local/bin/bash elif [ -x /usr/bin/bash ]; then .exec /usr/bin/bash elif [ -x /bin/bash ]; then .exec /bin/bash elif [ -x /usr/local/bin/sh ]; then .exec /usr/local/bin/sh elif [ -x /usr/bin/sh ]; then .exec /usr/bin/sh elif [ -x /bin/sh ]; then .exec /bin/sh elif [ -x /busybox/sh ]; then .exec /busybox/sh else .echo shell not found .exit 1 fi root 7 1 0 21:58 ? 00:00:00 /usr/bin/bash <---------------- WHAT IS THIS??? root 26 1 0 21:58 ? 00:00:00 sh -c (/scripts-37474587-5556589047/detect_shell_script /scripts-37474587-5556589047/step_script 2>&1 | tee -a /logs-37474587-5556589047/output.log) & root 27 26 0 21:58 ? 00:00:00 \_ /usr/bin/bash /scripts-37474587-5556589047/step_script root 32 27 0 21:58 ? 00:00:00 | \_ /usr/bin/bash /scripts-37474587-5556589047/step_script root 37 32 0 21:58 ? 00:00:00 | \_ ps -ef --forest root 28 26 0 21:58 ? 
00:00:00 \_ tee -a /logs-37474587-5556589047/output.log ``` This shell process, which might be `sh`, `bash` or `busybox`, with a `PPID` of 1 and a `PID` of 6 or 7, is the shell started by the shell detection script run by the init system (`PID` 1 above). The process is not redundant, and is the typical operation when the build container runs with an init system. ## Runner pod fails to run job results and times out despite successful registration After the runner pod registers with GitLab, it attempts to run a job but does not and the job eventually times out. The following errors are reported: ```plaintext There has been a timeout failure or the job got stuck. Check your timeout limits or try again. This job does not have a trace. ``` In this case, the runner might receive the error, ```plaintext HTTP 204 No content response code when connecting to the `jobs/request` API. ``` To troubleshoot this issue, manually send a POST request to the API to validate if the TCP connection is hanging. If the TCP connection is hanging, the runner might not be able to request CI job payloads. ## `failed to reserve container name` for init-permissions container when `gcs-fuse-csi-driver` is used The `gcs-fuse-csi-driver` `csi` driver [does not support mounting volumes for the init container](https://github.com/GoogleCloudPlatform/gcs-fuse-csi-driver/issues/38). This can cause failures starting the init container when using this driver. Features [introduced in Kubernetes 1.28](https://kubernetes.io/blog/2023/08/25/native-sidecar-containers/) must be supported in the driver's project to resolve this bug. ## Error: `only read-only root filesystem container is allowed` In clusters with admission policies that force containers to run on read-only mounted root filesystems, this error might appear when: - You install GitLab Runner. - GitLab Runner tries to schedule a build pod. 
These admission policies are usually enforced by an admission controller like [Gatekeeper](https://open-policy-agent.github.io/gatekeeper/website/) or [Kyverno](https://kyverno.io/). For example, a policy forcing containers to run on read-only root filesystems is the [`readOnlyRootFilesystem`](https://open-policy-agent.github.io/gatekeeper-library/website/validation/read-only-root-filesystem/) Gatekeeper policy. To resolve this issue: - All pods that are deployed to the cluster must adhere to the admission policies by setting `securityContext.readOnlyRootFilesystem` to `true` for their containers so the admission controller does not block the pod. - The containers must run successfully and be able to write to the filesystem even though the root file system is mounted read-only. ### For GitLab Runner If GitLab Runner is deployed with the [GitLab Runner Helm chart](../../install/kubernetes.md), you must update the GitLab chart configuration to have: - A proper `securityContext` value: ```yaml <...> securityContext: readOnlyRootFilesystem: true <...> ``` - A writable file system mounted where the pod can write: ```yaml <...> volumeMounts: - name: tmp-dir mountPath: /tmp volumes: - name: tmp-dir emptyDir: medium: "Memory" <...> ``` ### For the build pod To make the build pod run on a read-only root file system, configure the different containers' security contexts in `config.toml`. 
You can set the GitLab chart variable `runners.config`, which is passed to the build pod: ```yaml runners: config: | <...> [[runners]] [runners.kubernetes.build_container_security_context] read_only_root_filesystem = true [runners.kubernetes.init_permissions_container_security_context] read_only_root_filesystem = true [runners.kubernetes.helper_container_security_context] read_only_root_filesystem = true # This section is only needed if jobs with services are used [runners.kubernetes.service_container_security_context] read_only_root_filesystem = true <...> ``` To make the build pod and its containers run successfully on a read-only file system, you must have writable filesystems in locations where the build pod can write. At a minimum, these locations are the build and home directories. Ensure the build process has write access to other locations if necessary. The home directory must generally be writable so programs can store their configuration and other data they need for successful execution. The `git` binary is one example of a program that expects to be able to write to the home directory. To make the home directory writable regardless of its path in different container images: 1. Mount a volume on a stable path (regardless of which build image you use). 1. Change the home directory by setting the environment variable `$HOME` globally for all builds. You can configure the build pod and its containers in `config.toml` by updating the value of the GitLab chart variable `runners.config`. ```yaml runners: config: | <...> [[runners]] environment = ["HOME=/build_home"] [[runners.kubernetes.volumes.empty_dir]] name = "repo" mount_path = "/builds" [[runners.kubernetes.volumes.empty_dir]] name = "build-home" mount_path = "/build_home" <...> ``` > [!note] > Instead of `emptyDir`, you can use any other > [supported volume types](_index.md#configure-volume-types). 
> Because all files that are not explicitly handled and stored as build > artifacts are usually ephemeral, `emptyDir` works for most cases. ## AWS EKS: Error cleaning up pod: pods "runner-**" not found or status is "Failed" The Amazon EKS zone rebalancing feature balances the availability zones in an autoscaling group. This feature might stop a node in one availability zone and create it in another. Runner jobs cannot be stopped and moved to another node. Disable this feature for runner jobs to resolve this error. ## Services not supported with Windows containers When attempting to use [services](https://docs.gitlab.com/ci/services/) on Windows nodes, they might fail with the following error: - `ERROR: Job failed (system failure): prepare environment: admission webhook "windows.common-webhooks.networking.gke.io" denied the request: spec.hostAliases: Invalid value: []v1.HostAlias{v1.HostAlias{IP:"127.0.0.1", Hostnames:[]string{""}}}: Windows does not support this field.` Depending on the Kubernetes runtime, the error could either be reported or silently ignored. For example, GKE does report the error. Services are implemented using `hostAlias` in Kubernetes executor, which is not supported in Windows containers. ================================================ FILE: docs/executors/kubernetes/use_podman_with_kubernetes.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Use Podman with GitLab Runner on Kubernetes --- Podman is an open-source [Open Container Initiative](https://opencontainers.org/) (OCI) tool for developing, managing, and running containers. Podman provides configurations that let you build container images in a CI job, without a root user or [privileged](../../security/_index.md#usage-of-docker-executor) escalation on the host. 
This document covers information about how to configure Podman to use it with GitLab Runner on OpenShift and non-OpenShift Kubernetes clusters. The configuration applies to container images set as a root and non-root user. ## Run Podman on non-OpenShift Kubernetes cluster ### Run Podman as a non-root user with the `--privileged` flag set to `true` > [!warning] > When you run Podman with the `--privileged` flag set to `true`, the container engine launches the container without any additional security controls. To run Podman as a non-root user with non-root container processes: 1. Create a container image with Podman using the following sample code in your `.gitlab-ci.yml` file: ```yaml variables: HOME: /my_custom_dir DOCKER_HOST: tcp://docker:2375 podman-privileged-test: image: quay.io/podman/stable before_script: - podman info - id script: - podman build . -t playground-bis:testing ``` You can also enable feature flags to adjust runner behavior for your environment. For more information, see [available feature flags](../../configuration/feature-flags.md#available-feature-flags). 1. Set the default `user_id` to `1000` by adding the following configurations to your `config.toml` file: ```toml [runners.kubernetes.pod_security_context] run_as_user = 1000 [runners.kubernetes.build_container_security_context] run_as_user = 1000 ``` 1. 
Add the following runner configurations to your `config.toml` file: ```toml listen_address = ":9252" concurrent = 3 check_interval = 1 log_level = "debug" log_format = "runner" connection_max_age = "15m0s" shutdown_timeout = 0 [session_server] session_timeout = 1800 [[runners]] name = "investigation" limit = 50 url = "https://gitlab.com/" executor = "kubernetes" builds_dir = "/my_custom_dir" shell = "bash" [runners.kubernetes] host = "" bearer_token_overwrite_allowed = false image = "" namespace = "" namespace_overwrite_allowed = "" namespace_per_job = false privileged = true node_selector_overwrite_allowed = ".*" node_tolerations_overwrite_allowed = "" pod_labels_overwrite_allowed = "" service_account_overwrite_allowed = "" pod_annotations_overwrite_allowed = "" [runners.kubernetes.volumes] [[runners.kubernetes.volumes.empty_dir]] name = "repo" mount_path = "/my_custom_dir" [runners.kubernetes.pod_security_context] run_as_user = 1000 [runners.kubernetes.build_container_security_context] run_as_user = 1000 ``` If the jobs pass as expected, the job log should look like in the following example: ```shell ... $ podman build . -t playground-bis:testing STEP 1/6: FROM docker.io/library/golang:1.24.4 AS builder Trying to pull docker.io/library/golang:1.24.4... 
Getting image source signatures Copying blob sha256:6564e0d9b89ebe3e93013c7d7fbf4d560c5831ed61448167899654bf22c6dc59 Copying blob sha256:2b238499ec52e0d6be479f948c76ba0bc3cc282f612d5a6a4b5ef52ff45f6b2c Copying blob sha256:6d11c181ebb38ef30f2681a42f02030bc6fdcfbe9d5248270ee065eb7302b500 Copying blob sha256:600c2555aee6a6bed84df8b8e456b2d705602757d42f5009a41b03abceff02f8 Copying blob sha256:41b754d079e82fafdf15447cfc188868092eaf1cf4a3f96c9d90ab1b7db91230 Copying blob sha256:a355a3cac949bed5cda9c62103ceb0f004727cedcd2a17d7c9836aea1a452fda Copying blob sha256:4f4fb700ef54461cfa02571ae0db9a0dc1e0cdb5577484a6d75e68dc38e8acc1 Copying config sha256:723e5b94e776fd1a0d4e9bb860400f02acbe62cdac487f114f5bd6303d76fbd9 Writing manifest to image destination STEP 2/6: WORKDIR "/workspace" --> 32b9a99335a7 STEP 3/6: COPY . . --> 3de77f571048 STEP 4/6: RUN go build -v main.go internal/unsafeheader internal/goarch internal/cpu internal/abi internal/bytealg internal/byteorder internal/chacha8rand internal/coverage/rtcov internal/godebugs internal/goexperiment internal/goos internal/profilerecord internal/runtime/atomic internal/runtime/syscall internal/stringslite internal/runtime/exithook runtime/internal/math runtime/internal/sys cmp internal/itoa internal/race runtime math/bits math unicode/utf8 sync/atomic unicode internal/asan internal/msan internal/reflectlite iter sync slices errors internal/bisect strconv io internal/oserror path internal/godebug syscall reflect time io/fs internal/filepathlite internal/syscall/unix internal/poll internal/fmtsort internal/syscall/execenv internal/testlog os fmt command-line-arguments --> 6340b6cccaa9 STEP 5/6: RUN ls -halF total 2.2M drwxr-xr-x 1 root root 4.0K Oct 3 15:14 ./ dr-xr-xr-x 1 root root 4.0K Oct 3 15:14 ../ drwxrwxrwx 6 root root 4.0K Oct 3 15:14 .git/ -rw-rw-rw- 1 root root 690 Oct 3 15:14 .gitlab-ci.yml -rw-rw-rw- 1 root root 1.8K Oct 3 15:14 Dockerfile -rw-rw-rw- 1 root root 74 Oct 3 15:14 Dockerfile_multistage -rw-rw-rw- 1 root 
root 18 Oct 3 15:14 README.md -rw-rw-rw- 1 root root 51 Oct 3 15:14 go.mod -rw-rw-rw- 1 root root 258 Oct 3 15:14 long-script-with-cleanup.sh -rwxr-xr-x 1 root root 2.1M Oct 3 15:14 main* -rw-rw-rw- 1 root root 157 Oct 3 15:14 main.go -rw-rw-rw- 1 root root 333 Oct 3 15:14 string_output.sh drwxrwxrwx 2 root root 4.0K Oct 3 15:14 test/ --> e3cce3e2b16a STEP 6/6: CMD ["exec", "main"] COMMIT playground-bis:testing --> 2bf7283ee21d Successfully tagged localhost/playground-bis:testing 2bf7283ee21dd86134fbda06a5835af4b68fe3dc6a3525b96587e14c40d7f1a3 Cleaning up project directory and file based variables 00:01 Job succeeded ``` ### Run Podman as a root user with the `--privileged` flag set to `false` Prerequisites: - Permission to use `fuse-overlayfs` inside the container. The following steps are inspired from the "Rootless Podman without the privileged flag" section of [How to use Podman inside of Kubernetes](https://www.redhat.com/en/blog/podman-inside-kubernetes). When running rootless Podman, you can remove the privileged flag by making a few adjustments to your system configuration. The container needs access to `/dev/fuse` to use `fuse-overlayfs` inside the container. You must also disable SELinux on the host running the Kubernetes cluster. SELinux prevents containerized processes from mounting the required file systems inside a container. To achieve this: 1. 
Create a device plugin that can be used by the job Pod, for example: ```yaml apiVersion: apps/v1 kind: DaemonSet metadata: name: fuse-device-plugin-daemonset namespace: kube-system spec: selector: matchLabels: name: fuse-device-plugin-ds template: metadata: labels: name: fuse-device-plugin-ds spec: hostNetwork: true containers: - image: soolaugust/fuse-device-plugin:v1.0 name: fuse-device-plugin-ctr securityContext: allowPrivilegeEscalation: false capabilities: drop: ["ALL"] volumeMounts: - name: device-plugin mountPath: /var/lib/kubelet/device-plugins volumes: - name: device-plugin hostPath: path: /var/lib/kubelet/device-plugins ``` 1. Configure the `config.toml` to install GitLab Runner on the cluster. - Set the job Pod to run as a `root` user with the `--privileged` flag set to `false`: ```toml allow_privilege_escalation = false [runners.kubernetes.pod_security_context] run_as_non_root = false [runners.kubernetes.build_container_security_context] run_as_user = 0 run_as_group = 0 ``` - Set a resource limit to the job Pod by using the [`pod_spec` feature](_index.md#overwrite-generated-pod-specifications). To use `pod_spec`, set the `FF_USE_ADVANCED_POD_SPEC_CONFIGURATION` feature flag to `true`. 
```toml [[runners.kubernetes.pod_spec]] name = "device-fuse" patch_type = "strategic" patch = ''' containers: - name: build resources: limits: github.com/fuse: 1 ''' ``` The `config.toml` should look similar to: ```toml [[runners]] [runners.kubernetes] host = "" bearer_token_overwrite_allowed = false pod_termination_grace_period_seconds = 0 namespace = "" namespace_overwrite_allowed = "" pod_labels_overwrite_allowed = "" service_account_overwrite_allowed = "" pod_annotations_overwrite_allowed = "" node_selector_overwrite_allowed = ".*" allow_privilege_escalation = false [runners.kubernetes.pod_security_context] run_as_non_root = false [runners.kubernetes.build_container_security_context] run_as_user = 0 run_as_group = 0 [[runners.kubernetes.pod_spec]] name = "device-fuse" patch_type = "strategic" patch = ''' containers: - name: build resources: limits: github.com/fuse: 1 ''' ``` 1. Run the job to build an image with Podman. ```yaml variables: FF_USE_ADVANCED_POD_SPEC_CONFIGURATION: "true" podman-privileged-test: image: quay.io/podman/stable before_script: - podman info - id script: - podman build . -t playground-bis:testing ``` You can also enable feature flags to adjust runner behavior for your environment. For more information, see [available feature flags](../../configuration/feature-flags.md#available-feature-flags). The job runs `podman build`, which should complete successfully. ```shell ... $ podman build . -t playground-bis:testing time="2024-11-06T16:57:41Z" level=warning msg="Using cgroups-v1 which is deprecated in favor of cgroups-v2 with Podman v5 and will be removed in a future version. Set environment variable `PODMAN_IGNORE_CGROUPSV1_WARNING` to hide this warning." time="2024-11-06T16:57:41Z" level=warning msg="Using cgroups-v1 which is deprecated in favor of cgroups-v2 with Podman v5 and will be removed in a future version. Set environment variable `PODMAN_IGNORE_CGROUPSV1_WARNING` to hide this warning." 
STEP 1/6: FROM docker.io/library/golang:1.24.4 AS builder Trying to pull docker.io/library/golang:1.24.4... Getting image source signatures Copying blob sha256:32d3574b34bd65a6cf89a80e5bd939574c7a9bd3efbaa4881292aaca16d3d0dc Copying blob sha256:a47cff7f31e941e78bf63ca19f0811b675283e2c00ddea10c57f78d93b2bc343 Copying blob sha256:cdd62bf39133c498a16f7a7b1b6555ba43d02b2511c508fa4c0a9b1975ffe20e Copying blob sha256:1eb015951d08f558e9805d427f6d30728b0cd94d5c9b9538cd4f7df57598664a Copying blob sha256:a173f2aee8e962ea19db1e418ae84a0c9f71480b51f768a19332dfa83d7722a5 Copying blob sha256:e7bff916ab0c126c9d943f0c481a905f402e00f206a89248f257ef90beaabbd8 Copying blob sha256:4f4fb700ef54461cfa02571ae0db9a0dc1e0cdb5577484a6d75e68dc38e8acc1 Copying config sha256:8027d6b1a7f0702ed8a4174fd022be03f87e35c7a7fa00afb2bf4178b22080d4 Writing manifest to image destination STEP 2/6: WORKDIR "/workspace" --> 94b34d00b2cb STEP 3/6: COPY . . --> b807785fe549 STEP 4/6: RUN go build -v main.go internal/goarch internal/unsafeheader internal/cpu internal/abi internal/bytealg internal/byteorder internal/chacha8rand internal/coverage/rtcov internal/godebugs internal/goexperiment internal/goos internal/profilerecord internal/runtime/atomic internal/runtime/syscall internal/runtime/exithook internal/stringslite runtime/internal/math runtime/internal/sys cmp internal/itoa internal/race runtime math/bits math unicode/utf8 sync/atomic unicode internal/asan internal/msan iter internal/reflectlite sync slices internal/bisect errors strconv io internal/oserror path internal/godebug reflect syscall time io/fs internal/fmtsort internal/filepathlite internal/syscall/unix internal/syscall/execenv internal/testlog internal/poll os fmt command-line-arguments --> 5c4fa8b22a3e STEP 5/6: RUN ls -halF total 2.1M drwxr-xr-x 4 root root 18 Nov 6 16:58 ./ dr-xr-xr-x 19 root root 6 Nov 6 16:58 ../ drwxrwxrwx 6 root root 128 Nov 6 16:57 .git/ -rw-rw-rw- 1 root root 743 Nov 6 16:57 .gitlab-ci.yml -rw-rw-rw- 1 root root 
1.8K Nov 6 16:57 Dockerfile -rw-rw-rw- 1 root root 74 Nov 6 16:57 Dockerfile_multistage -rw-rw-rw- 1 root root 18 Nov 6 16:57 README.md -rw-rw-rw- 1 root root 51 Nov 6 16:57 go.mod -rw-rw-rw- 1 root root 258 Nov 6 16:57 long-script-with-cleanup.sh -rwxr-xr-x 1 root root 2.1M Nov 6 16:58 main* -rw-rw-rw- 1 root root 157 Nov 6 16:57 main.go -rw-rw-rw- 1 root root 333 Nov 6 16:57 string_output.sh drwxrwxrwx 2 root root 87 Nov 6 16:57 test/ --> 57bb3eb7e929 STEP 6/6: CMD ["exec", "main"] COMMIT playground-bis:testing --> 2cc55d032ba8 Successfully tagged localhost/playground-bis:testing 2cc55d032ba852e05c513e4067b55c10fd697c65e07ffe2aae104e8531702274 Cleaning up project directory and file based variables 00:00 Job succeeded ``` ## Run Podman as a non-root user on OpenShift To run rootless Podman without privileged containers, follow the steps in the RedHat article [Build container images in OpenShift using Podman as a GitLab Runner](https://developers.redhat.com/articles/2024/10/01/build-container-images-openshift-using-podman-gitlab-runner). ## Troubleshooting ### `git` cannot save the configuration in `/.gitconfig` when you run the job as a non-root user Because you are not running the job as root, `git` cannot save the configuration in `/.gitconfig`. As a result, you might encounter the following error: ```shell Getting source from Git repository 00:00 error: could not lock config file //.gitconfig: Permission denied ``` To prevent this error: 1. Mount an `emptyDir` volume on `/my_custom_dir`. 1. Set the `HOME` environment variable to the `/my_custom_dir` path. 
================================================ FILE: docs/executors/parallels.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Parallels --- The Parallels executor uses the [Parallels Desktop](https://www.parallels.com/) virtualization software to run CI/CD jobs in virtual machines (VMs) on macOS. Parallels Desktop can run Windows, Linux, and other operating systems alongside macOS. The Parallels executor works similarly to the VirtualBox executor. It creates and manages virtual machines and executes your GitLab CI/CD jobs. Each job runs in a clean VM environment, providing isolation between builds. For configuration information, see [VirtualBox executor](virtualbox.md). > [!note] > Parallels executors do not support local cache. [Distributed cache](../configuration/speed_up_job_execution.md) is supported. ================================================ FILE: docs/executors/shell.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: The Shell executor --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} You can use the Shell executor to execute builds locally on the machine where GitLab Runner is installed. It supports all systems on which the Runner can be installed. That means that it's possible to use scripts generated for Bash, PowerShell Core, Windows PowerShell, and Windows Batch (deprecated). > [!note] > Ensure you meet [common prerequisites](_index.md#git-requirements-for-non-docker-executors) > on the machine where GitLab Runner uses the shell executor. 
## Run scripts as a privileged user The scripts can be run as an unprivileged user if the `--user` is added to the [`gitlab-runner run` command](../commands/_index.md#gitlab-runner-run). This feature is only supported by Bash. The source project is checked out to: `<working-directory>/builds/<short-token>/<concurrent-id>/<namespace>/<project-name>`. The caches for the project are stored in `<working-directory>/cache/<namespace>/<project-name>`. Where: - `<working-directory>` is the value of `--working-directory` as passed to the `gitlab-runner run` command or the current directory where the Runner is running - `<short-token>` is a shortened version of the Runner's token (first 8 letters) - `<concurrent-id>` is the index of the runner from the list of all runners that run a build for the same project concurrently (accessible through the `CI_CONCURRENT_PROJECT_ID` [pre-defined variable](https://docs.gitlab.com/ci/variables/predefined_variables/)). - `<namespace>` is the namespace where the project is stored on GitLab - `<project-name>` is the name of the project as it is stored on GitLab To overwrite the `<working-directory>/builds` and `<working-directory>/cache` directories, specify the `builds_dir` and `cache_dir` options under the `[[runners]]` section in [`config.toml`](../configuration/advanced-configuration.md). ================================================ FILE: docs/executors/ssh.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: SSH --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} > [!note] > The SSH executor supports only scripts generated in Bash and the caching feature > is not supported. This executor allows you to execute builds on a remote machine by executing commands over SSH. > [!note] > Ensure you meet [common prerequisites](_index.md#git-requirements-for-non-docker-executors) > on any remote systems where GitLab Runner uses the SSH executor. ## Use the SSH executor To use the SSH executor, specify `executor = "ssh"` in the [`[runners.ssh]`](../configuration/advanced-configuration.md#the-runnersssh-section) section. For example: ```toml [[runners]] executor = "ssh" [runners.ssh] host = "example.com" port = "22" user = "root" password = "password" identity_file = "/path/to/identity/file" ``` You can use `password` or `identity_file` or both to authenticate against the server. GitLab Runner doesn't implicitly read `identity_file` from `/home/user/.ssh/id_(rsa|dsa|ecdsa)`. 
The `identity_file` needs to be explicitly specified. The project's source is checked out to: `~/builds/<short-token>/<concurrent-id>/<namespace>/<project-name>`. Where: - `<short-token>` is a shortened version of the runner's token (first 8 letters) - `<concurrent-id>` is the index of the runner from the list of all runners that run a build for the same project concurrently (accessible through the `CI_CONCURRENT_PROJECT_ID` [pre-defined variable](https://docs.gitlab.com/ci/variables/predefined_variables/)). - `<namespace>` is the namespace where the project is stored on GitLab - `<project-name>` is the name of the project as it is stored on GitLab To overwrite the `~/builds` directory, specify the `builds_dir` option under `[[runners]]` section in [`config.toml`](../configuration/advanced-configuration.md). If you want to upload job artifacts, install `gitlab-runner` on the host you are connecting to through SSH. ## Configure strict host key checking SSH `StrictHostKeyChecking` is [enabled](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28192) by default. To disable SSH `StrictHostKeyChecking`, set `[runners.ssh.disable_strict_host_key_checking]` to `true`. The current default value is `false`. ================================================ FILE: docs/executors/virtualbox.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: VirtualBox --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} > [!note] > The Parallels executor works the same as the VirtualBox executor. > Local cache is not supported. [Distributed cache](../configuration/speed_up_job_execution.md) is supported. VirtualBox allows you to use VirtualBox's virtualization to provide a clean build environment for every build. This executor supports all systems that can be run on VirtualBox. 
The only requirement is that the virtual machine exposes an SSH server and provides a shell compatible with Bash or PowerShell. > [!note] > Ensure you meet [common prerequisites](_index.md#git-requirements-for-non-docker-executors) > on any virtual machine where GitLab Runner uses the VirtualBox executor. ## Overview The project's source code is checked out to: `~/builds//`. Where: - `` is the namespace where the project is stored on GitLab - `` is the name of the project as it is stored on GitLab To override the `~/builds` directory, specify the `builds_dir` option under the `[[runners]]` section in [`config.toml`](../configuration/advanced-configuration.md). You can also define [custom build directories](https://docs.gitlab.com/ci/runners/configure_runners/#custom-build-directories) per job using the `GIT_CLONE_PATH`. ## Create a new base virtual machine 1. Install [VirtualBox](https://www.virtualbox.org). - If running from Windows and VirtualBox is installed at the default location (for example `%PROGRAMFILES%\Oracle\VirtualBox`), GitLab Runner automatically detects it. Otherwise, you must add the installation folder to the `PATH` environment variable of the `gitlab-runner` process. 1. Import or create a new virtual machine in VirtualBox 1. Configure Network Adapter 1 as "NAT" (that's currently the only way the GitLab Runner is able to connect over SSH into the guest) 1. (optional) Configure another Network Adapter as "Bridged networking" to get access to the internet from the guest (for example) 1. Log into the new virtual machine 1. If Windows VM, see [Checklist for Windows VMs](#checklist-for-windows-vms) 1. Install the OpenSSH server 1. Install all other dependencies required by your build 1. If you want to download or upload job artifacts, install `gitlab-runner` inside the VM 1. Log out and shut down the virtual machine It's completely fine to use automation tools like Vagrant to provision the virtual machine. ## Create a new runner 1. 
Install GitLab Runner on the host running VirtualBox 1. Register a new runner with `gitlab-runner register` 1. Select the `virtualbox` executor 1. Enter the name of the base virtual machine you created earlier (find it under the settings of the virtual machine **General > Basic > Name**) 1. Enter the SSH `user` and `password` or path to `identity_file` of the virtual machine ## How it works When a new build is started: 1. A unique name for the virtual machine is generated: `runner--concurrent-` 1. The virtual machine is cloned if it doesn't exist 1. The port-forwarding rules are created to access the SSH server 1. GitLab Runner starts or restores the snapshot of the virtual machine 1. GitLab Runner waits for the SSH server to become accessible 1. GitLab Runner creates a snapshot of the running virtual machine (this is done to speed up any next builds) 1. GitLab Runner connects to the virtual machine and executes a build 1. If enabled, artifacts upload is done using the `gitlab-runner` binary *inside* the virtual machine. 1. GitLab Runner stops or shuts down the virtual machine ## Checklist for Windows VMs To use VirtualBox with Windows, you can install Cygwin or PowerShell. ### Use Cygwin - Install [Cygwin](https://cygwin.com/) - Install `sshd` and Git from Cygwin (do not use *Git for Windows*, you will get lots of path issues!) 
- Install Git LFS - Configure `sshd` and set it up as a service (see [Cygwin wiki](https://cygwin.fandom.com/wiki/Sshd)) - Create a rule for the Windows Firewall to allow incoming TCP traffic on port 22 - Add the GitLab server(s) to `~/.ssh/known_hosts` - To convert paths between Cygwin and Windows, use [the `cygpath` utility](https://cygwin.fandom.com/wiki/Cygpath_utility) ### Use native OpenSSH and PowerShell - Install [PowerShell](https://learn.microsoft.com/en-us/powershell/scripting/install/install-powershell-on-windows?view=powershell-7.4) - Install and configure [OpenSSH](https://learn.microsoft.com/en-us/windows-server/administration/openssh/openssh_install_firstuse?tabs=powershell#install-openssh-for-windows) - Install [Git for Windows](https://git-scm.com/) - Configure the [default shell as `pwsh`](https://learn.microsoft.com/en-us/windows-server/administration/OpenSSH/openssh-server-configuration#configuring-the-default-shell-for-openssh-in-windows). Update example with the correct full path: ```powershell New-ItemProperty -Path "HKLM:\SOFTWARE\OpenSSH" -Name DefaultShell -Value "$PSHOME\pwsh.exe" -PropertyType String -Force ``` - Add shell `pwsh` to [`config.toml`](../configuration/advanced-configuration.md) ================================================ FILE: docs/faq/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Troubleshooting GitLab Runner --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} This section can assist when troubleshooting GitLab Runner. ## General troubleshooting tips ### View the logs The GitLab Runner service sends logs to syslog. To view the logs, see your distribution documentation. 
If your distribution includes the `journalctl` command, you can use the command to view the logs: ```shell journalctl --unit=gitlab-runner.service -n 100 --no-pager docker logs gitlab-runner-container # Docker kubectl logs gitlab-runner-pod # Kubernetes ``` ### Restart the service ```shell systemctl restart gitlab-runner.service ``` ### View the Docker machines ```shell sudo docker-machine ls sudo su - && docker-machine ls ``` ### Delete all Docker machines ```shell docker-machine rm $(docker-machine ls -q) ``` ### Apply changes to `config.toml` ```shell systemctl restart gitlab-runner.service docker-machine rm $(docker-machine ls -q) # Docker machine journalctl --unit=gitlab-runner.service -f # Tail the logs to check for potential errors ``` ## Confirm your GitLab and GitLab Runner versions GitLab aims to [guarantee backward compatibility](../_index.md#gitlab-runner-versions). However, as a first troubleshooting step, you should ensure your version of GitLab Runner is the same as your GitLab version. ## What does `coordinator` mean? The `coordinator` is the GitLab installation from which a job is requested. In other words, a runner is an isolated agent that requests jobs from the `coordinator` (GitLab installation through GitLab API). ## Where are logs stored when run as a service on Windows? - If GitLab Runner is running as a service on Windows, it creates system event logs. To view them, open the Event Viewer (from the Run menu, type `eventvwr.msc` or search for "Event Viewer"). Then go to **Windows Logs > Application**. The **Source** for Runner logs is `gitlab-runner`. If you are using Windows Server Core, run this PowerShell command to get the last 20 log entries: `get-eventlog Application -Source gitlab-runner -Newest 20 | format-table -wrap -auto`. ## Enable debug logging mode > [!warning] > Debug logging can be a serious security risk. The output contains the content of > all variables and other secrets available to the job. 
You should disable any log aggregation > that might transmit secrets to third parties. The use of masked variables allows secrets > to be protected in job log output, but not in container logs. ### In the command line From a terminal, logged in as root, run the following. > [!warning] > This should not be performed on runners with the [Shell executor](../executors/shell.md), because it redefines the `systemd` service > and runs all jobs as root. This poses security risks and changes to file ownership that makes it difficult to revert to a non privileged account. ```shell gitlab-runner stop gitlab-runner --debug run ``` ### In the GitLab Runner `config.toml` Debug logging can be enabled in the [global section of the `config.toml`](../configuration/advanced-configuration.md#the-global-section) by setting the `log_level` setting to `debug`. Add the following line at the very top of your `config.toml`, before/after the concurrent line: ```toml log_level = "debug" ``` ### In the Helm Chart If GitLab Runner is installed in a Kubernetes cluster using the [GitLab Runner Helm Chart](../install/kubernetes.md), to enable debug logging, set the `logLevel` option in the [`values.yaml` customization](../install/kubernetes.md#configure-gitlab-runner-with-the-helm-chart): ```yaml ## Configure the GitLab Runner logging level. Available values are: debug, info, warn, error, fatal, panic ## ref: https://docs.gitlab.com/runner/configuration/advanced-configuration/#the-global-section ## logLevel: debug ``` ## Correlation IDs in GitLab Runner logs GitLab Runner generates a correlation ID for each API request to trace interactions with GitLab. When the API response from GitLab includes a correlation ID in the `X-Request-Id` header, the value (typically in ULID format) is used in logs. If the response doesn't include a correlation ID, GitLab Runner uses the UUID it generated for the request (lowercase hex format without dashes). 
A fallback correlation ID indicates the request did not reach GitLab Workhorse. The issue likely occurred at an intermediate hop (such as a WAF, CDN, load balancer, or proxy). You can use correlation IDs to match log entries across components and trace request flows. Search for the `correlation_id` field in GitLab Runner logs and the corresponding ID in GitLab server logs to correlate events. Example log entries: ```plaintext # Valid correlation ID (ULID format from GitLab API response) Appending trace to coordinator...ok correlation_id=01KKDQ7P6TRW7Z6P2PWG5808EK job=101162491 status=202 Accepted # Fallback correlation ID (lowercase hex UUID without dashes, generated by runner) WARNING: Appending trace to coordinator... job failed correlation_id=21fe32aee0e146c194640b075c95ec7c job=101162868 status=403 Forbidden ``` ## Configure DNS for a Docker executor runner When you configure GitLab Runner with the Docker executor, Docker containers might fail to access GitLab, even when the host Runner daemon has access. This can happen when DNS is configured in the host but those configurations are not passed to the container. **Example**: GitLab service and GitLab Runner exist in two different networks that are bridged in two ways (for example, over the Internet and through a VPN). The runner's routing mechanism might query DNS through the default internet service instead of the DNS service over the VPN. This configuration would result in the following message: ```shell Created fresh repository. ++ echo 'Created fresh repository.' ++ git -c 'http.userAgent=gitlab-runner 16.5.0 linux/amd64' fetch origin +da39a3ee5e6b4b0d3255bfef95601890afd80709:refs/pipelines/435345 +refs/heads/master:refs/remotes/origin/master --depth 50 --prune --quiet fatal: Authentication failed for 'https://gitlab.example.com/group/example-project.git/' ``` In this case, the authentication failure is caused by a service in between the Internet and the GitLab service. 
This service uses separate credentials, which the runner could circumvent if they used the DNS service over the VPN. You can tell Docker which DNS server to use by using the `dns` configuration in the `[runners.docker]` section of [the Runner's `config.toml` file](../configuration/advanced-configuration.md#the-runnersdocker-section). ```toml dns = ["192.168.xxx.xxx","192.168.xxx.xxx"] ``` ## I'm seeing `x509: certificate signed by unknown authority` For more information, see [the self-signed certificates](../configuration/tls-self-signed.md). ## I get `Permission Denied` when accessing the `/var/run/docker.sock` If you want to use the Docker executor and you are connecting to a Docker Engine installed on a server, you might see a `Permission Denied` error. The most likely cause is that your system uses SELinux (enabled by default on CentOS, Fedora and RHEL). Check your SELinux policy on your system for possible denials. ## Docker-machine error: `Unable to query docker version: Cannot connect to the docker engine endpoint.` This error relates to machine provisioning and might be due to the following reasons: - There is a TLS failure. When `docker-machine` is installed, some certificates might be invalid. To resolve this issue, remove the certificates and restart the runner: ```shell sudo su - rm -r /root/.docker/machine/certs/* service gitlab-runner restart ``` After the runner restarts, it registers that the certificates are empty and recreates them. - The hostname is longer than the supported length in the provisioned machine. For example, Ubuntu machines have a 64 character limit for `HOST_NAME_MAX`. The hostname is reported by `docker-machine ls`. Check the `MachineName` in the runner configuration and reduce the hostname length if required. > [!note] > This error might have occurred before Docker was installed in the machine. 
## `dialing environment connection: ssh: rejected: connect failed (open failed)` This error occurs when the Docker autoscaler cannot reach the Docker daemon on the target system when the connection is tunneled through SSH. Ensure that you can SSH to the target system and successfully run Docker commands, for example `docker info`. ## Adding an AWS Instance Profile to your autoscaled runners After you create an AWS IAM Role, in your IAM console, the role has a **Role ARN** and an **Instance Profile ARN**. You must use the **Instance Profile** name, **not** the **Role Name**. Add the following value to your `[runners.machine]` section: `"amazonec2-iam-instance-profile=<instance_profile_name>",` ## The Docker executor gets timeout when building Java project This most likely happens, because of the broken `aufs` storage driver: [Java process hangs on inside container](https://github.com/moby/moby/issues/18502). The best solution is to change the [storage driver](https://docs.docker.com/engine/storage/drivers/select-storage-driver/) to either OverlayFS (faster) or DeviceMapper (slower). Check this article about [configuring and running Docker](https://docs.docker.com/engine/daemon/) or this article about [control and configure with systemd](https://docs.docker.com/engine/daemon/proxy/#systemd-unit-file). ## I get 411 when uploading artifacts This happens due to the fact that GitLab Runner uses `Transfer-Encoding: chunked` which is broken on early versions of NGINX (). Upgrade your NGINX to a newer version. For more information see this issue: ## I am seeing other artifact upload errors, how can I further debug this? Artifacts are uploaded directly from the build environment to the GitLab instance, bypassing the GitLab Runner process. 
For example: - With the Docker executor, uploads occur from the Docker container - With the Kubernetes executor, uploads occur from the build container in the build pod The network route from the build environment to the GitLab instance might be different from the GitLab Runner to the GitLab instance route. To enable artifact uploads, ensure that all components in the upload path allow POST requests from the build environment to the GitLab instance. By default, the artifact uploader logs the upload URL and the HTTP status code of the upload response. This information is not enough to understand which system caused an error or blocked artifact uploads. To troubleshoot artifact upload issues, [enable debug logging](https://docs.gitlab.com/ci/variables/#enable-debug-logging) for upload attempts to see upload response's headers and body. > [!note] > The response body length for artifact upload debug logging is capped at 512 bytes. > Enable logging only for debugging because sensitive data can be exposed in logs. If uploads reach GitLab but fail with an error status code (for example, produces a non-successful response status code), investigate the GitLab instance itself. For common artifact upload issues, see [GitLab documentation](https://docs.gitlab.com/administration/cicd/job_artifacts_troubleshooting/#job-artifact-upload-fails-with-error-500). ## `No URL provided, cache will not be download`/`uploaded` This error occurs when the GitLab Runner helper receives an invalid URL or does not have any pre-signed URLs to access a remote cache. Review each [cache-related `config.toml` entry](../configuration/advanced-configuration.md#the-runnerscache-section) and provider-specific keys and values. An invalid URL might be constructed from any item that does not follow the URL syntax requirements. Additionally, ensure that your helper `image` and `helper_image_flavor` match and are up-to-date. 
If there is a problem with the credentials configuration, a diagnostic error message is added to the GitLab Runner process log. ## Error: `warning: You appear to have cloned an empty repository.` When running `git clone` using HTTP(s) (with GitLab Runner or manually for tests) and you see the following output: ```shell $ git clone https://git.example.com/user/repo.git Cloning into 'repo'... warning: You appear to have cloned an empty repository. ``` Make sure that HTTP proxy configuration in your GitLab server installation is done properly. When using HTTP proxy with its own configuration, ensure that requests are proxied to the **GitLab Workhorse socket**, not the **GitLab Unicorn socket**. Git protocol through HTTP(S) is resolved by the GitLab Workhorse, so this is the **main entrypoint** of GitLab. If you are using a Linux package installation, but don't want to use the bundled NGINX server, see [using a non-bundled web-server](https://docs.gitlab.com/omnibus/settings/nginx/#use-a-non-bundled-web-server). In the GitLab Recipes repository there are [web-server configuration examples](https://gitlab.com/gitlab-org/gitlab-recipes/tree/master/web-server) for Apache and NGINX. If you are using GitLab installed from source, see the above documentation and examples. Make sure that all HTTP(S) traffic is going through the **GitLab Workhorse**. See [an example of a user issue](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/1105). ## Error: `zoneinfo.zip: no such file or directory` error when using `Timezone` or `OffPeakTimezone` It's possible to configure the time zone in which `[[docker.machine.autoscaling]]` periods are described. This feature should work on most Unix systems out of the box. 
However, on some Unix systems and most non-Unix systems (like Windows, where GitLab Runner binaries are available), the runner might crash at start with an error: ```plaintext Failed to load config Invalid OffPeakPeriods value: open /usr/local/go/lib/time/zoneinfo.zip: no such file or directory ``` The error is caused by the `time` package in Go. Go uses the IANA Time Zone database to load the configuration of the specified time zone. On most Unix systems, this database is already present on one of the well-known paths (`/usr/share/zoneinfo`, `/usr/share/lib/zoneinfo`, `/usr/lib/locale/TZ/`). Go's `time` package looks for the Time Zone database in all those three paths. If it doesn't find any of them, but the machine has a configured Go development environment, then it falls back to the `$GOROOT/lib/time/zoneinfo.zip` file. If none of those paths are present (for example on a production Windows host) the above error is thrown. In case your system has support for the IANA Time Zone database, but it's not available by default, you can try to install it. For Linux systems it can be done for example by: ```shell # on Debian/Ubuntu based systems sudo apt-get install tzdata # on RPM based systems sudo yum install tzdata # on Linux Alpine sudo apk add -U tzdata ``` If your system doesn't provide this database in a _native_ way, then you can make `OffPeakTimezone` work by following the steps below: 1. Download the [`zoneinfo.zip`](https://gitlab-runner-downloads.s3.amazonaws.com/latest/zoneinfo.zip) file. Starting with version v9.1.0 you can download the file from a tagged path. In that case you should replace `latest` with the tag name (for example, `v9.1.0`) in the `zoneinfo.zip` download URL. 1. Store this file in a well-known directory. We suggest using the same directory where the `config.toml` file is present. 
So for example, if you're hosting Runner on a Windows machine and your configuration file is stored at `C:\gitlab-runner\config.toml`, then save the `zoneinfo.zip` at `C:\gitlab-runner\zoneinfo.zip`. 1. Set the `ZONEINFO` environment variable containing a full path to the `zoneinfo.zip` file. If you are starting the Runner using the `run` command, then you can do this with: ```shell ZONEINFO=/etc/gitlab-runner/zoneinfo.zip gitlab-runner run ``` or if using Windows: ```powershell C:\gitlab-runner> set ZONEINFO=C:\gitlab-runner\zoneinfo.zip C:\gitlab-runner> gitlab-runner run ``` If you are starting GitLab Runner as a system service then you must update or override the service configuration: - On Unix systems, modify the settings through your service manager software. - On Windows, add the `ZONEINFO` variable to the list of environment variables available for the GitLab Runner user through System Settings. ## Why can't I run more than one instance of GitLab Runner? You can, but they must not share the same `config.toml` file. Running multiple instances of GitLab Runner using the same configuration file can cause unexpected and hard-to-debug behavior. Only a single instance of GitLab Runner can use a specific `config.toml` file at one time. ## Jobs experience delays before starting If jobs from some projects experience significant delays before starting while jobs from other projects run immediately, you might be experiencing long polling issues. **Symptoms:** - Jobs are queued but take an unusually long time to start execution (typically matching your GitLab instance long polling timeout). - Some runners appear to be stuck while others process jobs normally. - GitLab Runner logs show `CONFIGURATION: Long polling issues detected`. **Cause:** This issue occurs when GitLab Runner workers get stuck in long polling requests to GitLab, which prevents other jobs from being processed promptly. 
These issues range from performance bottlenecks to complete deadlocks, depending on the configuration. The issue is related to the GitLab CI/CD long polling feature controlled by the GitLab Workhorse `apiCiLongPollingDuration` setting (default: 50s). **Solution:** These issues can occur in several configuration scenarios. For comprehensive information about the causes, configuration examples, and solutions, see the [Long polling issues](../configuration/advanced-configuration.md#long-polling-issues) section in the advanced configuration documentation. ## `Job failed (system failure): preparing environment:` This error is often due to your shell [loading your profile](../shells/_index.md#shell-profile-loading), and one of the scripts is causing the failure. Examples of `dotfiles` that are known to cause failure: - `.bash_logout` - `.condarc` - `.rvmrc` SELinux can also be the culprit of this error. You can confirm this by looking at the SELinux audit log: ```shell sealert -a /var/log/audit/audit.log ``` ## Runner abruptly terminates after `Cleaning up` stage CrowdStrike Falcon Sensor has been reported to kill pods after the `Cleaning up files` stage of a job when the "container drift detection" setting was enabled. To ensure that jobs are able to complete, you must disable this setting. ## Job fails with `remote error: tls: bad certificate (exec.go:71:0s)` This error can occur when the system time changes significantly during a job that creates artifacts. Due to the change in system time, SSL certificates are expired, which causes an error when the runner attempts to upload artifacts. To ensure SSL verification can succeed during artifact upload, change the system time to a valid date and time at the end of the job. Because the creation time of the artifacts file has also changed, they are automatically archived. ## Helm Chart: `ERROR .. Unauthorized` Before uninstalling or upgrading runners deployed with Helm, pause them in GitLab and wait for any jobs to complete. 
If you remove a runner pod with `helm uninstall` or `helm upgrade` while a job is running, `Unauthorized` errors like the following may occur when the job completes: ```plaintext ERROR: Error cleaning up pod: Unauthorized ERROR: Error cleaning up secrets: Unauthorized ERROR: Job failed (system failure): Unauthorized ``` This probably occurs because when the runner is removed, the role bindings are removed. The runner pod continues until the job completes, and then the runner tries to delete it. Without the role binding, the runner pod no longer has access. See [this issue](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/issues/225) for details. ## Elasticsearch service startup error `max virtual memory areas vm.max_map_count [65530] is too low` On startup of an Elasticsearch service container, you might receive an error similar to: - `max virtual memory areas vm.max_map_count [65530] is too low, increase to at least [262144]` Elasticsearch has a `vm.max_map_count` requirement that has to be set on the instance on which Elasticsearch is run. See the [Elasticsearch documentation](https://www.elastic.co/docs/deploy-manage/deploy/self-managed/install-elasticsearch-docker-prod) for how to set this value correctly depending on the platform. ## Error: `Preparing the "docker+machine" executor ERROR: Preparation failed: exit status 1` This error can occur when the Docker machine is not able to successfully create the executor virtual machines. To get more information about the error, manually create the virtual machine with the same `MachineOptions` that you have defined in your `config.toml`. For example: `docker-machine create --driver=google --google-project=GOOGLE-PROJECT-ID --google-zone=GOOGLE-ZONE ...`. ## Error: `No unique index found for name` This error might occur when you create or update a runner and the database does not have a unique index for the `tags` table. In the GitLab UI, you might get a `Response not successful: Received status code 500` error. 
This issue might affect instances that have undergone multiple major upgrades over an extended period. To resolve this issue, consolidate any duplicate tags in the table with the [`gitlab:db:deduplicate_tags` Rake task](https://docs.gitlab.com/administration/raketasks/maintenance/#check-the-database-for-duplicate-cicd-tags). For more information, see [Rake tasks](https://docs.gitlab.com/administration/raketasks/). ## Error: `Not authorized to perform sts:AssumeRoleWithWebIdentity` If you configured an IAM role for your runner's Kubernetes ServiceAccount resource, but runner logs show that it is not able to perform `sts:AssumeRoleWithWebIdentity`, you might get an error that states: ```plaintext {"error":"Not authorized to perform sts:AssumeRoleWithWebIdentity","level":"error","msg":"error while generating S3 pre-signed URL","time":"2025-10-15T18:07:20Z"} ``` This issue occurs when you include `https://` in the `StringLike` or `StringEquals` condition of your IAM role's trusted entities configuration. To resolve this issue, remove `https://` from the OIDC URL: ```json "Action": "sts:AssumeRoleWithWebIdentity", "Condition": { "StringLike": { "oidc.eks..amazonaws.com/id/:sub": "system:serviceaccount::" } } ``` ================================================ FILE: docs/fleet_scaling/_index.md ================================================ --- stage: Verify group: CI Functions Platform info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Plan and operate a fleet of instance or group runners --- Apply these best practices and recommendations when scaling a fleet of runners in a shared service model. When you host a fleet of instance runners, you need a well-planned infrastructure that takes into consideration your: - Computing capacity. - Storage capacity. - Network bandwidth and throughput. - Type of jobs (including programming language, OS platform, and dependent libraries). 
Use these recommendations to develop a GitLab Runner deployment strategy based on your organization's requirements. ## Consider your workload and environment Before you deploy runners, consider your workload and environment requirements. - Create a list of the teams that you plan to onboard to GitLab. - Catalog the programming languages, web frameworks, and libraries in use at your organization. For example, Go, C++, PHP, Java, Python, JavaScript, React, Node.js. - Estimate the number of CI/CD jobs each team may execute per hour, per day. - Validate if any team has build environment requirements that cannot be addressed by using containers. - Validate if any team has build environment requirements that are best served by having runners dedicated to that team. - Estimate the compute capacity that you may need to support the expected demand. You might choose different infrastructure stacks to host different runner fleets. For example, you might need to deploy some runners in the public cloud and some on-premise. The performance of the CI/CD jobs on the runner fleet is directly related to the fleet's environment. If you are executing a large number of resource-intensive CI/CD jobs, hosting the fleet on a shared computing platform is not recommended. ## Runners, executors, and autoscaling capabilities The `gitlab-runner` executable runs your CI/CD jobs. Each runner is an isolated process that picks up requests for job executions and deals with them according to pre-defined configurations. As an isolated process, each runner can create "sub-processes" (also called "workers") to run jobs. ### Concurrency and limit - [Concurrency](../configuration/advanced-configuration.md#the-global-section): Sets the number of jobs that can run concurrently when you're using all of the configured runners on a host system. - [Limit](../configuration/advanced-configuration.md#the-runners-section): Sets the number of sub-processes that a runner can create to execute jobs simultaneously. 
The limit is different for autoscaling runners (like Docker Machine and Kubernetes) than it is for runners that don't autoscale. - On runners that do not autoscale, `limit` defines the capacity of the runner on a host system. - On autoscaling runners, `limit` is the number of runners you want to run in total. For more information about how `concurrency`, `limit`, and `request_concurrency` interact to control job flow, see the [KB article on GitLab Runner concurrency tuning](https://support.gitlab.com/hc/en-us/articles/21324350882076-GitLab-Runner-Concurrency-Tuning-Understanding-request-concurrency). ### Basic configuration: one runner manager, one runner For the most basic configuration, you install the GitLab Runner software on a supported compute architecture and operating system. For example, you might have an x86-64 virtual machine (VM) running Ubuntu Linux. After the installation is complete, you execute the runner registration command just once and you select the `shell` executor. Then you edit the runner `config.toml` file to set concurrency to `1`. ```toml concurrent = 1 [[runners]] name = "instance-level-runner-001" url = "" token = "" executor = "shell" ``` The GitLab CI/CD jobs that this runner can process are executed directly on the host system where you installed the runner. It's as if you were running the CI/CD job commands yourself in a terminal. In this case, because you only executed the registration command one time, the `config.toml` file contains only one `[[runners]]` section. Assuming you set the concurrency value to `1`, only one runner "worker" can execute CI/CD jobs for the runner process on this system. ### Intermediate configuration: one runner manager, multiple runners You can also register multiple runners on the same machine. When you do this, the runner's `config.toml` file has multiple `[[runners]]` sections in it. 
If all additional runner workers use the shell executor, and you update the global `concurrent` setting value to `3`, the host can run a maximum of three jobs at once. ```toml concurrent = 3 [[runners]] name = "instance_level_shell_001" url = "" token = "" executor = "shell" [[runners]] name = "instance_level_shell_002" url = "" token = "" executor = "shell" [[runners]] name = "instance_level_shell_003" url = "" token = "" executor = "shell" ``` You can register many runner workers on the same machine, and each one is an isolated process. The performance of the CI/CD jobs for each worker is dependent on the compute capacity of the host system. ### Autoscaling configuration: one or more runner managers, multiple workers When GitLab Runner is set up for autoscaling, you can configure a runner to act as a manager of other runners. You can do this with the `docker-machine` or `kubernetes` executors. In this type of manager-only configuration, the runner agent is itself not executing any CI/CD jobs. #### Docker Machine executor With the [Docker Machine executor](../executors/docker_machine.md): - The runner manager provisions on-demand virtual machine instances with Docker. - On these VMs, GitLab Runner executes the CI/CD jobs using a container image that you specify in your `.gitlab-ci.yml` file. - You should test the performance of your CI/CD jobs on various machine types. - You should consider optimizing your compute hosts based on speed or cost. #### Kubernetes executor With the [Kubernetes executor](../executors/kubernetes/_index.md): - The runner manager provisions pods on the target Kubernetes cluster. - The CI/CD jobs are executed on each pod, which is comprised of multiple containers. - The pods used for job execution typically require more compute and memory resources than the pod that hosts the runner manager. #### Reusing a runner configuration Each runner manager associated with the same runner authentication token is assigned a `system_id` identifier. 
The `system_id` identifies the machine where the runner is being used. Runners registered with the same authentication token are grouped under a single runner entry by a unique `system_id`. Grouping similar runners under a single configuration simplifies runner fleet operations. Here is an example scenario where you can group similar runners under a single configuration: A platform administrator needs to provide multiple runners with the same underlying virtual machine instance sizes (2 vCPU, 8 GB RAM) using the tag `docker-builds-2vCPU-8GB`. They want at least two such runners, either for high availability or scaling. Instead of creating two distinct runner entries in the UI, administrators can create one runner configuration for all runners with the same compute instance size. They can reuse the authentication token for the runner configuration to register multiple runners. Each registered runner inherits the `docker-builds-2vCPU-8GB` tag. For all child runners of a single runner configuration, `system_id` acts as a unique identifier. Grouped runners can be reused to run different jobs by multiple runner managers. GitLab Runner generates the `system_id` at startup or when the configuration is saved. The `system_id` is saved to the `.runner_system_id` file in the same directory as the [`config.toml`](../configuration/advanced-configuration.md), and displays in job logs and the runner administration page. ##### Generating `system_id` identifiers To generate the `system_id`, GitLab Runner attempts to derive a unique system identifier from hardware identifiers (for instance, `/etc/machine-id` in some Linux distributions). If not successful, GitLab Runner uses a random identifier to generate the `system_id`. The `system_id` has one of the following prefixes: - `r_`: GitLab Runner assigned a random identifier. - `s_`: GitLab Runner assigned a unique system identifier from hardware identifiers. 
It is important to take this into account when creating container images for example, so that the `system_id` is not hard-coded into the image. If the `system_id` is hard-coded, you cannot distinguish between hosts executing a given job. ##### Delete runners and runner managers To delete runners and runner managers registered with a runner registration token (deprecated), use the `gitlab-runner unregister` command. To delete runners and runner managers created with a runner authentication token, use the [UI](https://docs.gitlab.com/ci/runners/runners_scope/#delete-instance-runners) or [API](https://docs.gitlab.com/api/runners/#delete-a-runner). Runners created with a runner authentication token are reusable configurations that can be reused in multiple machines. If you use the [`gitlab-runner unregister`](../commands/_index.md#gitlab-runner-unregister) command, only the runner manager is deleted, not the runner. ## Configure instance runners Using instance runners in an autoscaling configuration (where a runner acts as a "runner manager") is an efficient and effective way to start. The compute capacity of the infrastructure stack where you host your VMs or pods depends on: - The requirements you captured when you were considering your workload and environment. - The technology stack you use to host your runner fleet. You might have to adjust your computing capacity after you start running CI/CD workloads and analyzing the performance over time. For configurations that use instance runners with an autoscaling executor, you must start with minimum two runner managers. The total number of runner managers you may need over time depends on: - The compute resources of the stack that hosts the runner managers. - The concurrency that you choose to configure for each runner manager. - The load that is generated by the CI/CD jobs that each manager is executing hourly, daily, and monthly. 
For example, on GitLab.com, we run seven runner managers with the Docker Machine executor. Each CI/CD job is executed in a Google Cloud Platform (GCP) `n1-standard-1` VM. With this configuration, we process millions of jobs per month. ## Monitoring runners An essential step in operating a runner fleet at scale is to set up and use the [runner monitoring](../monitoring/_index.md) capabilities included with GitLab. The following table includes a summary of GitLab Runner metrics. The list does not include the Go-specific process metrics. To view those metrics on a runner, execute the command as noted in [available metrics](../monitoring/_index.md#available-metrics). | Metric name | Description | |----------------------------------------------------------------|-------------| | `gitlab_runner_api_request_statuses_total` | The total number of API requests, partitioned by runner, endpoint, and status. | | `gitlab_runner_autoscaling_machine_creation_duration_seconds` | Histogram of machine creation time. | | `gitlab_runner_autoscaling_machine_states` | The number of machines per state in this provider. | | `gitlab_runner_concurrent` | The value of concurrent setting. | | `gitlab_runner_errors_total` | The number of caught errors. This metric is a counter that tracks log lines. The metric includes the label `level`. The possible values are `warning` and `error`. If you plan to include this metric, then use `rate()` or `increase()` when observing. In other words, if you notice that the rate of warnings or errors is increasing, then this could suggest an issue that needs further investigation. | | `gitlab_runner_jobs` | This shows how many jobs are being executed (with different scopes in the labels). | | `gitlab_runner_job_duration_seconds` | Histogram of job durations. | | `gitlab_runner_job_queue_duration_seconds` | A histogram representing job queue duration. 
| | `gitlab_runner_acceptable_job_queuing_duration_exceeded_total` | Counts how often jobs exceed the configured queuing time threshold. | | `gitlab_runner_job_stage_duration_seconds` | A histogram representing job duration across each stage. This metric is a **high cardinality metric**. For more information, see [high cardinality metrics section](#high-cardinality-metrics). | | `gitlab_runner_jobs_total` | This displays the total jobs executed. | | `gitlab_runner_job_execution_mode_total` | This displays the total jobs executed by mode (`steps` or `traditional`) and executor. | | `gitlab_runner_limit` | The current value of the limit setting. | | `gitlab_runner_request_concurrency` | The current number of concurrent requests for a new job. | | `gitlab_runner_request_concurrency_exceeded_total` | Count of excess requests above the configured `request_concurrency` limit. | | `gitlab_runner_version_info` | A metric with a constant `1` value labeled by different build stats fields. | | `process_cpu_seconds_total` | Total user and system CPU time spent in seconds. | | `process_max_fds` | Maximum number of open file descriptors. | | `process_open_fds` | Number of open file descriptors. | | `process_resident_memory_bytes` | Resident memory size in bytes. | | `process_start_time_seconds` | Start time of the process, measured in seconds from the Unix epoch. | | `process_virtual_memory_bytes` | Virtual memory size in bytes. | | `process_virtual_memory_max_bytes` | Maximum amount of virtual memory available in bytes. | ### Grafana dashboard configuration tips In this [public repository](https://gitlab.com/gitlab-com/runbooks/-/tree/master/dashboards/ci-runners) you can find the source code for the Grafana dashboards that we use to operate the runner fleet on GitLab.com. We track a lot of metrics for GitLab.com. As a large provider of cloud-based CI/CD, we need many different views into the system so we can debug issues. 
In most cases, self-managed runner fleets don't need to track the volume of metrics that we track with GitLab.com. #### Dashboard generation process Grafana accepts only JSON format, so you must convert the `jsonnet` files to JSON. The [runbooks repository](https://gitlab.com/gitlab-com/runbooks/-/tree/master/dashboards) contains automated scripts for GitLab infrastructure only. To generate these dashboards for your own environment: 1. Create dashboards using the `jsonnet` configuration language (`.dashboard.jsonnet` files). 1. Process `jsonnet` files with the `jsonnet` library to produce JSON output. 1. Upload the resulting JSON files to Grafana (using the API or UI). #### Available runner dashboards Here are a few essential dashboards that you should use to monitor your runner fleet: Jobs started on runners: - View an overview of the total jobs executed on your runner fleet for a selected time interval. - View trends in usage. You should analyze this dashboard weekly at a minimum. - Correlate this data with metrics like job duration to determine if you need configuration changes or capacity upgrades to meet your CI/CD job performance SLOs. Job duration: - Analyze the performance and scaling of your runner fleet. - Identify performance bottlenecks and optimization opportunities. Runner capacity: - View the number of jobs being executed divided by the value of limit or concurrent. - Determine if there is still capacity to execute additional jobs. - Plan for capacity upgrades based on utilization trends. Additional dashboards include: - Main Dashboard (`main.dashboard.jsonnet`): Overview of runner infrastructure and HAProxy metrics. - Business Metrics (`business-stats.dashboard.jsonnet`): Job statistics, finished job minutes, and runner saturation. - Autoscaling Algorithm (`autoscaling-algorithm.dashboard.jsonnet`): Visualization of autoscaling behavior and machine states. - Queuing Overview (`queuing-overview.dashboard.jsonnet`): Job queue depth and wait times. 
- Request Concurrency (`request-concurrency.dashboard.jsonnet`): Concurrent request analysis. - Deployment (`deployment.dashboard.jsonnet`): Deployment-related metrics. - Incident Dashboards: Specialized dashboards for troubleshooting autoscaling, database, application, and runner manager issues. Each dashboard includes descriptions and context in the source `jsonnet` files to explain what metrics are being displayed. ### Template variables Dashboards use Grafana template variables to create reusable dashboard templates across different contexts: - Environments: For example, `production`, `staging`, `development`. - Stage: For example, `main`, `canary`. - Type: For example, `ci`, `verify`. Varies by use case. - Shard: Optional. For distributed runner deployments. Organizations that implement these dashboards must adjust these variables to match their own environment structure. Update these variables in the Grafana dashboard settings after import. ### Supported runners These dashboards work with all GitLab Runner executor types: - Kubernetes - Shell - VM (Docker Machine) - Windows The metrics collection is executor-independent and available across all runner fleet types. ### Customize dashboards To modify dashboards for your environment: 1. Edit the `.dashboard.jsonnet` files in the `dashboards/ci-runners/` directory. 1. Use [Grafonnet library](https://grafana.github.io/grafonnet-lib/) syntax (built on `jsonnet`). 1. Test the changes using the playground: ```shell ./test-dashboard.sh dashboards/ci-runners/your-dashboard.dashboard.jsonnet ``` 1. Regenerate and deploy using `./generate-dashboards.sh`. For more information, see the [video guide on extending dashboards](https://www.youtube.com/watch?v=yZ2RiY_Akz0). ### Considerations for monitoring runners on Kubernetes For runner fleets hosted on Kubernetes platforms like OpenShift, EKS, or GKE, use a different approach to set up Grafana dashboards. 
On Kubernetes, runner CI/CD job execution pods can be created and deleted frequently. In these cases, you should plan to monitor the runner manager pod and potentially implement the following: - Gauges: Display the aggregate of the same metric from different sources. - Counters: Reset the counter when applying `rate` or `increase` functions. ## High cardinality metrics Some metrics can be resource-intensive to ingest and store due to their high cardinality. High cardinality occurs when a metric includes labels that have many possible values, leading to a large number of unique time series data points. To optimize performance, such metrics are not enabled by default and can be toggled by using the [FF_EXPORT_HIGH_CARDINALITY_METRICS feature flag](../configuration/feature-flags.md). ### List of high cardinality metrics - `gitlab_runner_job_stage_duration_seconds`: Measures the duration of individual job stages in seconds. This metric includes the `stage` label, which can have the following predefined values: - `resolve_secrets` - `prepare_executor` - `prepare_script` - `get_sources` - `clear_worktree` - `restore_cache` - `download_artifacts` - `after_script` - `step_script` - `archive_cache` - `archive_cache_on_failure` - `upload_artifacts_on_success` - `upload_artifacts_on_failure` - `cleanup_file_variables` Additionally, this list may include custom user-defined steps such as `step_run`. ### Managing high cardinality metrics You can control and reduce cardinality by using [Prometheus relabel configuration](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#relabel_config) to remove unnecessary label values or the entire metrics. 
#### Example configuration to remove specific stages The following configuration removes any metrics with the `prepare_executor` value in the `stage` label: ```yaml scrape_configs: - job_name: 'gitlab_runner_metrics' static_configs: - targets: ['localhost:9252'] metric_relabel_configs: - source_labels: [__name__, "stage"] regex: "gitlab_runner_job_stage_duration_seconds;prepare_executor" action: drop ``` #### Example to keep only relevant stages The following configuration keeps only the metrics for the `step_script` stage and discards other metrics entirely: ```yaml scrape_configs: - job_name: 'gitlab_runner_metrics' static_configs: - targets: ['localhost:9252'] metric_relabel_configs: - source_labels: [__name__, "stage"] regex: "gitlab_runner_job_stage_duration_seconds;step_script" action: keep ``` ================================================ FILE: docs/fleet_scaling/fleeting.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Fleeting --- [Fleeting](https://gitlab.com/gitlab-org/fleeting/fleeting) is a library that GitLab Runner uses to provide a plugin-based abstraction for a cloud provider's instance groups. 
The following executors use fleeting to scale runners: - [Docker Autoscaler](../executors/docker_autoscaler.md) - [Instance](../executors/instance.md) ## Find a fleeting plugin GitLab maintains these official plugins: | Cloud provider | Notes | |----------------------------------------------------------------------------|-------| | [Google Cloud](https://gitlab.com/gitlab-org/fleeting/plugins/googlecloud) | Uses [Google Cloud instance groups](https://docs.cloud.google.com/compute/docs/instance-groups) | | [AWS](https://gitlab.com/gitlab-org/fleeting/plugins/aws) | Uses [AWS Auto Scaling groups](https://docs.aws.amazon.com/autoscaling/ec2/userguide/auto-scaling-groups.html) | | [Azure](https://gitlab.com/gitlab-org/fleeting/plugins/azure) | Uses Azure [Virtual Machine Scale Sets](https://learn.microsoft.com/en-us/azure/virtual-machine-scale-sets/overview). Only [Uniform orchestration](https://learn.microsoft.com/en-us/azure/virtual-machine-scale-sets/virtual-machine-scale-sets-orchestration-modes#scale-sets-with-uniform-orchestration) mode is supported. | The following plugins are community maintained: | Cloud provider | OCI Reference | Notes | |----------------|---------------|-------| | [VMware vSphere](https://gitlab.com/santhanuv/fleeting-plugin-vmware-vsphere) | `registry.gitlab.com/santhanuv/fleeting-plugin-vmware-vsphere:latest` | Uses VMware vSphere to create and manage virtual machines by cloning from an existing template. Tested with [`govmomi vcsim`](https://github.com/vmware/govmomi/tree/main/vcsim) simulator and validated by community members against basic use cases. It might have limitations with restricted vSphere permissions. You can create related issues in the [Fleeting Plugin VMware vSphere project](https://gitlab.com/santhanuv/fleeting-plugin-vmware-vsphere/-/issues).| Community maintained plugins are owned, built, hosted, and maintained by contributors outside of GitLab (the community). 
GitLab owns and maintains the Fleeting library and API to provide static code review. GitLab cannot test community plugins because we don't have access to all the necessary computing environments. Community members should build, test, and publish plugins to an OCI repository and provide the reference on this page through merge requests. The OCI reference should be accompanied by notes on where to report issues, the support and stability level of the plugin, and where to find documentation. ## Configure a fleeting plugin To configure fleeting, in the `config.toml`, use the [`[runners.autoscaler]`](../configuration/advanced-configuration.md#the-runnersautoscaler-section) configuration section. > [!note] > The README.md file for each plugin contains important information regarding installation and configuration. ## Install a fleeting plugin To install a fleeting plugin, use either the: - OCI registry distribution (recommended) - Manual binary installation ## Install with the OCI registry distribution {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4690) OCI registry distribution in GitLab Runner 16.11 {{< /history >}} Plugins are installed to `~/.config/fleeting/plugins` on UNIX systems, and `%APPDATA%/fleeting/plugins` on Windows. To override where plugins are installed, update the environment variable `FLEETING_PLUGIN_PATH`. To install the fleeting plugin: 1. 
In the `config.toml`, in the `[runners.autoscaler]` section, add the fleeting plugin: {{< tabs >}} {{< tab title="AWS" >}} ```toml [[runners]] name = "my runner" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" [runners.autoscaler] plugin = "aws:latest" ``` {{< /tab >}} {{< tab title="Google Cloud" >}} ```toml [[runners]] name = "my runner" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" [runners.autoscaler] plugin = "googlecloud:latest" ``` {{< /tab >}} {{< tab title="Azure" >}} ```toml [[runners]] name = "my runner" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" [runners.autoscaler] plugin = "azure:latest" ``` {{< /tab >}} {{< /tabs >}} 1. Run `gitlab-runner fleeting install`. ### `plugin` formats The `plugin` parameter supports the following formats: - `` - `:` - `/` - `/:` - `//` - `//:` Where: - `registry.gitlab.com` is the default registry. - `gitlab-org/fleeting/plugins` is the default repository. - `latest` is the default version. ### Version constraint formats The `gitlab-runner fleeting install` command uses the version constraint to find the latest matching version in the remote repository. When GitLab Runner runs, it uses the version constraint to find the latest matching version that is installed locally. Use the following version constraint formats: | Format | Description | |---------------------------|-------------| | `latest` | Latest version. | | `` | Selects the major version. For example, `1` selects the version that matches `1.*.*`. | | `.` | Selects the major and minor version. For example, `1.5` selects the latest version that matches `1.5.*`. | | `..` | Selects the major and minor version, and patch. For example, `1.5.1` selects the version `1.5.1`. | ## Install binary manually To manually install a fleeting plugin: 1. Download the fleeting plugin binary for your system: - [AWS](https://gitlab.com/gitlab-org/fleeting/plugins/aws/-/releases). 
- [Google Cloud](https://gitlab.com/gitlab-org/fleeting/plugins/googlecloud/-/releases) - [Azure](https://gitlab.com/gitlab-org/fleeting/plugins/azure/-/releases) 1. Ensure the binary has a name in the format of `fleeting-plugin-`. For example, `fleeting-plugin-aws`. 1. Ensure the binary can be discovered from `$PATH`. For example, move it to `/usr/local/bin`. 1. In the `config.toml`, in the `[runners.autoscaler]` section, add the fleeting plugin. For example: {{< tabs >}} {{< tab title="AWS" >}} ```toml [[runners]] name = "my runner" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" [runners.autoscaler] plugin = "fleeting-plugin-aws" ``` {{< /tab >}} {{< tab title="Google Cloud" >}} ```toml [[runners]] name = "my runner" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" [runners.autoscaler] plugin = "fleeting-plugin-googlecloud" ``` {{< /tab >}} {{< tab title="Azure" >}} ```toml [[runners]] name = "my runner" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" [runners.autoscaler] plugin = "fleeting-plugin-azure" ``` {{< /tab >}} {{< /tabs >}} ## Fleeting plugin management Use the following `fleeting` subcommands to manage fleeting plugins: | Command | Description | |----------------------------------|-------------| | `gitlab-runner fleeting install` | Install the fleeting plugin from the OCI registry distribution. | | `gitlab-runner fleeting list` | List referenced plugins and the version used. | | `gitlab-runner fleeting login` | Sign in to private registries. 
| ================================================ FILE: docs/grit/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: GitLab Runner Infrastructure Toolkit --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated - Status: Experiment {{< /details >}} The [GitLab Runner Infrastructure Toolkit (GRIT)](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit) is a library of Terraform modules you can use to create and manage many common runner configurations on public cloud providers. > [!note] > This feature is an [experiment](https://docs.gitlab.com/policy/development_stages_support/#experiment). For more information about the state of GRIT development, see [epic 1](https://gitlab.com/groups/gitlab-org/ci-cd/runner-tools/-/epics/1). To provide feedback on this feature, leave a comment on [issue 84](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit/-/issues/84). ## Create a runner with GRIT To use GRIT to deploy an autoscaling Linux Docker in AWS: 1. Set the following variables to provide access to GitLab and AWS: - `GITLAB_TOKEN` - `AWS_REGION` - `AWS_SECRET_ACCESS_KEY` - `AWS_ACCESS_KEY_ID` 1. Download the latest [GRIT release](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit/-/releases) and extract to `.local/grit`. 1. Create a `main.tf` Terraform module: ```hcl module "runner" { source = ".local/grit/scenarios/aws/linux/docker-autoscaler-default" name = "grit-runner" gitlab_project_id = "39258790" # gitlab.com/josephburnett/hello-runner runner_description = "Autoscaling Linux Docker runner on AWS deployed with GRIT. " runner_tags = ["aws", "linux"] max_instances = 5 min_support = "experimental" } ``` 1. Initialize and apply the module: ```plaintext terraform init terraform apply ``` These steps create a new runner in a GitLab project. 
The runner manager uses the `docker-autoscaler` executor to run jobs tagged as `aws` and `linux`. The runner provisions between 1 and 5 VMs through a new Autoscaling Group (ASG), based on workload. The ASG uses a public AMI owned by the runner team. Both the runner manager and the ASG operate in a new VPC. All resources are named based on the provided value (`grit-runner`), which lets you create multiple instances of this module with different names in a single AWS project. ## Support levels and the `min_support` parameter You must provide a `min_support` value for all GRIT modules. This parameter specifies the minimum support level that the operator requires for their deployment. GRIT modules are associated with a support designation of `none`, `experimental`, `beta`, or `GA`. The goal is for all modules to reach the `GA` status. `none` is a special case: modules with no support guarantees, intended primarily for testing and development. `experimental`, `beta`, and `GA` modules conform to the [GitLab definitions of development stages](https://docs.gitlab.com/policy/development_stages_support/). ### Shared responsibility model GRIT operates under a shared responsibility model between Authors (module developers) and Operators (those deploying with GRIT). For details on the specific responsibilities of each role and how support levels are determined, see the [Shared responsibility section](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit/-/blob/main/GORP.md#shared-responsibility) in the GORP documentation. ## Manage runner state To maintain runners: 1. Check the module into a GitLab project. 1. Store the Terraform state in the GitLab Terraform `backend.tf`: ```hcl terraform { backend "http" {} } ``` 1. 
Apply the changes by using `.gitlab-ci.yml`: ```yaml terraform-apply: variables: TF_HTTP_LOCK_ADDRESS: "https://gitlab.com/api/v4/projects/${CI_PROJECT_ID}/terraform/state/${NAME}/lock" TF_HTTP_UNLOCK_ADDRESS: ${TF_HTTP_LOCK_ADDRESS} TF_HTTP_USERNAME: ${GITLAB_USER_LOGIN} TF_HTTP_PASSWORD: ${GITLAB_TOKEN} TF_HTTP_LOCK_METHOD: POST TF_HTTP_UNLOCK_METHOD: DELETE script: - terraform init - terraform apply -auto-approve ``` ### Delete a runner To remove the runner and its infrastructure: ```plaintext terraform destroy ``` ## Supported configurations | Provider | Service | Arch | OS | Executors | Feature Support | |--------------|---------|--------|-------|-------------------|-----------------| | AWS | EC2 | x86-64 | Linux | Docker Autoscaler | Experimental | | AWS | EC2 | Arm64 | Linux | Docker Autoscaler | Experimental | | Google Cloud | GCE | x86-64 | Linux | Docker Autoscaler | Experimental | | Google Cloud | GKE | x86-64 | Linux | Kubernetes | Experimental | ## Advanced Configuration ### Top-Level Modules Top-level modules in a provider represent highly-decoupled or optional configuration aspects of runner. For example, `fleeting` and `runner` are separate modules because they share only access credentials and instance group names. The `vpc` is a separate module because some users provide their own VPC. Users with existing VPCs need only create a matching input structure to connect with other GRIT modules. 
For example, the top-level VPC module can be used to create a VPC for modules that require a VPC: ```hcl module "runner" { source = ".local/grit/modules/aws/runner" vpc = { id = module.vpc.id subnet_ids = module.vpc.subnet_ids } # ...additional config omitted } module "vpc" { source = ".local/grit/modules/aws/vpc" zone = "us-east-1b" cidr = "10.0.0.0/16" subnet_cidr = "10.0.0.0/24" } ``` Users can provide their own VPC and not use GRIT's VPC module: ```hcl module "runner" { source = ".local/grit/modules/aws/runner" vpc = { id = PREEXISTING_VPC_ID subnet_ids = [PREEXISTING_SUBNET_ID] } # ...additional config omitted } ``` ## Contributing to GRIT GRIT welcomes community contributions. Before contributing, review the following resources: ### Developer Certificate of Origin and license All contributions to GRIT are subject to the [Developer Certificate of Origin and license](https://docs.gitlab.com/legal/developer_certificate_of_origin/). By contributing, you accept and agree to these terms and conditions for your present and future contributions submitted to GitLab Inc. ### Code of Conduct GRIT follows the GitLab Code of Conduct, which is adapted from the [Contributor Covenant](https://www.contributor-covenant.org). The project is committed to making participation a harassment-free experience for everyone, regardless of background or identity. ### Contribution guidelines When contributing to GRIT, follow these guidelines: - Review the [GORP Guidelines](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit/-/blob/main/GORP.md) for overall architectural design. - Adhere to [Google's best practices for using Terraform](https://docs.cloud.google.com/docs/terraform/best-practices/general-style-structure). - Follow the composable module approach to reduce complexity and repetition. - Include appropriate Go tests for your contributions. 
### Testing and linting GRIT uses several testing and linting tools to ensure quality: - Integration tests: Uses [Terratest](https://terratest.gruntwork.io/) to validate Terraform plans. - End-to-end tests: Available in the [e2e directory](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit/-/blob/main/e2e/README.md). - Terraform linting: Uses `tflint`, `terraform fmt`, and `terraform validate`. - Go linting: Uses [golangci-lint](https://golangci-lint.run/) for Go code (primarily tests). - Documentation: Follows the [GitLab documentation style guide](https://docs.gitlab.com/development/documentation/styleguide/) and uses `vale` and `markdownlint`. For detailed instructions on setting up your development environment, running tests, and linting, see [CONTRIBUTING.md](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit/-/blob/main/CONTRIBUTING.md). ## Who uses GRIT? GRIT has been adopted by various teams and services within the GitLab ecosystem: - **[GitLab Dedicated](https://about.gitlab.com/dedicated/)**: [Hosted runners for GitLab Dedicated](https://docs.gitlab.com/administration/dedicated/hosted_runners/) uses GRIT to provision and manage runner infrastructure. - **GitLab Self-Managed**: GRIT is highly requested among many GitLab Self-Managed customers. Some organizations have started to adopt GRIT to manage their runner deployments in a standardized way. If you're using GRIT in your organization and would like to be featured in this section, open a merge request! ================================================ FILE: docs/install/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Software for CI/CD jobs. 
title: Install GitLab Runner --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-runner) runs the CI/CD jobs defined in GitLab. GitLab Runner can run as a single binary and has no language-specific requirements. For security and performance reasons, install GitLab Runner on a machine separate from the machine that hosts your GitLab instance. Before you install, review the [system requirements and supported platforms](requirements.md). ## Operating systems {{< cards >}} - [Linux](linux-repository.md) - [Linux manual install](linux-manually.md) - [FreeBSD](freebsd.md) - [macOS](osx.md) - [Windows](windows.md) - [z/OS](z-os.md) {{< /cards >}} ## Containers {{< cards >}} - [Docker](docker.md) - [Helm chart](kubernetes.md) - [GitLab agent](kubernetes-agent.md) - [Operator](operator.md) {{< /cards >}} ## Other installation options {{< cards >}} - [Bleeding edge releases](bleeding-edge.md) {{< /cards >}} ================================================ FILE: docs/install/bleeding-edge.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Install the latest development builds of GitLab Runner. title: GitLab Runner bleeding edge releases --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} > [!warning] > These GitLab Runner releases are latest and built directly from the `main` branch and may be untested. > Use at your own risk. 
## Download the standalone binaries - - - - - - - - - - - - - You can then run GitLab Runner with: ```shell chmod +x gitlab-runner-linux-amd64 ./gitlab-runner-linux-amd64 run ``` ## Download one of the packages for Debian or Ubuntu - - - - - - - - ### Download the exported runner-helper images package The runner-helper images package is a required dependency for the GitLab Runner `.deb` package. Download the package from: ```plaintext https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/deb/gitlab-runner-helper-images.deb ``` You can then install it with: ```shell dpkg -i gitlab-runner-helper-images.deb gitlab-runner_.deb ``` ## Download one of the packages for Red Hat or CentOS - - - - - - ### Download the exported runner-helper images package The runner-helper images package is a required dependency for the GitLab Runner `.rpm` package. Download the package from: ```plaintext https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/rpm/gitlab-runner-helper-images.rpm ``` You can then install it with: ```shell rpm -i gitlab-runner-helper-images.rpm gitlab-runner_.rpm ``` ## Download any other tagged release Replace `main` with either `tag` (for example, `v16.5.0`) or `latest` (the latest stable). For a list of tags see . For example: - - - If you have problem downloading through `https`, fallback to plain `http`: - - - ================================================ FILE: docs/install/docker.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Run GitLab Runner in a Docker container. title: Run GitLab Runner in a container --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} You can run GitLab Runner in a Docker container to execute CI/CD jobs. 
The GitLab Runner Docker image includes all dependencies needed to: - Run GitLab Runner. - Execute CI/CD jobs in containers. The GitLab Runner Docker images use [Ubuntu or Alpine Linux](#docker-images) as their base. They wrap the standard `gitlab-runner` command, similar to installing GitLab Runner directly on the host. The `gitlab-runner` command runs in a Docker container. This setup delegates full control over the Docker daemon to each GitLab Runner container. The effect is that isolation guarantees break if you run GitLab Runner inside a Docker daemon that also runs other payloads. In this setup, every GitLab Runner command you run has a `docker run` equivalent, like this: - Runner command: `gitlab-runner ` - Docker command: `docker run gitlab/gitlab-runner ` For example, to get the top-level help information for GitLab Runner, replace the `gitlab-runner` part of the command with `docker run [docker options] gitlab/gitlab-runner`, like this: ```shell docker run --rm -t -i gitlab/gitlab-runner --help NAME: gitlab-runner - a GitLab Runner USAGE: gitlab-runner [global options] command [command options] [arguments...] VERSION: 18.10.1 (3b43bf9f) (...) ``` ## Docker Engine version compatibility The versions for the Docker Engine and GitLab Runner container image do not have to match. The GitLab Runner images are backwards and forwards compatible. To ensure you have the latest features and security updates, you should always use the latest stable [Docker Engine version](https://docs.docker.com/engine/install/). ## Install the Docker image and start the container Prerequisites: - You have [installed Docker](https://docs.docker.com/get-started/get-docker/). - You have read the [FAQ](../faq/_index.md) to learn about common problems in GitLab Runner. 1. Download the `gitlab-runner` Docker image by using the `docker pull gitlab/gitlab-runner:` command. For the list of available version tags, see [GitLab Runner tags](https://hub.docker.com/r/gitlab/gitlab-runner/tags). 1. 
Run the `gitlab-runner` Docker image by using the `docker run -d [options] ` command. 1. When you run `gitlab-runner` in a Docker container, ensure the configuration is not lost when you restart the container. Mount a permanent volume to store the configuration. The volume can be mounted in either: - [A local system volume](#from-a-local-system-volume) - [A Docker volume](#from-a-docker-volume) 1. Optional. If using a [`session_server`](../configuration/advanced-configuration.md), expose port `8093` by adding `-p 8093:8093` to your `docker run` commands. 1. Optional. To use the Docker Machine executor for autoscaling, mount the Docker Machine storage path (`/root/.docker/machine`) by adding a volume mount to your `docker run` commands: - For system volume mounts, add `-v /srv/gitlab-runner/docker-machine-config:/root/.docker/machine` - For Docker named volumes, add `-v docker-machine-config:/root/.docker/machine` 1. [Register a new runner](../register/_index.md). The GitLab Runner container must be registered to pick up jobs. Some available configuration options include: - Set the container's time zone with the flag `--env TZ=`. [See a list of available time zones](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones). - For a [FIPS compliant GitLab Runner](requirements.md#fips-compliant-gitlab-runner) image, based on `redhat/ubi9-micro`, use the `gitlab/gitlab-runner:ubi-fips` tags. - [Install trusted SSL server certificates](#install-trusted-ssl-server-certificates). ### From a local system volume To use your local system for the configuration volume and other resources mounted into the `gitlab-runner` container: 1. Optional. On macOS systems, `/srv` does not exist by default. Create `/private/srv`, or another private directory, for setup. 1. 
Run this command, modifying it as needed: ```shell docker run -d --name gitlab-runner --restart always \ -v /srv/gitlab-runner/config:/etc/gitlab-runner \ -v /var/run/docker.sock:/var/run/docker.sock \ gitlab/gitlab-runner:latest ``` ### From a Docker volume To use a configuration container to mount your custom data volume: 1. Create the Docker volume: ```shell docker volume create gitlab-runner-config ``` 1. Start the GitLab Runner container using the volume you just created: ```shell docker run -d --name gitlab-runner --restart always \ -v /var/run/docker.sock:/var/run/docker.sock \ -v gitlab-runner-config:/etc/gitlab-runner \ gitlab/gitlab-runner:latest ``` ## Update runner configuration After you [change the runner configuration](../configuration/advanced-configuration.md) in `config.toml`, apply your changes by restarting the container with `docker stop` and `docker run`. ## Upgrade runner version Prerequisites: - You must use the same method for mounting your data volume as you did originally (`-v /srv/gitlab-runner/config:/etc/gitlab-runner` or `-v gitlab-runner-config:/etc/gitlab-runner`). 1. Pull the latest version (or a specific tag): ```shell docker pull gitlab/gitlab-runner:latest ``` 1. Stop and remove the existing container: ```shell docker stop gitlab-runner && docker rm gitlab-runner ``` 1. Start the container as you did originally: ```shell docker run -d --name gitlab-runner --restart always \ -v /var/run/docker.sock:/var/run/docker.sock \ -v /srv/gitlab-runner/config:/etc/gitlab-runner \ gitlab/gitlab-runner:latest ``` ## View runner logs Log file locations depend on how you start a runner. When you start it as a: - **Foreground task**, either as a locally installed binary or in a Docker container, the logs print to `stdout`. - **System service**, like with `systemd`, the logs are available in the system logging mechanism, like Syslog. 
- **Docker-based service**, use the `docker logs` command, as the `gitlab-runner ...` command is the main process of the container. For example, if you start a container with this command, its name is set to `gitlab-runner`: ```shell docker run -d --name gitlab-runner --restart always \ -v /var/run/docker.sock:/var/run/docker.sock \ -v /srv/gitlab-runner/config:/etc/gitlab-runner \ gitlab/gitlab-runner:latest ``` To view its logs, run this command, replacing `gitlab-runner` with your container name: ```shell docker logs gitlab-runner ``` For more information about handling container logs, see [`docker container logs`](https://docs.docker.com/reference/cli/docker/container/logs/) in the Docker documentation. ## Install trusted SSL server certificates If your GitLab CI/CD server uses self-signed SSL certificates, make sure your runner container trusts the GitLab CI server certificate. This prevents communication failures. Prerequisites: - Your `ca.crt` file should contain the root certificates of all the servers you want GitLab Runner to trust. 1. Optional. The `gitlab/gitlab-runner` image looks for trusted SSL certificates in `/etc/gitlab-runner/certs/ca.crt`. To change this behavior, use the `-e "CA_CERTIFICATES_PATH=/DIR/CERT"` configuration option. 1. Copy your `ca.crt` file into the `certs` directory on the data volume (or container). 1. Optional. If your container is already running, restart it to import the `ca.crt` file on startup. ## Docker images In GitLab Runner 18.8.0, the Docker image based on Alpine uses Alpine 3.21. These multi-platform Docker images are available: - `gitlab/gitlab-runner:latest` based on Ubuntu, approximately 470 MB. - `gitlab/gitlab-runner:alpine` based on Alpine, approximately 270 MB. See the [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-runner/tree/main/dockerfiles) source for possible build instructions for both Ubuntu and Alpine images. 
### Create a runner Docker image You can upgrade your image's operating system before the update is available in the GitLab repositories. Prerequisites: - You are not using the IBM Z image, as it does not contain the `docker-machine` dependency. This image is not maintained for the Linux s390x or Linux ppc64le platforms. For the current status, see [issue 26551](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26551). To build a `gitlab-runner` Docker image for the latest Alpine version: 1. Create `alpine-upgrade/Dockerfile`. ```dockerfile ARG GITLAB_RUNNER_IMAGE_TYPE ARG GITLAB_RUNNER_IMAGE_TAG FROM gitlab/${GITLAB_RUNNER_IMAGE_TYPE}:${GITLAB_RUNNER_IMAGE_TAG} RUN apk update RUN apk upgrade ``` 1. Create an upgraded `gitlab-runner` image. ```shell GITLAB_RUNNER_IMAGE_TYPE=gitlab-runner \ GITLAB_RUNNER_IMAGE_TAG=alpine-v18.10.1 \ docker build -t $GITLAB_RUNNER_IMAGE_TYPE:$GITLAB_RUNNER_IMAGE_TAG \ --build-arg GITLAB_RUNNER_IMAGE_TYPE=$GITLAB_RUNNER_IMAGE_TYPE \ --build-arg GITLAB_RUNNER_IMAGE_TAG=$GITLAB_RUNNER_IMAGE_TAG \ -f alpine-upgrade/Dockerfile alpine-upgrade ``` 1. Create an upgraded `gitlab-runner-helper` image. ```shell GITLAB_RUNNER_IMAGE_TYPE=gitlab-runner-helper \ GITLAB_RUNNER_IMAGE_TAG=x86_64-v18.10.1 \ docker build -t $GITLAB_RUNNER_IMAGE_TYPE:$GITLAB_RUNNER_IMAGE_TAG \ --build-arg GITLAB_RUNNER_IMAGE_TYPE=$GITLAB_RUNNER_IMAGE_TYPE \ --build-arg GITLAB_RUNNER_IMAGE_TAG=$GITLAB_RUNNER_IMAGE_TAG \ -f alpine-upgrade/Dockerfile alpine-upgrade ``` ## Use SELinux in your container Some distributions, like CentOS, Red Hat, and Fedora use SELinux (Security-Enhanced Linux) by default to enhance the security of the underlying system. Use caution with this configuration. Prerequisites: - To use the [Docker executor](../executors/docker.md) to run builds in containers, runners need access to `/var/run/docker.sock`. 
- If you use SELinux in enforcing mode, install [`selinux-dockersock`](https://github.com/dpw/selinux-dockersock) to prevent a `Permission denied` error when a runner accesses `/var/run/docker.sock`. 1. Create a persistent directory on the host: `mkdir -p /srv/gitlab-runner/config`. 1. Run Docker with `:Z` on volumes: ```shell docker run -d --name gitlab-runner --restart always \ -v /var/run/docker.sock:/var/run/docker.sock \ -v /srv/gitlab-runner/config:/etc/gitlab-runner:Z \ gitlab/gitlab-runner:latest ``` ================================================ FILE: docs/install/environment_variables_in_helm_charts.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Set environment variables in GitLab Runner Helm chart --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} Environment variables are key-value pairs that contain information that applications can use to adjust their behavior at runtime. These variables are injected into the container's environment. You can use these variables to pass configuration data, secrets, or any other dynamic information required by the application. You can set environment variables in GitLab Runner Helm chart by using the: - [`runners.config` property](#use-the-runnersconfig-property) - [Properties in `values.yaml`](#use-valuesyaml-properties) ## Use the `runners.config` property You can configure environment variables through the `runners.config` property, similar to what you would do in the `config.toml` file: ```yaml runners: config: | [[runners]] shell = "bash" [runners.kubernetes] host = "" environment = ["FF_USE_ADVANCED_POD_SPEC_CONFIGURATION=true"] ``` Variables defined this way are applied to both the job Pod and the GitLab Runner Manager container. 
In the example above, the `FF_USE_ADVANCED_POD_SPEC_CONFIGURATION` feature flag is set as an environment variable, which the GitLab Runner Manager uses to modify its behavior. ## Use `values.yaml` properties You can also set environment variables by using the following properties in `values.yaml`. These variables only affect the GitLab Runner Manager container. - `envVars` ```yaml envVars: - name: RUNNER_EXECUTOR value: kubernetes ``` - `extraEnv` ```yaml extraEnv: CACHE_S3_SERVER_ADDRESS: s3.amazonaws.com CACHE_S3_BUCKET_NAME: runners-cache CACHE_S3_BUCKET_LOCATION: us-east-1 CACHE_SHARED: true ``` - `extraEnvFrom` ```yaml extraEnvFrom: CACHE_S3_ACCESS_KEY: secretKeyRef: name: s3access key: accesskey CACHE_S3_SECRET_KEY: secretKeyRef: name: s3access key: secretkey ``` For more information on `extraEnvFrom`, see: - [`Distribute Credentials Securely Using Secrets`](https://kubernetes.io/docs/tasks/inject-data-application/distribute-credentials-secure/) - [`Use container fields as values for environment variables`](https://kubernetes.io/docs/tasks/inject-data-application/environment-variable-expose-pod-information/#use-container-fields-as-values-for-environment-variables) ================================================ FILE: docs/install/freebsd.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Install GitLab Runner on FreeBSD systems. title: Install GitLab Runner on FreeBSD --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} > [!note] > The FreeBSD version is also available as a [bleeding edge](bleeding-edge.md) > release. Make sure that you read the [FAQ](../faq/_index.md) section which > describes some of the most common problems with GitLab Runner. 
## Installing GitLab Runner Here are the steps to install and configure GitLab Runner under FreeBSD: 1. Create the `gitlab-runner` user and group: ```shell sudo pw group add -n gitlab-runner sudo pw user add -n gitlab-runner -g gitlab-runner -s /usr/local/bin/bash sudo mkdir /home/gitlab-runner sudo chown gitlab-runner:gitlab-runner /home/gitlab-runner ``` 1. Download the binary for your system: ```shell # For amd64 sudo fetch -o /usr/local/bin/gitlab-runner https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-freebsd-amd64 # For i386 sudo fetch -o /usr/local/bin/gitlab-runner https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-freebsd-386 ``` You can download a binary for every available version as described in [Bleeding Edge - download any other tagged release](bleeding-edge.md#download-any-other-tagged-release). 1. Give it permissions to execute: ```shell sudo chmod +x /usr/local/bin/gitlab-runner ``` 1. Create an empty log file with correct permissions: ```shell sudo touch /var/log/gitlab_runner.log && sudo chown gitlab-runner:gitlab-runner /var/log/gitlab_runner.log ``` 1. Create the `rc.d` directory in case it doesn't exist: ```shell mkdir -p /usr/local/etc/rc.d ``` 1. Create the `gitlab_runner` script inside `rc.d`: Bash users can do the following: ```shell sudo bash -c 'cat > /usr/local/etc/rc.d/gitlab_runner' << "EOF" #!/bin/sh # PROVIDE: gitlab_runner # REQUIRE: DAEMON NETWORKING # BEFORE: # KEYWORD: . 
/etc/rc.subr name="gitlab_runner" rcvar="gitlab_runner_enable" user="gitlab-runner" user_home="/home/gitlab-runner" command="/usr/local/bin/gitlab-runner" command_args="run" pidfile="/var/run/${name}.pid" start_cmd="gitlab_runner_start" gitlab_runner_start() { export USER=${user} export HOME=${user_home} if checkyesno ${rcvar}; then cd ${user_home} /usr/sbin/daemon -u ${user} -p ${pidfile} ${command} ${command_args} > /var/log/gitlab_runner.log 2>&1 fi } load_rc_config $name run_rc_command $1 EOF ``` If you are not using bash, create a file named `/usr/local/etc/rc.d/gitlab_runner` and include the following content: ```shell #!/bin/sh # PROVIDE: gitlab_runner # REQUIRE: DAEMON NETWORKING # BEFORE: # KEYWORD: . /etc/rc.subr name="gitlab_runner" rcvar="gitlab_runner_enable" user="gitlab-runner" user_home="/home/gitlab-runner" command="/usr/local/bin/gitlab-runner" command_args="run" pidfile="/var/run/${name}.pid" start_cmd="gitlab_runner_start" gitlab_runner_start() { export USER=${user} export HOME=${user_home} if checkyesno ${rcvar}; then cd ${user_home} /usr/sbin/daemon -u ${user} -p ${pidfile} ${command} ${command_args} > /var/log/gitlab_runner.log 2>&1 fi } load_rc_config $name run_rc_command $1 ``` 1. Make the `gitlab_runner` script executable: ```shell sudo chmod +x /usr/local/etc/rc.d/gitlab_runner ``` 1. [Register a runner](../register/_index.md) 1. 
Enable the `gitlab-runner` service and start it: ```shell sudo sysrc gitlab_runner_enable=YES sudo service gitlab_runner start ``` If you don't want to enable the `gitlab-runner` service to start after a reboot, use: ```shell sudo service gitlab_runner onestart ``` ================================================ FILE: docs/install/gpg-keys/49F16C5CC3A0F81F.pub.gpg ================================================ -----BEGIN PGP PUBLIC KEY BLOCK----- mQINBGRIarMBEADBedWkhv4Vsi8Rfov5XbKNvAnoT4Iuoz6dQWxBUV5MWY6J3MFD v7ohrnHDYZaOJ+9HGvGNKCYyqvJpTDD5YNxhCbQ9oWjHj83t4zMzeoNQ6H1PrEV2 ClZqhiwQia7th3PJu3yjl3apXKfCNr2jq8hkqjNGkXE+UsFTEy4xu6AXBvaBfcm0 gmR2KL9Nvkvu3ZgDCgsD0evpdWQz3q1IXvDKpFmpxaWgCXIPZONF5XGonwFtQaV5 U0lcPL17e5JiGhNir/DAxjw0IIfdNCdfxQWn1jf0mSqCC7CVk37usWujFHmNzI0F GoCPp17RKf70WXOneRA7bdFXH9U7aPiwGt7sftRIluheJovWMgYTHcmTwV/N+FJs 5p5Bp3yqFZyOUAo11bEe4N1dJLKELbYe7DnYbewaMjBgkaLAaPhQB6fGW2FnLh87 VUn/9qpdYIAK71oFeFYXUYbJI1elWrOK4ONME6N87mCo5psHm3Re5u9HiRUl+q9H KfzP2KH7MxwmpmcpFFNmyHeXWaWoFCDW5+vHVL7DkRhDw0eLI85uAYvD5gwWncMP ZNp9jmfSpQ/yuyH96148eMVEQyrlUj4if3odU8pWoUQJC3mtk8HA5/KSagFlmy6h GBhhTBZmz0HwJT4TL/QulD1kpdnTueCEe9jUAetHuKKnYK3Ckknf0svOLwARAQAB tCFHaXRMYWIsIEluYy4gPHN1cHBvcnRAZ2l0bGFiLmNvbT6JAlQEEwEKAD4CGwMF CwkIBwMFFQoJCAsFFgMCAQACHgECF4AWIQSTHaac+jr+u8l9qoxsV8Kca6daTgUC aZ/KfgUJBziTSwAKCRBsV8Kca6daThThD/4wldKBgoZ/FX9nu9vDlIkxIXcCvLRg a/Ez6On/ydnybbjKTQ5zrn4yBcqQOjsHDJg2fXfKZG4xkNaqDgcn6nU2d5wy1EB9 tw9r7txGhyxAX2VwxDZ3ZzR8Aa3Wpw1RBdcT93PIXXKeZESKPHIUrqvFpdQsddan /dZi4GhhRei2ABQ/XzRHezArwlb/aNnX19GCrnzwcZIWf/gAyiM1QOrURyUAQTIM 5VnqRG00U/4H7V9Y8Q4bvSQ0Y1SQFpo7hDcnCOyC9iAdYHMHjd3FUa67sYd5u1sA dhnXAG9jerCGW2darhOBb2PF+H6zm1GvztNgecGTOHFXKE1Sq074lyMPHxUpHwtI Qyy+mKxKkUF2GIvBav9yRLIfuF7mKelEczb81w2AilRGPQlWsk0L68BkIjPP1HHx NVVk/LE/f451+71vNkhkT8HWEeY2QzTyoOPTeWr9mumplcknRI97nKV7xJwGn6sC SsSr5QMJshMTq2BpaRD6BMsdn8lp6KcFouyZsJOJfw9v3Qq9GSP5m5BKNQIrwlcw 1M04awYnhhRMgs/YGN61wWwF6qNa5n2yKxYSqU+0Bwu917sP+gI7r85xGJ8q04Fj 
oN0NeRHTo1Q84cY2rkKJ5o0oKL0Szk2lFpppVriziQBZ/K7RiPByov5VOPAp4wcI Eo/2I8bGrYROr7kCDQRkSGqzARAAurPXpFBgvbEdVIGvrFN71d5zXeaH12In0OmR 9tYhv3KqLp7ed+QJsCcScvFPXs5VvdXW0ahT2hw7sEiRmex/2MmrSqPi6kt+xKkL iC+u8qVR9xWueNzAgdqfKSSoHgi0uZPtK8rkYGLnCO4tq4DkxLhTt5SAxUCPrZnA gkm9/UO+YwlHJoxjqu0dRgHfs0sopxS+HcZc6p4SnEgNsx1m6cRFIDmLPvlHDIQw FR6nAJCGz1u5NwO85UDbxMpRYH4znzeSVfbU+/DaBe8++iHPraKrkaUcBREeb4S/ wfj+9uwjYwgYg6wnEVC53x+nMNOrlzwJaIpiSiQt4Qh0kAFpobDUAljcMZzl0m6U TlwJNDCaF3PJR12A3vJgxGUCFukcytQYT5sLeEoaeAJ5icgG1DLH3AwJQnZUrqTA +0dQ0WxK2obOL7aLK55ekFaFtcFZoNz9zA2rnbvc31L/3lqRfvltX7WeKz0FvfMJ mAsoYzHFJV9h3Nc3dSpY7xst8+3EvAYif4utD2hQ52cwQZgE7pNKc0zIdvYs2u6q j2zh9Y1mDI35xsO2n4M1EvkD/oKbzJRNklS3GW2Phsz+4R7uV30SjswAE3JqpYna 9uT/nbjlwRu5Xig/Ry9pAFRYmNcRoxOEEhwjXYQuyQl66aDN76Mw7rcDmoD6jd/u ssbcku0AEQEAAYkCPAQYAQoAJgIbDBYhBJMdppz6Ov67yX2qjGxXwpxrp1pOBQJp n8p+BQkHOJNLAAoJEGxXwpxrp1pO9PIP/Rs5B6EX47jFn+8BRhsJUZBVedEVh+yz zLlmHpSFqlshRS9tPUuKaMMmgAcBuoX4dB6WQtRtfwHZpeSw31CcwTW8XGDpk/9z NTHXxVjpq97+GTMM19eGj2V6kuLd8wqWF4BF1HQCNyYuRkuYdSkXEcAOzmrGJLsS lakwGMYS1YtdZJEV8iH7m8N7nc03bnwrdJpLm2Dtm+yQxlZmDaUqliRMGx9lriKr NnNhgIY8fW0BL5lETvSPV+30E7zQn8B/Kx+dYztO0QJihivWx+TTYXzy5+lC2ulF 4LD0sEWnBpd4iqYgA5YYhHHkXLHXl32cdaxMHVJcLXY7hqBuVssKhB4VVAzttL5z HvdDOlY26O6WNf2nRQHZo9Wa6o16jbmuKG/DPCfIw5cOI7K+oRLiGGw/EdkMSegM QD1SC2wnyr7vzRzCfYwAUGqgvyc4coVXDcpgZb5Nk8+gXrwMZgm2do/JYL3rXSVz DLkdpMQjuqebbbm6eHsNYn/aaQRYerc9ZMUuujO6rXwNEkMULYeD8dTtZm66a0cZ WXtxK9gdbgS+OosNcN/gnDBdWmaRhvl49dHZCI/v0VNmz6r7UJmIsUAAele7K7R6 DBHtYGsVA8qJvIEJ4/DlprPj4Oy/yNF1lZyzmcc6+jqDTYtjdXM7Kp2JRyy1aF9f sjyP+rNESEzj =UOpn -----END PGP PUBLIC KEY BLOCK----- ================================================ FILE: docs/install/gpg-keys/9CE45ABC880721D4.pub.gpg ================================================ -----BEGIN PGP PUBLIC KEY BLOCK----- mQINBFsr3zsBEADrgMkSuaETS18+516t7tYyRDLvdXfgNKkssbQ3eGd0SSzpMRtX 7Op6SJRVEsrNoGs916HkGdPpywpEib0tDWkxW8Du/5BcIdLE+3vD09WjEAtlMsSU HR2wcdpYHGgUuiOp/jqGNeP/lKAuTl2P3NFCkDJF9DvUGzKsI1nkfIa1RMOot3Is 
BUYmjCraQcnqdwT+idobCq30SkR6Fohpb5F9RaAbgnCzgOEkKCT0n+1vo6NkmKGT 1FRagWnHMXMZeE4PDDrU4+6b3Ev7hV6nQxiLtJSE4EI2z4hOhia0FcCqRiPVc3AA vjB68d1xOdMCBEir52I7JO9b0+hr0xrL7bgfyD5xCsncbq4ha3cQE51x6fPITO/y IcVyqzzEZJmmWeHLyPAevnAdWh6YbPlQySpkvGeUcRLUgbDT9bhKGJvYoWYy7Axw S3jJgZR/c99HyhmTlruA45XIIWIoE5vK3+kp2cu/+Q+J8QFmxVJli9ZkOGzXxR0z hrmKZaGNnqzwlQM3gDj0R5Dgb004La8E5xqi9Gbb9rXBj0UFF4KxGIeEgdgD7QEO igHztfvQ0/CMq8iJ/jYmjkhJKDgET2ULaVU+aI1s7bV74TKoteSskTAe1vg7KefS xFifwkRQ6tlp8vfsu8FK69mgtMVpXxFHODy1RrJgSsFNAUkhuupmp1Q8hwARAQAB iQI2BCABCgAgFiEEMBg6wsTiOkCe++cFnORavIgHIdQFAmC/awsCHQIACgkQnORa vIgHIdRNERAAoj0AEQNG3DrUNscx9V+Yi3ILh5QiNuPW5tUhQ7/1dHQZ2flbGpBl 1PXK5rUt5l2qgI1KmPcRn0Ruw6Cr3Vw37GHPySYf+FXkBqUPbPAIGt2pv050fm+a bU1Oelskzhf7SwmYWTAp1opaFIXyzvEnQj2JkLmo7mrpPXHiiF3TCGMIGKYhPJM6 KwdMziqJ7KneZcFZM3Np/ldqo0gb+v7HVc9d74ncCA2Kf/0XfC3ha/uE6LHNSP6e CLvWq13u4TydX0UAuytbtMYyIlFdTGfoGdAbyeY6Jl8xnhZfEiATFH8LBtRg0qh6 ThJ+ciD5ui8iXaKcCnCdJpKzSR1mP0kkyfjHPEMgp+OeIWskxiWAD4D/BjVavwZL 8q6rQC/QesqYkUvKoSb338941ZK7c+O5qmX7QxNpPuqILr92dgx/HvMNkM1GD950 2fbFHMvCK+FXf4F1kfSAP9lI2VS8P6lDTB5M5ddJir5BVLeRLa05e75/hiemqM/i MRiy/fcTXFtcs2vRNUuogyNRZxrTQBHsju8QdU13tOq+XbAfACiqQJgNOUr5ZK77 OhLu3eqdf7znwJc7Kqsn+Nqf2JeH7NxQghBiqHD6CEv8zFoIrFeC01dVIvB55zo4 vr46R6UUp5gNeuw+NbcAY6ZDbtBfCMft8XUa8+ubm62iekXKmjrFWUe0IUdpdExh YiwgSW5jLiA8c3VwcG9ydEBnaXRsYWIuY29tPokCVAQTAQoAPgIbAwULCQgHAgYV CgkICwIEFgIDAQIeAQIXgBYhBDAYOsLE4jpAnvvnBZzkWryIByHUBQJfLAPRBQkF 4VgWAAoJEJzkWryIByHUwQ0QAKztKJZr/saB97WO1guM3HtPScPC7kWnDPptvrxd 9Y0UMmw8seb8BbxK9Ad0RooOjxZsiDb+GHXl9JfbT8HR/E9VsB4pDv3+ipORn+ji REKqqsfWuNzWN2QLKwHH/C3ZMh+R2ut3db+kLaQr2ED6UtjMPLa8laqI0uSly5c9 k3/f0F9RggqOpuDlzrKjkaccgYgJHuAkETCJXIZYL9KcgFElUseWBY6xyC0nbokH HynODKImwTCVR/ppmhfJ6RrBXZ/AWFL26Lfze+4DHrgFymtVRFiAXknaqDDgt2H0 Kxk+9Gg24Tdp8OCsAmei6/9lUBO+9TVp5JVGZ0+TVglB+tj58xLMxfsqmCO9fbT2 uNKhXYm9XkduankYKZU69EW/T7JxMUpw5HdfepcL69Dn/5kzkwg6Be1ukyJ3gsLC 1tcCD8oqnkWJAdAEybc2Ozh8r/xn7qj7kYYKtqDRAeQ+IcHcoz8F3jH8c+2KBHpv 
LAJIjdI/8PRQ1mCUYF2SIuUsGOTF6uWQRlyF3Vx4oCuk8DRNXJXUTDzBpVpfAzU7 eXG1mzge6tVjZDEDkzVA+mNInB66GZeUXxqq880WaxMpdzZvqcREoTMoNHG+zB8x Xz3jNzTkkqV/0MuQG88ROPR3g4Ks+zlGN6FeTf0spK3NXe5WqAww6wYy12e9ONV8 5ZoLiQJUBBMBCAA+FiEEMBg6wsTiOkCe++cFnORavIgHIdQFAlsr3zsCGwMFCQIX V5cFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQnORavIgHIdQFsxAAgISRuCKR yT9+xFjTBIsFx4V7y5Q+Y6ESy5/1JuftRr/OvKP2ijXqrfA5UD47Ndzcu9jWCxKM upEy4XvqJ4Wj04N6Yo9uIeOnUHVPM0dbD1b0Om74oMvInzrnBf4krs2mJqg+tdMS 34sfhF/+BOYiNsjvqvsbV+T/NFom8s8sjJZ/FbOkJbJvLajpjAReFFvbE3Wf1NYN KZA0wLplA4102KEwupVTgC6XCR5R+jUKBD9b3MAeawKJuFzVA3BaJFUbOl2sAuc0 bSqgVSdcM/e9n6mQaReiPu1ciu7nkpV00mMX6FC1B0x5hs1jsUY3zUJNXgaSIKq1 LU0HgvR1fTplkj+DEVUp8z7uzznYwBscT47kCXprsjlQjiC8xiLSO1bDLnNCAHl7 EwNlceRAGVKaeHWj2ImwZuq5pOKVvEqqNGItE3y26+JzTfUlPMI9pZRRbdxX1gl+ FFlbaeQiVmyH7dELsmlbm2icemCdfzcHNBRWutHzPOQ3cB3D0OSouj20C7WW5c10 ZcwKCgIbE2YxnZQwpZV5vjKCvMV480J3gpk9dNH01Etxvwa7/D0Pb3NBiQ7x2QkP m9CZ6rvdJtR6/wT7q9JWWU/d2yzA9bVDOmtHS18n8guZxNkJaEz4WCOIUEo9dZQ4 jiQhZ6pDAUUu+oQRIX495nOXCyWuffmK7iy5Ag0EWyvfOwEQANrK2LKOCaxslyFX nD54DlY2g5OgmJB1k1kU4gC3cHu1uMVa9hCr2UkdLs0QqI7Q+fQFtSMWHY0Sal5w 532fsCp3Qujuk6VLoSUBjvmARAk0jneyyik2X2RNdUqWcTIxCStKmukXUqZPJtP/ r3tTZ9zkyr0NUNwO7SYGARdlnAopOsy5jjf82IDKPN7zTZ+HfwHYbvdQT4VZFeR4 sZC7h+tjXucFjMITAhf8aJ3QNWzNBRzMkmM9RTeKt9l6e0nAF8IMdTLn9VpywAf4 l+76lS0XKU97O6JkNTsbCW2sF+md4im1y0L8qN6R1JvQ/lSzK/wUN1JlZft+64yH vaQnDfftl9Zf5LKNIMSQt1NycUjkYuxLlmEwmvXOIX+k82wnpJvLCqFoPDcmW7Gc fGpz1O3BP/CjyDtjZ+BWsvnp190VxidIaG2GIpCgLRcwFwXNQ+jOhMLVSOUK/PX9 /IeFVdkL1elp1wKwk5VSpHMaf6SRkYb61o1S13lh+WcpgOU/q8DhDdJ4xW43W4Xn /ErzXsZbKY68y/a5NwCF9aLFiUDRCSQWb4XzubE288zBvb1GKcfgFFInj5x5Gl/m nknw6QVN0GAWlMkFThwd1NhHi0/UuKpm+7+9Iy/pGSdcnUog+iAzK/hk40vXaksC J73By9cg2dymg+FBAKcXfXM0lGPVABEBAAGJAjwEGAEKACYCGwwWIQQwGDrCxOI6 QJ775wWc5Fq8iAch1AUCXywD0QUJBeFYFgAKCRCc5Fq8iAch1C09D/90rQL92Iic XOsUpZJVN0QfnP5xfyKieFuashHUCdjhCCrDK1QRXe76jDqNTy8iZI+NDIOWcAAo GCM3tflCrgh64Dfx0oPIL5WaJaHOztoAZmJfnNAO3oEncUyXLS0UNabYksCXQdIl 
iJv6XVKsz840wWdMc50W3kPbkl1z4ZsETSe+Gib48JYnrB1PZhuqr5aKXpILmXRK d9iaioEJ72FCpQTnIOBOaaaunW2WqHWtrl5r1GSUIFxwKDvTKCKnT2UOYRf677cA GlIPfbEFJoqtI71DGy9uxMX36AMcV9QqDAEFJGq4kabqTcinMocPjOXM69ja637P 2h2DznUv9WC8pbfx7nrNxGv4ztGorGKbiayZu6mPMoCrxoTcMjtX1fRLc4UbMgWN VEhRuka/covWSs2LdSH/YMK3Z8Oz8ZX3FUu5LL3pkxyTN63uIkMn059o3aZzIrD8 mXh5nAArd60kJ/wAhPwPa5WjwnGg4BYcKIUyomTZqcIqPbboJSaRwVUOfqsXlEZP FXJYEbWsJA470XiIzOkrBuL6N0py7S8ee6Kzyj49hFz8CvZgS8mfyOC8yj4Hne2a dU1a7NWZnoCC1IalGMNcqOtv89tnYw90BtqXEkNxKsOgIF9M88yet265eFZm9VHv ZBuYgJmq1KP9oVQXJu5x9U/Jve8biKoFFA== =80kJ -----END PGP PUBLIC KEY BLOCK----- ================================================ FILE: docs/install/gpg-keys/A674BF8135DFA027.pub.gpg ================================================ -----BEGIN PGP PUBLIC KEY BLOCK----- mQINBGC6AjUBEACkSEBKWN5qzl/Q6A07Brjw9rj83Shj0r+Iq0lx8yOc/sa+xb8V b4VB7PDLz5HzPlxtGVH5u7420VfrDhsPdicj/5s0GlJ/aCMLO7oaNjhpO6CZUJBL tUQ5KtGap2ibpYA9mwLE5oLWfxLQPczbta5EctqEbDgrDXEZ5AVvTn7c7mhxLV5x ++RP1C2HrsUAVVRzSWVcsDSqXNQXuzIBtAEuwYQI4V6uCGSw/WZDlWwZ7j1OOh69 JNEudWFnK0N+7Tei6ulTkLlIi85XTZD+Pnl/kPhUDH9/Ce2fg8cKQN+wEKUC5QuX lVnclhyOp7PVlor6ZSvvvaZHDM9D7uHFJj8ImjvmL8/mEWqxWz3jbV6cfIIRKqQv 4WC7dVrovr1gGs4qFPb4o4DopOgKG+mt9TjKArdyFU5fRHicVNyuMglVkuq48b1g pxVAQvHs1keoW1mSlCZOwYYwXJeq6Y+kuSMZugid0cBxogKGdyqzVvLyyZOhZBYC bDe2JLvM9IVN122owouxmR1pItT3SM5LR5wbBEZLEPVeyvqfHcyoJHhgoXks2MXt +GQgOnGkWCLokk4U2AO1YTfKLDepxmTZQpT2Jr+tzhkQ1VGxdhFlxd2jB8NPbVDn HuG+ExLjobW7Qk8SBecKlaW7h3CWN9bGurllseAawv8t/aFc/MwHPzZX6wARAQAB tCFHaXRMYWIsIEluYy4gPHN1cHBvcnRAZ2l0bGFiLmNvbT6JAlQEEwEIAD4WIQQJ 5XCD80zKlNVBvFimdL+BNd+gJwUCYLoCNQIbAwUJA8JnAAULCQgHAgYVCgkICwIE FgIDAQIeAQIXgAAKCRCmdL+BNd+gJ/GCEACfDy88i3ywYjog2QJpeWtux6GeoQTM tKPlqnlauShKfRhSDfliVpjOHzV2blkFl7CJDf62Bo1Nvk8GgqPlG6aIFsPbWFha nyQiRvIbwjRnYU+E9+zW+Y6jb+EynLx4kv0KhmepZEs681KSbEC1AP7AKmiqZY2a GgSgq6d4yG0zDYb1XLC7RvRl1O8GR/uaSZQo+678/SigSApSHXvaUelEcLxg+Hqg qtCVCAzhouvDT/Ytz8oZHp2ZgBbxZ6qwZHhJwcRuIyzWAK8fdXUB7KGnJMGF0L/u 
Hvp2bRYYr7XNGID2pCpscNZKaQSNeJx72PZ/12u85eenQgoEtPEjrgQ5ZCyNZuTH bPdeiUxHx24rWIlN8/oY8c590nz2uDYze42IAyQis+2m0jX/KFXOuXRfrfnV1XtD Sx0pxxoHc2mJ+wQrOG2jLnqQPzQGwSrga8cjRBuYiFWW0UZieTpPNuXcjbbVchDe GxQrDhQvzAjwk9xOD2/56+JEK0jXAxEb6wN3c8lNJkYXxDNGIvKU1eMnC8XbSkL5 hyWmSvr/Gt14Hp6snPfedxcPahcCOZ+NsvB/DFp//ypnO98UUYS6xOBsnn5u556/ aZa+uqpbZgr4L3mHTw6eafMCW0QGDgL96gPBOVkJWQPcBjToKxLxFdmLs2OAqzuy 8YUXDMHcf1H8vbkCDQRgugI1ARAA8mNCi204fPoSqFba8I96ssLi808atUvTrWVH Sb/T8QhQKeYhw2jDboh3jWR1q4Hj/tpc5sN9jRFXDn1pdUlbFjLGHNF8PpbNklh/ kvIPhvIabsAQn30YDeOitRuKGQ/Ncxxbe5nD96b3M0YA1YuRItno1ihvvEVEq2EG vMcB7UEdvPQp6YYZCjO0jbtLFRkFcbj09r7pYNQLSv7zDCJJbYfxtbhflI5E9zJy ejVYnHisnzs0y+Ts5OJil23+b+D/HMvv4DwSD8AmVhIhEAOj1zX01xmcQzmbv2Mx 6KmV2faoq+X53fUBJbNhSMEluPgYkxUtsY4wYth3eawWiBW009QMVo40RMjVN42i v6DsUFE75mLMu5/HWNC8bIhSFGdkUs4QVQ15wBDof294FIAEsKgfHaQWgv+xE24y CxLTyy6HIMsheV6OtuwsUwLh0InhUydOoRbTfEJNuROrJExXSpMBCWq7sELWm5+n RZA4iwwb/zo0xJO4GIqmsiG/JkgYTOiE7789BSJ3g6krsNl01UZ2Zlpxsh6m+prv E3iYWeUrte+aMcDzsainhIC4Vu5IJSfz3siFx80hDjB2NwFYMJh6xdSuhONdnN76 rbkHwTT1wZBQNZVlljusTnqajruYl6eWyuUwNLrrva3kC4QYGllxr/K9ZEfLNTAH S5xg+CsAEQEAAYkCPAQYAQgAJhYhBAnlcIPzTMqU1UG8WKZ0v4E136AnBQJgugI1 AhsMBQkDwmcAAAoJEKZ0v4E136AnG1sP/33T8HHNxVGV6GKC6OIKTWPSag1y5yRN 26uNsM5o3tE2ib7oTESv/Sz95RVGXe3d6CB2wL6G/Atsu648eX+xODKd5vlcs9pO 5+PR/25SZcvF3B4mooyQQJ18dhYH5abJ7LtixQrcClBVjhzMpRGSQjSf8Iup7WYG Z+RoIhOaOqNZIfutapCjX5V7CXdoerCFTeMJje40Ovi/xfYvrepvHxz3WqnBLrL5 njzv1fV/70F1Skv51w6Qn6I2nxrvm/7Sf7wA7+HZ4EWjTx93GzZ8njO/Cs2YA56K s5XQauHk6abbkqTtcU08nyLji5BivX9+U5LrEp/svL0x7HdcE0smhv3pt9HdPhss uKE37krV9FJ0vKxc59zkcl4W7Pb5SGuSzra4vGzAocrCa0KrIrJrYZvyR8hiajbn FaCrU7IGrKzmWKqb3j1/P0ShWIzGtK+F1hyY2n0C77yUdau3WKmBWDcdTuAls4wg 27mQWTfW5xxGIJw6Rxh6/3Kc0tPsNJH0uDkEp2Jol+1XWbY9t6QzgLA8M4zbyB/D cDjPP+Esfm5WkZRRS6FHWE4bnrRH2nc7qd3Z2ey0CEyOlMj7sgGSo3FbyJTmS+Tk rYmT+08DLjl6pwLfLsCuXRWYGa+a0ofzeUNB9UZ3eUVA7mKX2CN+USijf9fadGpm FAgg3FBocHZ4 =xbUD -----END PGP PUBLIC KEY BLOCK----- ================================================ FILE: 
docs/install/kubernetes-agent.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Install GitLab Runner using the GitLab agent for Kubernetes. title: Use the agent to install GitLab Runner --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} After you install and configure the [GitLab agent for Kubernetes](https://docs.gitlab.com/user/clusters/agent/) you can use the agent to install GitLab Runner in your cluster. With this [GitOps workflow](https://docs.gitlab.com/user/clusters/agent/gitops/), your repository contains the GitLab Runner configuration file and your cluster is automatically updated. > [!warning] > Adding an unencrypted GitLab Runner secret to `runner-manifest.yaml` > can expose the secret in your repository files. To manage Kubernetes > secrets securely in a GitOps workflow, use > [Sealed Secrets](https://fluxcd.io/flux/guides/sealed-secrets/) > or [SOPS](https://fluxcd.io/flux/guides/mozilla-sops/). 1. Review the Helm chart values for [GitLab Runner](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/main/values.yaml). 1. Create a `runner-chart-values.yaml` file. 
For example: ```yaml # The GitLab Server URL (with protocol) that you want to register the runner against # ref: https://docs.gitlab.com/runner/commands/#gitlab-runner-register # gitlabUrl: https://gitlab.my.domain.example.com/ # The registration token for adding new runners to the GitLab server # Retrieve this value from your GitLab instance # For more info: https://docs.gitlab.com/ci/runners/ # runnerRegistrationToken: "yrnZW46BrtBFqM7xDzE7dddd" # For RBAC support: rbac: create: true # Run all containers with the privileged flag enabled # This flag allows the docker:dind image to run if you need to run Docker commands # Read the docs before turning this on: # https://docs.gitlab.com/runner/executors/kubernetes/#using-dockerdind runners: privileged: true ``` 1. Create a single manifest file to install the GitLab Runner chart with your cluster agent: ```shell helm template --namespace GITLAB-NAMESPACE gitlab-runner -f runner-chart-values.yaml gitlab/gitlab-runner > runner-manifest.yaml ``` Replace `GITLAB-NAMESPACE` with your namespace. [View an example](#example-runner-manifest). 1. Edit the `runner-manifest.yaml` file to include the `namespace` of your `ServiceAccount`. The output of `helm template` doesn't include the `ServiceAccount` namespace in the generated resources. ```yaml --- # Source: gitlab-runner/templates/service-account.yaml apiVersion: v1 kind: ServiceAccount metadata: annotations: name: gitlab-runner-gitlab-runner namespace: gitlab labels: ... ``` 1. Push your `runner-manifest.yaml` to the repository where you keep your Kubernetes manifests. 1. Configure your agent to sync the runner manifest using [GitOps](https://docs.gitlab.com/user/clusters/agent/gitops/). For example: ```yaml gitops: manifest_projects: - id: path/to/manifest/project paths: - glob: 'path/to/runner-manifest.yaml' ``` Now each time the agent checks the repository for manifest updates, your cluster is updated to include GitLab Runner. 
## Example runner manifest This example shows a sample runner manifest file. Create your own `manifest.yaml` file to meet your project's needs. ```yaml --- # Source: gitlab-runner/templates/service-account.yaml apiVersion: v1 kind: ServiceAccount metadata: annotations: name: gitlab-runner-gitlab-runner labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: "gitlab-runner" heritage: "Helm" --- # Source: gitlab-runner/templates/secrets.yaml apiVersion: v1 kind: Secret metadata: name: "gitlab-runner-gitlab-runner" labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: "gitlab-runner" heritage: "Helm" type: Opaque data: runner-registration-token: "FAKE-TOKEN" runner-token: "" --- # Source: gitlab-runner/templates/configmap.yaml apiVersion: v1 kind: ConfigMap metadata: name: gitlab-runner-gitlab-runner labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: "gitlab-runner" heritage: "Helm" data: entrypoint: | #!/bin/bash set -e mkdir -p /home/gitlab-runner/.gitlab-runner/ cp /scripts/config.toml /home/gitlab-runner/.gitlab-runner/ # Register the runner if [[ -f /secrets/accesskey && -f /secrets/secretkey ]]; then export CACHE_S3_ACCESS_KEY=$(cat /secrets/accesskey) export CACHE_S3_SECRET_KEY=$(cat /secrets/secretkey) fi if [[ -f /secrets/gcs-application-credentials-file ]]; then export GOOGLE_APPLICATION_CREDENTIALS="/secrets/gcs-application-credentials-file" elif [[ -f /secrets/gcs-application-credentials-file ]]; then export GOOGLE_APPLICATION_CREDENTIALS="/secrets/gcs-application-credentials-file" else if [[ -f /secrets/gcs-access-id && -f /secrets/gcs-private-key ]]; then export CACHE_GCS_ACCESS_ID=$(cat /secrets/gcs-access-id) # echo -e used to make private key multiline (in google json auth key private key is one line with \n) export CACHE_GCS_PRIVATE_KEY=$(echo -e $(cat /secrets/gcs-private-key)) fi fi if [[ -f /secrets/runner-registration-token ]]; then export REGISTRATION_TOKEN=$(cat 
/secrets/runner-registration-token) fi if [[ -f /secrets/runner-token ]]; then export CI_SERVER_TOKEN=$(cat /secrets/runner-token) fi if ! sh /scripts/register-the-runner; then exit 1 fi # Run pre-entrypoint-script if ! bash /scripts/pre-entrypoint-script; then exit 1 fi # Start the runner exec /entrypoint run --user=gitlab-runner \ --working-directory=/home/gitlab-runner config.toml: | concurrent = 10 check_interval = 30 log_level = "info" listen_address = ':9252' configure: | set -e cp /init-secrets/* /secrets register-the-runner: | #!/bin/bash MAX_REGISTER_ATTEMPTS=30 for i in $(seq 1 "${MAX_REGISTER_ATTEMPTS}"); do echo "Registration attempt ${i} of ${MAX_REGISTER_ATTEMPTS}" /entrypoint register \ --non-interactive retval=$? if [ ${retval} = 0 ]; then break elif [ ${i} = ${MAX_REGISTER_ATTEMPTS} ]; then exit 1 fi sleep 5 done exit 0 check-live: | #!/bin/bash if /usr/bin/pgrep -f .*register-the-runner; then exit 0 elif /usr/bin/pgrep gitlab.*runner; then exit 0 else exit 1 fi pre-entrypoint-script: | --- # Source: gitlab-runner/templates/role.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: "Role" metadata: name: gitlab-runner-gitlab-runner labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: "gitlab-runner" heritage: "Helm" rules: - apiGroups: [""] resources: ["*"] verbs: ["*"] --- # Source: gitlab-runner/templates/role-binding.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: "RoleBinding" metadata: name: gitlab-runner-gitlab-runner labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: "gitlab-runner" heritage: "Helm" roleRef: apiGroup: rbac.authorization.k8s.io kind: "Role" name: gitlab-runner-gitlab-runner subjects: - kind: ServiceAccount name: gitlab-runner-gitlab-runner namespace: "gitlab" --- # Source: gitlab-runner/templates/deployment.yaml apiVersion: apps/v1 kind: Deployment metadata: name: gitlab-runner-gitlab-runner labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: 
"gitlab-runner" heritage: "Helm" spec: replicas: 1 selector: matchLabels: app: gitlab-runner-gitlab-runner template: metadata: labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: "gitlab-runner" heritage: "Helm" annotations: checksum/configmap: a6623303f6fcc3a043e87ea937bb8399d2d0068a901aa9c3419ed5c7a5afa9db checksum/secrets: 32c7d2c16918961b7b84a005680f748e774f61c6f4e4da30650d400d781bbb30 prometheus.io/scrape: 'true' prometheus.io/port: '9252' spec: securityContext: runAsUser: 100 fsGroup: 65533 terminationGracePeriodSeconds: 3600 initContainers: - name: configure command: ['sh', '/config/configure'] image: gitlab/gitlab-runner:alpine-v13.4.1 imagePullPolicy: "IfNotPresent" env: - name: CI_SERVER_URL value: "https://gitlab.qa.joaocunha.eu/" - name: CLONE_URL value: "" - name: RUNNER_REQUEST_CONCURRENCY value: "1" - name: RUNNER_EXECUTOR value: "kubernetes" - name: REGISTER_LOCKED value: "true" - name: RUNNER_TAG_LIST value: "" - name: RUNNER_OUTPUT_LIMIT value: "4096" - name: KUBERNETES_IMAGE value: "ubuntu:16.04" - name: KUBERNETES_PRIVILEGED value: "true" - name: KUBERNETES_NAMESPACE value: "gitlab" - name: KUBERNETES_POLL_TIMEOUT value: "180" - name: KUBERNETES_CPU_LIMIT value: "" - name: KUBERNETES_CPU_LIMIT_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_MEMORY_LIMIT value: "" - name: KUBERNETES_MEMORY_LIMIT_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_CPU_REQUEST value: "" - name: KUBERNETES_CPU_REQUEST_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_MEMORY_REQUEST value: "" - name: KUBERNETES_MEMORY_REQUEST_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_SERVICE_ACCOUNT value: "" - name: KUBERNETES_SERVICE_CPU_LIMIT value: "" - name: KUBERNETES_SERVICE_MEMORY_LIMIT value: "" - name: KUBERNETES_SERVICE_CPU_REQUEST value: "" - name: KUBERNETES_SERVICE_MEMORY_REQUEST value: "" - name: KUBERNETES_HELPER_CPU_LIMIT value: "" - name: KUBERNETES_HELPER_MEMORY_LIMIT value: "" - name: KUBERNETES_HELPER_CPU_REQUEST value: "" - 
name: KUBERNETES_HELPER_MEMORY_REQUEST value: "" - name: KUBERNETES_HELPER_IMAGE value: "" - name: KUBERNETES_PULL_POLICY value: "" volumeMounts: - name: runner-secrets mountPath: /secrets readOnly: false - name: scripts mountPath: /config readOnly: true - name: init-runner-secrets mountPath: /init-secrets readOnly: true resources: {} serviceAccountName: gitlab-runner-gitlab-runner containers: - name: gitlab-runner-gitlab-runner image: gitlab/gitlab-runner:alpine-v13.4.1 imagePullPolicy: "IfNotPresent" lifecycle: preStop: exec: command: ["/entrypoint", "unregister", "--all-runners"] command: ["/bin/bash", "/scripts/entrypoint"] env: - name: CI_SERVER_URL value: "https://gitlab.qa.joaocunha.eu/" - name: CLONE_URL value: "" - name: RUNNER_REQUEST_CONCURRENCY value: "1" - name: RUNNER_EXECUTOR value: "kubernetes" - name: REGISTER_LOCKED value: "true" - name: RUNNER_TAG_LIST value: "" - name: RUNNER_OUTPUT_LIMIT value: "4096" - name: KUBERNETES_IMAGE value: "ubuntu:16.04" - name: KUBERNETES_PRIVILEGED value: "true" - name: KUBERNETES_NAMESPACE value: "gitlab" - name: KUBERNETES_POLL_TIMEOUT value: "180" - name: KUBERNETES_CPU_LIMIT value: "" - name: KUBERNETES_CPU_LIMIT_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_MEMORY_LIMIT value: "" - name: KUBERNETES_MEMORY_LIMIT_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_CPU_REQUEST value: "" - name: KUBERNETES_CPU_REQUEST_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_MEMORY_REQUEST value: "" - name: KUBERNETES_MEMORY_REQUEST_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_SERVICE_ACCOUNT value: "" - name: KUBERNETES_SERVICE_CPU_LIMIT value: "" - name: KUBERNETES_SERVICE_MEMORY_LIMIT value: "" - name: KUBERNETES_SERVICE_CPU_REQUEST value: "" - name: KUBERNETES_SERVICE_MEMORY_REQUEST value: "" - name: KUBERNETES_HELPER_CPU_LIMIT value: "" - name: KUBERNETES_HELPER_MEMORY_LIMIT value: "" - name: KUBERNETES_HELPER_CPU_REQUEST value: "" - name: KUBERNETES_HELPER_MEMORY_REQUEST value: "" - name: 
KUBERNETES_HELPER_IMAGE value: "" - name: KUBERNETES_PULL_POLICY value: "" livenessProbe: exec: command: ["/bin/bash", "/scripts/check-live"] initialDelaySeconds: 60 timeoutSeconds: 1 periodSeconds: 10 successThreshold: 1 failureThreshold: 3 readinessProbe: exec: command: ["/usr/bin/pgrep","gitlab.*runner"] initialDelaySeconds: 10 timeoutSeconds: 1 periodSeconds: 10 successThreshold: 1 failureThreshold: 3 ports: - name: metrics containerPort: 9252 volumeMounts: - name: runner-secrets mountPath: /secrets - name: etc-gitlab-runner mountPath: /home/gitlab-runner/.gitlab-runner - name: scripts mountPath: /scripts resources: {} volumes: - name: runner-secrets emptyDir: medium: "Memory" - name: etc-gitlab-runner emptyDir: medium: "Memory" - name: init-runner-secrets projected: sources: - secret: name: "gitlab-runner-gitlab-runner" items: - key: runner-registration-token path: runner-registration-token - key: runner-token path: runner-token - name: scripts configMap: name: gitlab-runner-gitlab-runner ``` ## Troubleshooting ### Error: `associative list with keys has an element that omits key field "protocol"` Due to [the bug in Kubernetes v1.19](https://github.com/kubernetes-sigs/structured-merge-diff/issues/130), you may see this error when installing GitLab Runner or any other application with the GitLab agent for Kubernetes. To fix it, either: - Upgrade your Kubernetes cluster to v1.20 or later. - Add `protocol: TCP` to `containers.ports` subsection: ```yaml ... ports: - name: metrics containerPort: 9252 protocol: TCP ... ``` ================================================ FILE: docs/install/kubernetes.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Install GitLab Runner in Kubernetes using the GitLab Helm chart. 
title: GitLab Runner Helm chart --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} The GitLab Runner Helm chart is the official way to deploy a GitLab Runner instance into your Kubernetes cluster. This chart configures GitLab Runner to: - Run using the [Kubernetes executor](../executors/kubernetes/_index.md) for GitLab Runner. - Provision a new pod in the specified namespace for each new CI/CD job. ## Configure GitLab Runner with the Helm chart Store your GitLab Runner configuration changes in `values.yaml`. For help configuring this file, see: - The default [`values.yaml`](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/main/values.yaml) configuration in the chart repository. - The Helm documentation for [Values Files](https://helm.sh/docs/chart_template_guide/values_files/), which explains how your values file overrides the default values. For GitLab Runner to run properly, you must set these values in your configuration file: - `gitlabUrl`: The full URL of the GitLab server (like `https://gitlab.example.com`) to register the runner against. - `rbac: { create: true }`: Create RBAC (role-based access control) rules for the GitLab Runner to create pods to run jobs in. - If you want to use an existing `serviceAccount`, add your service account name in `rbac`: ```yaml rbac: create: false serviceAccount: create: false name: your-service-account ``` - To learn about the minimal permissions the `serviceAccount` requires, see [Configure runner API permissions](../executors/kubernetes/_index.md#configure-runner-api-permissions). - `runnerToken`: The authentication token obtained when you [create a runner in the GitLab UI](https://docs.gitlab.com/ci/runners/runners_scope/#create-an-instance-runner-with-a-runner-authentication-token). - Set this token directly or store it in a secret. More [optional configuration settings](kubernetes_helm_chart_configuration.md) are available. 
You're now ready to [install GitLab Runner](#install-gitlab-runner-with-the-helm-chart)! ## Install GitLab Runner with the Helm chart Prerequisites: - Your GitLab server's API is reachable from the cluster. - Kubernetes 1.4 or later, with beta APIs enabled. - The `kubectl` CLI is installed locally, and authenticated for the cluster. - The [Helm client](https://helm.sh/docs/using_helm/#installing-the-helm-client) is installed locally on your machine. - You've set all [required values in `values.yaml`](#configure-gitlab-runner-with-the-helm-chart). To install GitLab Runner from the Helm chart: 1. Add the GitLab Helm repository: ```shell helm repo add gitlab https://charts.gitlab.io ``` 1. If you use Helm 2, initialize Helm with `helm init`. 1. Check which GitLab Runner versions you have access to: ```shell helm search repo -l gitlab/gitlab-runner ``` 1. If you can't access the latest versions of GitLab Runner, update the chart with this command: ```shell helm repo update gitlab ``` 1. After you [configure](#configure-gitlab-runner-with-the-helm-chart) GitLab Runner in your `values.yaml` file, run this command, changing parameters as needed: ```shell # For Helm 2 helm install --namespace --name gitlab-runner -f gitlab/gitlab-runner # For Helm 3 helm install --namespace gitlab-runner -f gitlab/gitlab-runner ``` - ``: The Kubernetes namespace where you want to install the GitLab Runner. - ``: The path to values file containing your custom configuration. To create it, see [Configure GitLab Runner with the Helm chart](#configure-gitlab-runner-with-the-helm-chart). - To install a specific version of the GitLab Runner Helm chart, add `--version ` to your `helm install` command. You can install any version of the chart, but more recent `values.yml` might be incompatible with older versions of the chart. ### Check available GitLab Runner Helm chart versions Helm charts and GitLab Runner do not follow the same versioning. 
To see version mappings between the two, run the command for your version of Helm: ```shell # For Helm 2 helm search -l gitlab/gitlab-runner # For Helm 3 helm search repo -l gitlab/gitlab-runner ``` An example of the output: ```plaintext NAME CHART VERSION APP VERSION DESCRIPTION gitlab/gitlab-runner 0.64.0 16.11.0 GitLab Runner gitlab/gitlab-runner 0.63.0 16.10.0 GitLab Runner gitlab/gitlab-runner 0.62.1 16.9.1 GitLab Runner gitlab/gitlab-runner 0.62.0 16.9.0 GitLab Runner gitlab/gitlab-runner 0.61.3 16.8.1 GitLab Runner gitlab/gitlab-runner 0.61.2 16.8.0 GitLab Runner ... ``` ## Upgrade GitLab Runner with the Helm chart Prerequisites: - You've installed your GitLab Runner chart. - You've paused the runner in GitLab. This prevents problems arising with the jobs, such as [authorization errors when they complete](../faq/_index.md#helm-chart-error--unauthorized). - You've ensured all jobs have completed. To change your configuration or update charts, use `helm upgrade`, changing parameters as needed: ```shell helm upgrade --namespace -f gitlab/gitlab-runner ``` - ``: The Kubernetes namespace where you've installed GitLab Runner. - ``: The path to the values file containing your custom configuration. To create it, see [Configure GitLab Runner with the Helm chart](#configure-gitlab-runner-with-the-helm-chart). - ``: The name you gave the chart when you installed it. In the installation section, the example named it `gitlab-runner`. - To update to a specific version of the GitLab Runner Helm chart, rather than the latest one, add `--version ` to your `helm upgrade` command. ## Uninstall GitLab Runner with the Helm chart To uninstall GitLab Runner: 1. Pause the runner in GitLab, and ensure any jobs have completed. This prevents job-related problems, such as [authorization errors on completion](../faq/_index.md#helm-chart-error--unauthorized). 1. 
Run this command, modifying it as needed: ```shell helm delete --namespace <NAMESPACE> <RELEASE_NAME> ``` - `<NAMESPACE>` is the Kubernetes namespace where GitLab Runner is installed. - `<RELEASE_NAME>` is the name you gave the chart when you installed it.
Create an `s3access` Kubernetes secret that contains `accesskey` and `secretkey`: ```shell kubectl create secret generic s3access \ --from-literal=accesskey="YourAccessKey" \ --from-literal=secretkey="YourSecretKey" ``` ### Google Cloud Storage (GCS) Google Cloud Storage can be configured with static credentials in multiple ways. #### Static credentials directly configured To configure GCS with credentials [with an access ID and a private key](../configuration/advanced-configuration.md#the-runnerscache-section): 1. Add this example to your `values.yaml`, changing values where needed: ```yaml runners: config: | [[runners]] [runners.kubernetes] image = "ubuntu:22.04" [runners.cache] Type = "gcs" Path = "runner" Shared = true [runners.cache.gcs] BucketName = "runners-cache" cache: secretName: gcsaccess ``` 1. Create a `gcsaccess` Kubernetes secret that contains `gcs-access-id` and `gcs-private-key`: ```shell kubectl create secret generic gcsaccess \ --from-literal=gcs-access-id="YourAccessID" \ --from-literal=gcs-private-key="YourPrivateKey" ``` #### Static credentials in a JSON file downloaded from GCP To [configure GCS with credentials in a JSON file](../configuration/advanced-configuration.md#the-runnerscache-section) downloaded from Google Cloud Platform: 1. Add this example to your `values.yaml`, changing values where needed: ```yaml runners: config: | [[runners]] [runners.kubernetes] image = "ubuntu:22.04" [runners.cache] Type = "gcs" Path = "runner" Shared = true [runners.cache.gcs] BucketName = "runners-cache" cache: secretName: google-application-credentials secrets: - name: google-application-credentials ``` 1. Create a Kubernetes secret called `google-application-credentials` and load the JSON file with it. 
Change the path as needed: ```shell kubectl create secret generic google-application-credentials \ --from-file=gcs-application-credentials-file=./PATH-TO-CREDENTIALS-FILE.json ``` ### Azure To [configure Azure Blob Storage](../configuration/advanced-configuration.md#the-runnerscacheazure-section): 1. Add this example to your `values.yaml`, changing values where needed: ```yaml runners: config: | [[runners]] [runners.kubernetes] image = "ubuntu:22.04" [runners.cache] Type = "azure" Path = "runner" Shared = true [runners.cache.azure] ContainerName = "CONTAINER_NAME" StorageDomain = "blob.core.windows.net" cache: secretName: azureaccess ``` 1. Create an `azureaccess` Kubernetes secret that contains `azure-account-name` and `azure-account-key`: ```shell kubectl create secret generic azureaccess \ --from-literal=azure-account-name="YourAccountName" \ --from-literal=azure-account-key="YourAccountKey" ``` To learn more about Helm chart caching, see [`values.yaml`](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/main/values.yaml). ### Persistent volume claim You can use persistent volume claims (PVCs) for caching if none of the object storage options work for you. To configure your cache to use a PVC: 1. [Create a PVC](https://kubernetes.io/docs/concepts/storage/persistent-volumes/#persistentvolumeclaims) in the namespace where job pods will run. > [!note] > If you want multiple job pods to access the same cache PVC, it must have the `ReadWriteMany` access mode. 1. Mount the PVC to the `/cache` directory: ```yaml runners: config: | [[runners]] [runners.kubernetes] image = "ubuntu:22.04" [[runners.kubernetes.volumes.pvc]] name = "cache-pvc" mount_path = "/cache" ``` ### Network File System Use a Network File System (NFS) for caching when object storage is not available. Prerequisites: - NFS is configured and accessible in your Kubernetes cluster. 
For more information, see [`nfs` volume](https://kubernetes.io/docs/concepts/storage/volumes/#nfs) in Kubernetes documentation. To configure your cache to use NFS: 1. Mount the NFS volume to the `/cache` directory: ```yaml runners: config: | [[runners]] [runners.kubernetes] image = "ubuntu:22.04" [[runners.kubernetes.volumes.nfs]] name = "nfs" mount_path = "/cache" read_only = false server = "foo.bar.com" path = "/path/on/nfs-share" ``` ## Enable RBAC support If your cluster has RBAC (role-based access controls) enabled, the chart can create its own service account, or you can [provide one](https://kubernetes.io/docs/reference/access-authn-authz/rbac/#service-account-permissions). - To have the chart create the service account for you, set `rbac.create` to true: ```yaml rbac: create: true ``` - To use an existing service account, set a `serviceAccount.name`: ```yaml rbac: create: false serviceAccount: create: false name: your-service-account ``` ## Control maximum runner concurrency A single runner deployed on Kubernetes can run multiple jobs in parallel by starting additional Runner pods. To change the maximum number of pods allowed at one time, edit the [`concurrent` setting](../configuration/advanced-configuration.md#the-global-section). It defaults to `10`: ```yaml ## Configure the maximum number of concurrent jobs ## ref: https://docs.gitlab.com/runner/configuration/advanced-configuration/#the-global-section ## concurrent: 10 ``` For more information about this setting, see [the global section](../configuration/advanced-configuration.md#the-global-section) in the advanced configuration documentation for GitLab Runner. ## Run Docker-in-Docker containers with GitLab Runner To use Docker-in-Docker containers with GitLab Runner: - To enable it, see [Use privileged containers for the runners](#use-privileged-containers-for-the-runners). 
- For instructions on running Docker-in-Docker, see the [GitLab Runner documentation](../executors/kubernetes/_index.md#using-docker-in-builds). ## Use privileged containers for the runners To use the Docker executable in your GitLab CI/CD jobs, configure the runner to use privileged containers. Prerequisites: - You understand the risks, which are described in the [GitLab CI/CD Runner documentation](../executors/kubernetes/_index.md#using-docker-in-builds). - Your GitLab Runner instance is registered against a specific project in GitLab, and you trust its CI/CD jobs. To enable privileged mode in `values.yaml`, add these lines: ```yaml runners: config: | [[runners]] [runners.kubernetes] # Run all containers with the privileged flag enabled. privileged = true ... ``` For more information, see the advanced configuration information about the [`[runners.kubernetes]`](../configuration/advanced-configuration.md#the-runnerskubernetes-section) section. ## Use an image from a private registry To use an image from a private registry, configure `imagePullSecrets`. 1. Create one or more secrets in the Kubernetes namespace used for the CI/CD job. This command creates a secret that works with `image_pull_secrets`: ```shell kubectl create secret docker-registry \ --namespace \ --docker-server="https://" \ --docker-username="" \ --docker-password="" ``` 1. For GitLab Runner Helm chart version 0.53.x and later, in `config.toml`, set `image_pull_secret` from the template provided in `runners.config`: ```yaml runners: config: | [[runners]] [runners.kubernetes] ## Specify one or more imagePullSecrets ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/ ## image_pull_secrets = [your-image-pull-secret] ``` For more information, see [Pull an image from a private registry](https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/) in the Kubernetes documentation. 1. 
For GitLab Runner Helm chart version 0.52 and earlier, in `values.yaml`, set a value for `runners.imagePullSecrets`. When you set this value, the container adds `--kubernetes-image-pull-secrets ""` to the image entrypoint script. This eliminates the need to configure the `image_pull_secrets` parameter in the Kubernetes executor `config.toml` settings. ```yaml runners: imagePullSecrets: [your-image-pull-secret] ``` > [!note] > The value of `imagePullSecrets` is not prefixed by a `name` tag, as is the convention in Kubernetes resources. This value requires > an array of one or more secret names, even if you use only one registry credential. For more details on how to create `imagePullSecrets`, see [Pull an Image from a Private Registry](https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/) in the Kubernetes documentation. When a job Pod is being created, GitLab Runner automatically handles image access in two steps: 1. GitLab Runner converts any existing Docker credentials into Kubernetes secrets so they can pull images from registries. It also checks that any manually configured imagePullSecrets actually exist in the cluster. For more information about statically defined credentials, credentials stores, or credential helpers, see [Access an image from a private container registry](https://docs.gitlab.com/ci/docker/using_docker_images/#access-an-image-from-a-private-container-registry). 1. GitLab Runner creates the job Pod and attaches both types of credentials to it: the `imagePullSecrets` and the converted Docker credentials, in that order. When Kubernetes needs to pull the container image, it tries the credentials one by one until it finds the one that works. ## Access GitLab with a custom certificate To use a custom certificate, provide a [Kubernetes secret](https://kubernetes.io/docs/concepts/configuration/secret/) to the GitLab Runner Helm chart. 
This secret is added to the container's `/home/gitlab-runner/.gitlab-runner/certs` directory: 1. [Prepare your certificate](#prepare-your-certificate) 1. [Create a Kubernetes secret](#create-a-kubernetes-secret) 1. [Provide the secret to the chart](#provide-the-secret-to-the-chart) ### Prepare your certificate Each key name in the Kubernetes secret is used as a filename in the directory, with the file content being the value associated with the key: - The filename used should be in the format `.crt`, for example `gitlab.your-domain.com.crt`. - Concatenate any intermediate certificates together with your server certificate in the same file. - The hostname used should be the one the certificate is registered for. ### Create a Kubernetes secret If you installed GitLab Helm chart using the [auto-generated self-signed wildcard certificate](https://docs.gitlab.com/charts/installation/tls/#option-4-use-auto-generated-self-signed-wildcard-certificate) method, a secret was created for you. If you did not install GitLab Helm chart with the auto-generated self-signed wildcard certificate, create a secret. These commands store your certificate as a secret in Kubernetes, and present it to the GitLab Runner containers as a file. - If your certificate is in the current directory, and follows the format ``, modify this command as needed: ```shell kubectl create secret generic \ --namespace \ --from-file= ``` - ``: The Kubernetes namespace where you want to install the GitLab Runner. - ``: The Kubernetes Secret resource name, like `gitlab-domain-cert`. - ``: The filename for the certificate in your current directory to import into the secret. - If your certificate is in another directory, or doesn't follow the format ``, you must specify the filename to use as the target: ```shell kubectl create secret generic \ --namespace \ --from-file== ``` - `` is the name of the certificate file as presented to the Runner containers, like `gitlab.hostname.crt`. 
- `` is the filename for the certificate, relative to your current directory, to import into the secret. For example: `cert-directory/my-gitlab-certificate.crt`. ### Provide the secret to the chart In `values.yaml`, set `certsSecretName` to the resource name of a Kubernetes secret object in the same namespace. This enables you to pass your custom certificate for GitLab Runner to use. In the previous example, the resource name was `gitlab-domain-cert`: ```yaml certsSecretName: ``` For more information, see the [supported options for self-signed certificates](../configuration/tls-self-signed.md#supported-options-for-self-signed-certificates-targeting-the-gitlab-server) targeting the GitLab server. ## Set pod labels to CI environment variable keys You can't use environment variables as pod labels in the `values.yaml` file. For more information, see [Can't set environment variable key as pod label](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/issues/173). Use [the workaround described in the issue](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/issues/173#note_351057890) as a temporary solution. ## Switch to the Ubuntu-based `gitlab-runner` Docker image By default, the GitLab Runner Helm chart uses the Alpine version of the `gitlab/gitlab-runner` image, which uses `musl libc`. You might need to switch to the Ubuntu-based image, which uses `glibc`. To do this, specify the image your `values.yaml` file with the following values: ```yaml # Specify the Ubuntu image, and set the version. You can also use the `ubuntu` or `latest` tags. image: gitlab/gitlab-runner:v17.3.0 # Update the security context values to the user ID in the Ubuntu image securityContext: fsGroup: 999 runAsUser: 999 ``` ## Run with non-root user By default, the GitLab Runner images don't work with non-root users. 
The [GitLab Runner UBI](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/container_registry/1766421) and [GitLab Runner Helper UBI](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/container_registry/1766433) images are designed for that scenario. To use them, change the GitLab Runner and GitLab Runner Helper images in `values.yaml`: ```yaml image: registry: registry.gitlab.com image: gitlab-org/ci-cd/gitlab-runner-ubi-images/gitlab-runner-ocp tag: v16.11.0 securityContext: runAsNonRoot: true runAsUser: 999 runners: config: | [[runners]] [runners.kubernetes] helper_image = "registry.gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/gitlab-runner-helper-ocp:x86_64-v16.11.0" [runners.kubernetes.pod_security_context] run_as_non_root = true run_as_user = 59417 ``` Although `run_as_user` points to the user ID of `nonroot` user (59417), the images work with any user ID. It's important that this user ID is part of the root group. Being part of the root group doesn't give it any specific privileges. ## Use a FIPS-compliant GitLab Runner To use a [FIPS-compliant GitLab Runner](requirements.md#fips-compliant-gitlab-runner), change the GitLab Runner image and the Helper image in `values.yaml`: ```yaml image: registry: docker.io image: gitlab/gitlab-runner tag: ubi-fips runners: config: | [[runners]] [runners.kubernetes] helper_image_flavor = "ubi-fips" ``` ## Use a configuration template To [configure the behavior of GitLab Runner build pod in Kubernetes](../executors/kubernetes/_index.md#configuration-settings), use a [configuration template file](../register/_index.md#register-with-a-configuration-template). Configuration templates can configure any field on the runner, without sharing specific runner configuration options with the Helm chart. 
For example, these default settings [found in the `values.yaml` file](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/main/values.yaml) in the `chart` repository: ```yaml runners: config: | [[runners]] [runners.kubernetes] image = "ubuntu:22.04" ``` Values in the `config:` section should use TOML (` = ` instead of `: `, as `config.toml` is embedded in `values.yaml`. For executor-specific configuration, see [the `values.yaml`](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/main/values.yaml) file. ================================================ FILE: docs/install/kubernetes_troubleshooting.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Troubleshooting GitLab Runner Helm chart --- ## Error: `Job failed (system failure): secrets is forbidden` If you see the following error, [enable RBAC support](kubernetes_helm_chart_configuration.md#enable-rbac-support) to correct it: ```plaintext Using Kubernetes executor with image alpine ... ERROR: Job failed (system failure): secrets is forbidden: User "system:serviceaccount:gitlab:default" cannot create resource "secrets" in API group "" in the namespace "gitlab" ``` ## Error: `Unable to mount volumes for pod` If you see mount volume failures for a required secret, ensure that you have stored registration tokens or runner tokens in secrets. ## Slow artifact uploads to Google Cloud Storage Artifact uploads to Google Cloud Storage can experience reduced performance (a slower bandwidth rate) due to the runner helper pod becoming CPU bound. To mitigate this problem, increase the Helper pod CPU Limit: ```yaml runners: config: | [[runners]] [runners.kubernetes] helper_cpu_limit = "250m" ``` For more information, see [issue 28393](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28393#note_722733798). 
## Error: `PANIC: creating directory: mkdir /nonexistent: permission denied` To resolve this error, switch to the [Ubuntu-based GitLab Runner Docker image](kubernetes_helm_chart_configuration.md#switch-to-the-ubuntu-based-gitlab-runner-docker-image). ## Error: `invalid header field for "Private-Token"` You might see this error if the `runner-token` value in `gitlab-runner-secret` is base64-encoded with a newline character (`\n`) at the end: ```plaintext couldn't execute POST against "https:/gitlab.example.com/api/v4/runners/verify": net/http: invalid header field for "Private-Token" ``` To resolve this issue, ensure a newline (`\n`) is not appended to the token value. For example: `echo -n | base64`. ## Error: `FATAL: Runner configuration is reserved` You might get the following error in the pod logs after installing the GitLab Runner Helm chart: ```plaintext FATAL: Runner configuration other than name and executor configuration is reserved (specifically --locked, --access-level, --run-untagged, --maximum-timeout, --paused, --tag-list, and --maintenance-note) and cannot be specified when registering with a runner authentication token. This configuration is specified on the GitLab server. Please try again without specifying any of those arguments ``` This error happens when you use an authentication token, and provide a token through a secret. To fix it, review your values YAML file and make sure that you are not using any deprecated values. For more information about which values are deprecated, see [Installing GitLab Runner with Helm chart](https://docs.gitlab.com/ci/runners/new_creation_workflow/#installing-gitlab-runner-with-helm-chart). 
================================================ FILE: docs/install/linux-manually.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Manually download and install the GitLab Runner binary on Linux. title: Install GitLab Runner manually on GNU/Linux --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} You can install GitLab Runner manually by using a `deb` or `rpm` package or a binary file. Use this approach as a last resort if: - You can't use the deb/rpm repository to install GitLab Runner - Your GNU/Linux OS is not supported ## Prerequisites Before running GitLab Runner manually: - If you plan to use the Docker executor, install Docker first. - Review the FAQ section for common problems and solutions. ## Using deb/rpm package You can download and install GitLab Runner by using a `deb` or `rpm` package. ### Download To download the appropriate package for your system: 1. Find the latest filename and options at . 1. Download the runner-helper version that matches your package manager or architecture. 1. Choose a version and download a binary, as described in the documentation for [downloading any other tagged releases](bleeding-edge.md#download-any-other-tagged-release) for bleeding edge GitLab Runner releases. For example, for Debian or Ubuntu: ```shell # Replace ${arch} with any of the supported architectures, e.g. 
amd64, arm, arm64 # A full list of architectures can be found here https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/index.html curl -LJO "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/deb/gitlab-runner-helper-images.deb" curl -LJO "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/deb/gitlab-runner_${arch}.deb" ``` For example, for CentOS or Red Hat Enterprise Linux: ```shell # Replace ${arch} with any of the supported architectures, e.g. x86_64, aarch64, armhfp # A full list of architectures can be found here https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/index.html curl -LJO "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/rpm/gitlab-runner-helper-images.rpm" curl -LJO "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/rpm/gitlab-runner_${arch}.rpm" ``` For example, for FIPS compliant GitLab Runner on RHEL: ```shell # Currently only x86_64 is a supported arch # The FIPS compliant GitLab Runner version continues to include the helper images in one package. # A full list of architectures can be found here https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/index.html curl -LJO "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/rpm/gitlab-runner_x86_64-fips.rpm" ``` ### Install 1. Install the package for your system as follows. 
For example, for Debian or Ubuntu: ```shell dpkg -i gitlab-runner-helper-images.deb gitlab-runner_<arch>.deb ``` For example, for CentOS or Red Hat Enterprise Linux: ```shell dnf install -y gitlab-runner-helper-images.rpm gitlab-runner_<arch>.rpm ``` ### Upgrade Download the latest package for your system, then upgrade as follows: For example, for Debian or Ubuntu: ```shell dpkg -i gitlab-runner_<arch>.deb ``` For example, for CentOS or Red Hat Enterprise Linux: ```shell dnf install -y gitlab-runner-helper-images.rpm gitlab-runner_<arch>.rpm ```
"https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-linux-loong64" # Linux x86-64 FIPS Compliant sudo curl -L --output /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-linux-amd64-fips" ``` You can download a binary for every available version as described in [Bleeding Edge - download any other tagged release](bleeding-edge.md#download-any-other-tagged-release). 1. Give it permissions to execute: ```shell sudo chmod +x /usr/local/bin/gitlab-runner ``` 1. Create a GitLab CI user: ```shell sudo useradd --comment 'GitLab Runner' --create-home gitlab-runner --shell /bin/bash ``` 1. Install and run as service: ```shell sudo gitlab-runner install --user=gitlab-runner --working-directory=/home/gitlab-runner sudo gitlab-runner start ``` Ensure you have `/usr/local/bin/` in `$PATH` for root or you might get a `command not found` error. Alternately, you can install `gitlab-runner` in a different location, like `/usr/bin/`. > [!note] > If `gitlab-runner` is installed and run as a service, it runs as root, > but executes jobs as a user specified by the `install` command. > This means that some of the job functions like cache and > artifacts must execute the `/usr/local/bin/gitlab-runner` command. > Therefore, the user under which jobs are run needs to have access to the executable. ### Upgrade 1. Stop the service (you need elevated command prompt as before): ```shell sudo gitlab-runner stop ``` 1. Download the binary to replace the GitLab Runner executable. For example: ```shell sudo curl -L --output /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-linux-amd64" ``` You can download a binary for every available version as described in [Bleeding Edge - download any other tagged release](bleeding-edge.md#download-any-other-tagged-release). 1. 
Give it permissions to execute: ```shell sudo chmod +x /usr/local/bin/gitlab-runner ``` 1. Start the service: ```shell sudo gitlab-runner start ``` ## Next steps After installation, [register a runner](../register/_index.md) to complete the setup. The runner binary doesn't include pre-built helper images. You can use these commands to download the corresponding version of the helper image archive and copy it to the appropriate location: ```shell mkdir -p /usr/local/bin/out/helper-images cd /usr/local/bin/out/helper-images ``` Choose the appropriate helper image for your architecture:
Ubuntu helper images ```shell # Linux x86-64 ubuntu wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-ubuntu-x86_64.tar.xz # Linux x86-64 ubuntu pwsh wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-ubuntu-x86_64-pwsh.tar.xz # Linux s390x ubuntu wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-ubuntu-s390x.tar.xz # Linux ppc64le ubuntu wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-ubuntu-ppc64le.tar.xz # Linux arm64 ubuntu wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-ubuntu-arm64.tar.xz # Linux arm ubuntu wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-ubuntu-arm.tar.xz # Linux x86-64 ubuntu specific version - v17.10.0 wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/v17.10.0/helper-images/prebuilt-ubuntu-x86_64.tar.xz ```
Alpine helper images ```shell # Linux x86-64 alpine wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-alpine-x86_64.tar.xz # Linux x86-64 alpine pwsh wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-alpine-x86_64-pwsh.tar.xz # Linux s390x alpine wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-alpine-s390x.tar.xz # Linux riscv64 alpine edge wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-alpine-edge-riscv64.tar.xz # Linux arm64 alpine wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-alpine-arm64.tar.xz # Linux arm alpine wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-alpine-arm.tar.xz ```
## Additional resources - [Docker executor documentation](../executors/docker.md) - [Install Docker](https://docs.docker.com/engine/install/centos/#install-docker-ce) - [Download other GitLab Runner versions](bleeding-edge.md#download-any-other-tagged-release) - [FIPS compliant GitLab Runner information](requirements.md#fips-compliant-gitlab-runner) - [GitLab Runner FAQ](../faq/_index.md) - [deb/rpm repository installation](linux-repository.md) ================================================ FILE: docs/install/linux-repository.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Install GitLab Runner from a GitLab repository using your package manager. title: Install GitLab Runner using the official GitLab repositories --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} To install GitLab Runner, you can use a package from [the GitLab repository](https://packages.gitlab.com/runner/gitlab-runner). ## Supported distributions GitLab provides packages for the following supported versions of Linux distributions. New runner `deb` or `rpm` packages for new OS distribution releases are added automatically when supported by our package hosting system. 
### Deb-based Distributions | Distribution | Supported Versions | |--------------|--------------------| | Debian | Duke, Forky, Trixie, Bookworm, Bullseye | | LinuxMint | Xia, Wilma, Virginia, Victoria, Vera, Vanessa | | Raspbian | Duke, Forky, Trixie, Bookworm, Bullseye | | Ubuntu | Questing, Noble, Jammy, Focal, Bionic | ### Rpm-based Distributions | Distribution | Supported Versions | |--------------|--------------------| | Amazon Linux | 2025, 2023, 2 | | Red Hat Enterprise Linux | 10, 9, 8, 7 | | Fedora | 43, 42 | | Oracle Linux | 10, 9, 8, 7 | | openSUSE | 16.0, 15.6 | | SUSE Linux Enterprise Server | 15.7, 15.6, 15.5, 15.4, 12.5 | Depending on your setup, other Debian or RPM based distributions may also be supported. This refers to distributions that are derivative of a supported GitLab Runner distribution and that have compatible package repositories. For example, Deepin is a Debian derivative. So, the runner `deb` package should install and run on Deepin. You may also be able to [install GitLab Runner as a binary](linux-manually.md#using-binary-file) on other Linux distributions. > [!note] > Packages for distributions that are not on the list are not available from our package repository. You can [install](linux-manually.md#using-debrpm-package) them manually by downloading the RPM or DEB package from our S3 bucket. ## Install GitLab Runner To install GitLab Runner: 1. Add the official GitLab repository: {{< tabs >}} {{< tab title="Debian/Ubuntu/Mint" >}} 1. Download the repository configuration script: ```shell curl -L "https://packages.gitlab.com/install/repositories/runner/gitlab-runner/script.deb.sh" -o script.deb.sh ``` 1. Inspect the script before running it: ```shell less script.deb.sh ``` 1. Run the script: ```shell sudo bash script.deb.sh ``` {{< /tab >}} {{< tab title="RHEL/CentOS/Fedora/Amazon Linux" >}} 1. 
Download the repository configuration script: ```shell curl -L "https://packages.gitlab.com/install/repositories/runner/gitlab-runner/script.rpm.sh" -o script.rpm.sh ``` 1. Inspect the script before running it: ```shell less script.rpm.sh ``` 1. Run the script: ```shell sudo bash script.rpm.sh ``` {{< /tab >}} {{< /tabs >}} 1. Install the latest version of GitLab Runner, or skip to the next step to install a specific version: > [!note] > The `skel` directory usage is disabled by default to prevent > [`No such file or directory` job failures](#error-no-such-file-or-directory-job-failures). {{< tabs >}} {{< tab title="Debian/Ubuntu/Mint" >}} ```shell sudo apt install gitlab-runner ``` {{< /tab >}} {{< tab title="RHEL/CentOS/Fedora/Amazon Linux" >}} ```shell sudo yum install gitlab-runner or sudo dnf install gitlab-runner ``` {{< /tab >}} {{< /tabs >}} > [!note] > A FIPS 140-2 compliant version of GitLab Runner is > available for RHEL distributions. You can install this version by using > `gitlab-runner-fips` as the package name, instead of `gitlab-runner`. 1. To install a specific version of GitLab Runner: {{< tabs >}} {{< tab title="Debian/Ubuntu/Mint" >}} > [!note] > As of `gitlab-runner` version `v17.7.1`, when you install a specific version of `gitlab-runner` that is not the latest > version, you must explicitly install the required `gitlab-runner-helper-packages` for that version. This requirement > exists due to an `apt`/`apt-get` limitation. ```shell apt-cache madison gitlab-runner sudo apt install gitlab-runner=17.7.1-1 gitlab-runner-helper-images=17.7.1-1 ``` If you attempt to install a specific version of `gitlab-runner` without installing the same version of `gitlab-runner-helper-images`, you might encounter the following error: ```shell sudo apt install gitlab-runner=17.7.1-1 ... 
The following packages have unmet dependencies: gitlab-runner : Depends: gitlab-runner-helper-images (= 17.7.1-1) but 17.8.3-1 is to be installed E: Unable to correct problems, you have held broken packages. ``` {{< /tab >}} {{< tab title="RHEL/CentOS/Fedora/Amazon Linux" >}} ```shell yum list gitlab-runner --showduplicates | sort -r sudo yum install gitlab-runner-17.2.0-1 ``` {{< /tab >}} {{< /tabs >}} 1. [Register a runner](../register/_index.md). After completing the above steps, a runner can be started and can be used with your projects! Make sure that you read the [FAQ](../faq/_index.md) section which describes some of the most common problems with GitLab Runner. ## Helper images package The `gitlab-runner-helper-images` package contains pre-built helper container images that GitLab Runner uses during job execution. These images provide the necessary tools and utilities to clone repositories, upload artifacts, and manage caches. The `gitlab-runner-helper-images` package includes helper images for the following operating systems and architectures: Alpine-based images (latest): - `alpine-arm` - `alpine-arm64` - `alpine-riscv64` - `alpine-s390x` - `alpine-x86_64` - `alpine-x86_64-pwsh` Ubuntu-based images (24.04): - `ubuntu-arm` - `ubuntu-arm64` - `ubuntu-ppc64le` - `ubuntu-s390x` - `ubuntu-x86_64` - `ubuntu-x86_64-pwsh` ### Automatic helper image download If a helper image for a specific operating system and architecture combination is not available on the host system, GitLab Runner automatically downloads the required image when needed. Manual installation is not required for architectures that are not included in the `gitlab-runner-helper-images package`. This automatic download ensures that the runner can support additional architectures (such as `loong64`) without requiring manual intervention or separate package installations. 
## Upgrade GitLab Runner To install the latest version of GitLab Runner: {{< tabs >}} {{< tab title="Debian/Ubuntu/Mint" >}} ```shell sudo apt update sudo apt install gitlab-runner ``` {{< /tab >}} {{< tab title="RHEL/CentOS/Fedora/Amazon Linux" >}} ```shell sudo yum update sudo yum install gitlab-runner ``` {{< /tab >}} {{< /tabs >}} ## GPG signatures for package installation The GitLab Runner project provides two types of GPG signatures for the package installation method: - [Repository metadata signing](#repository-metadata-signing) - [Package signing](#package-signing) ### Repository metadata signing To verify that the package information downloaded from the remote repository can be trusted, the package manager uses repository metadata signing. The signature is verified when you use a command like `apt-get update`, so the information about available packages is updated **before any package is downloaded and installed**. Verification failure should also cause the package manager to reject the metadata. This means that you cannot download and install any package from the repository until the problem that caused the signature mismatch is found and resolved. GPG public keys used for package metadata signature verification are installed automatically on first installation done with the instructions above. For key updates in the future, existing users need to manually download and install the new keys. We use one key for all our projects hosted under `https://packages.gitlab.com`. You can find the details about the key used in the [Linux package documentation](https://docs.gitlab.com/omnibus/update/package_signatures/#package-repository-metadata-signing-key). This documentation page also lists [all keys used in the past](https://docs.gitlab.com/omnibus/update/package_signatures/#previous-package-signing-keys). ### Package signing Repository metadata signing proves that the downloaded version information originates at `https://packages.gitlab.com`. It does not prove the integrity of the packages themselves.
Whatever was uploaded to `https://packages.gitlab.com` - authorized or not - is properly verified as long as the metadata transfer from the repository to the user was not affected. With package signing, each package is signed when it's built. As long as you can trust the build environment and the secrecy of the used GPG key, you can verify package authenticity. A valid signature on the package proves that its origin is authenticated and its integrity was not violated. Package signing verification is enabled by default only in some of the Debian/RPM based distributions. To use this type of verification, you might need to adjust the configuration. GPG keys used for package signature verification can be different for each of the repositories hosted at `https://packages.gitlab.com`. The GitLab Runner project uses its own key pair for this type of signature. #### RPM-based distributions The RPM format contains a full implementation of GPG signing functionality, and thus is fully integrated with the package management systems based upon that format. You can find the technical description of how to configure package signature verification for RPM-based distributions in the [Linux package documentation](https://docs.gitlab.com/omnibus/update/package_signatures/#rpm-based-distributions). The GitLab Runner differences are: - The public key package that should be installed is named `gpg-pubkey-35dfa027-60ba0235`. - The repository file for RPM-based distributions is named `/etc/yum.repos.d/runner_gitlab-runner.repo` (for the stable release) or `/etc/yum.repos.d/runner_unstable.repo` (for the unstable releases). - The [package signing public key](#current-gpg-public-key) can be imported from `https://packages.gitlab.com/gpgkey/runner/49F16C5CC3A0F81F.pub.gpg`. #### Debian-based distributions The `deb` format does not officially contain a default and included method for signing packages. The GitLab Runner project uses the `dpkg-sig` tool for signing and verifying signatures on packages.
This method supports only manual verification of packages. To verify a `deb` package: 1. Install `dpkg-sig`: ```shell apt update && apt install dpkg-sig ``` 1. Download and import the [package signing public key](#current-gpg-public-key): ```shell curl -JLO "https://packages.gitlab.com/gpgkey/runner/49F16C5CC3A0F81F.pub.gpg" gpg --import runner-gitlab-runner-49F16C5CC3A0F81F.pub.gpg ``` 1. Verify downloaded package with `dpkg-sig`: ```shell dpkg-sig --verify gitlab-runner_amd64.deb Processing gitlab-runner_amd64.deb... GOODSIG _gpgbuilder 931DA69CFA3AFEBBC97DAA8C6C57C29C6BA75A4E 1623755049 ``` If a package has an invalid signature or is signed with an invalid key (for example a revoked one), the output is similar to the following: ```shell dpkg-sig --verify gitlab-runner_amd64.deb Processing gitlab-runner_amd64.deb... BADSIG _gpgbuilder ``` If the key is not present in the user's keyring, the output is similar to: ```shell dpkg-sig --verify gitlab-runner_amd64.v13.1.0.deb Processing gitlab-runner_amd64.v13.1.0.deb... UNKNOWNSIG _gpgbuilder 880721D4 ``` #### Current GPG public key Download the current public GPG key used for package signing from `https://packages.gitlab.com/runner/gitlab-runner/gpgkey/runner-gitlab-runner-49F16C5CC3A0F81F.pub.gpg`. | Key Attribute | Value | |---------------|-------| | Name | `GitLab, Inc.` | | EMail | `support@gitlab.com` | | Fingerprint | `931D A69C FA3A FEBB C97D AA8C 6C57 C29C 6BA7 5A4E` | | Expiry | `2026-04-28` | > [!note] > The same key is used by the GitLab Runner project to sign `release.sha256` files for the S3 releases > available in the `https://gitlab-runner-downloads.s3.amazonaws.com/` bucket. #### Previous GPG public keys Keys used in the past can be found in the table below. For keys that were revoked, it's highly recommended to remove them from the package signing verification configuration. Signatures made by the following keys should not be trusted anymore. | Sl. No.
| Key Fingerprint | Status | Expiry Date | Download (revoked keys only) | |---------|------------------------------------------------------|-----------|--------------|------------------------------| | 1 | `3018 3AC2 C4E2 3A40 9EFB E705 9CE4 5ABC 8807 21D4` | `revoked` | `2021-06-08` | [revoked key](https://gitlab.com/gitlab-org/gitlab-runner/-/blob/main/docs/install/gpg-keys/9CE45ABC880721D4.pub.gpg) | | 2 | `09E5 7083 F34C CA94 D541 BC58 A674 BF81 35DF A027` | `revoked` | `2023-04-26` | [revoked key](https://gitlab.com/gitlab-org/gitlab-runner/-/blob/main/docs/install/gpg-keys/A674BF8135DFA027.pub.gpg) | ## Troubleshooting Here are some tips on troubleshooting and resolving issues when installing GitLab Runner. ### Error: `No such file or directory` job failures Sometimes the default skeleton (`skel`) directory causes issues for GitLab Runner, and it fails to run a job. See [issue 4449](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4449) and [issue 1379](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/1379). To avoid this, when you install GitLab Runner, a `gitlab-runner` user is created, and by default, the home directory is created without any skeleton in it. Shell configuration added to the home directory with the usage of `skel` may interfere with the job execution. This configuration can introduce unexpected problems like the ones mentioned above. 
If you had created the runner before the avoidance of `skel` was made the default behavior, you can try removing the following dotfiles: ```shell sudo rm /home/gitlab-runner/.profile sudo rm /home/gitlab-runner/.bashrc sudo rm /home/gitlab-runner/.bash_logout ``` If you need to use the `skel` directory to populate the newly created `$HOME` directory, you must set the `GITLAB_RUNNER_DISABLE_SKEL` variable explicitly to `false` before you install the runner: {{< tabs >}} {{< tab title="Debian/Ubuntu/Mint" >}} ```shell export GITLAB_RUNNER_DISABLE_SKEL=false; sudo -E apt-get install gitlab-runner ``` {{< /tab >}} {{< tab title="RHEL/CentOS/Fedora/Amazon Linux" >}} ```shell export GITLAB_RUNNER_DISABLE_SKEL=false; sudo -E yum install gitlab-runner ``` {{< /tab >}} {{< /tabs >}} ================================================ FILE: docs/install/operator.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Install GitLab Runner using the GitLab Operator for Kubernetes. title: Install GitLab Runner Operator --- ## Install on Red Hat OpenShift {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} Install GitLab Runner on Red Hat OpenShift v4 and later using the [GitLab Runner Operator](https://gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator) from the stable channel of OperatorHub in OpenShift's web console. Once installed, you can run your GitLab CI/CD jobs using the newly deployed GitLab Runner instance. Each CI/CD job runs in a separate pod. ### Prerequisites - OpenShift 4.x cluster with administrator privileges - GitLab Runner registration token ### Install the OpenShift Operator First you must install the OpenShift Operator. 1. Open the OpenShift UI and sign in as a user with administrator privileges. 1. 
In the left pane, select **Operators**, then **OperatorHub**. 1. In the main pane, below **All Items**, search for the keyword `GitLab Runner`. ![GitLab Operator](img/openshift_allitems_v13_3.png) 1. To install, select the GitLab Runner Operator. 1. On the GitLab Runner Operator summary page, select **Install**. 1. On the Install Operator page: 1. Under **Update Channel**, select **stable**. 1. Under **Installed Namespace**, select the desired namespace and select **Install**. ![GitLab Operator Install Page](img/openshift_installoperator_v13_3.png) On the Installed Operators page, when the GitLab Operator is ready, the status changes to **Succeeded**. ![GitLab Operator Install Status](img/openshift_success_v13_3.png) ## Install on Kubernetes {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} Install GitLab Runner on Kubernetes v1.21 and later using the [GitLab Runner Operator](https://gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator) from the stable channel of [OperatorHub.io](https://operatorhub.io/operator/gitlab-runner-operator). Once installed, you can run your GitLab CI/CD jobs using the newly deployed GitLab Runner instance. Each CI/CD job runs in a separate pod. ### Prerequisites - Kubernetes v1.21 and later - Cert manager v1.7.1 ### Install the Kubernetes Operator Follow the instructions at [OperatorHub.io](https://operatorhub.io/operator/gitlab-runner-operator). 1. Install the prerequisites. 1. On the top right, select **Install** and follow the instructions to install `olm` and the Operator. #### Install GitLab Runner 1. Obtain a runner authentication token. 
You can either: - Create an [instance](https://docs.gitlab.com/ci/runners/runners_scope/#create-an-instance-runner-with-a-runner-authentication-token), [group](https://docs.gitlab.com/ci/runners/runners_scope/#create-a-group-runner-with-a-runner-authentication-token), or [project](https://docs.gitlab.com/ci/runners/runners_scope/#create-a-project-runner-with-a-runner-authentication-token) runner. - Locate the runner authentication token in the `config.toml` file. Runner authentication tokens have the prefix, `glrt-`. 1. Create the secret file with your GitLab Runner token: ```shell cat > gitlab-runner-secret.yml << EOF apiVersion: v1 kind: Secret metadata: name: gitlab-runner-secret type: Opaque # Only one of the following fields can be set. The Operator fails to register the runner if both are provided. # NOTE: runner-registration-token is deprecated and will be removed in GitLab 18.0. You should use runner-token instead. stringData: runner-token: REPLACE_ME # your project runner token # runner-registration-token: "" # your project runner secret EOF ``` 1. Create the `secret` in your cluster by running: ```shell kubectl apply -f gitlab-runner-secret.yml ``` 1. Create the Custom Resource Definition (CRD) file and include the following configuration. ```shell cat > gitlab-runner.yml << EOF apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: gitlab-runner spec: gitlabUrl: https://gitlab.example.com buildImage: alpine token: gitlab-runner-secret EOF ``` 1. Now apply the `CRD` file by running the command: ```shell kubectl apply -f gitlab-runner.yml ``` 1. Confirm that GitLab Runner is installed by running: ```shell kubectl get runner NAME AGE gitlab-runner 5m ``` 1. 
The runner pod should also be visible: ```shell kubectl get pods NAME READY STATUS RESTARTS AGE gitlab-runner-bf9894bdb-wplxn 1/1 Running 0 5m ``` #### Install other versions of GitLab Runner Operator for OpenShift If you do not want to use the available GitLab Runner Operator version in the Red Hat OperatorHub, you can install a different version. To find out the official available Operator versions, view the [tags in the `gitlab-runner-operator` repository](https://gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/-/tags). To find out which version of GitLab Runner the Operator is running, view the contents of the `APP_VERSION` file in the `gitlab-runner-operator` repository at the commit or tag you are interested in. To install a specific version, create this `catalogsource.yaml` file and replace `TAG_OR_COMMIT` with a tag or a specific commit: > [!note] > When using an image for a specific commit, the tag format is `v0.0.1-COMMIT_SHA`. For example: `registry.gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/gitlab-runner-operator-catalog-source:v0.0.1-f5a798af`. ```yaml apiVersion: operators.coreos.com/v1alpha1 kind: CatalogSource metadata: name: gitlab-runner-catalog namespace: openshift-marketplace spec: sourceType: grpc image: registry.gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/gitlab-runner-operator-catalog-source:TAG_OR_COMMIT displayName: GitLab Runner Operators publisher: GitLab Community ``` Create the `CatalogSource` with: ```shell oc apply -f catalogsource.yaml ``` In a minute the new Runner should show up in the OpenShift cluster's OperatorHub section. ## Install GitLab Runner Operator on Kubernetes clusters in offline environments Prerequisites: - Images required by the installation process are accessible. To pull container images during installation, the GitLab Runner Operator requires a connection to the public internet on an external network.
If you have Kubernetes clusters installed in an offline environment, use a local image registry or package repository to pull images or packages during installation. The local repository must provide the following images: | Image | Default value | |-------------------------------------------------------|---------------| | **GitLab Runner Operator** image | `registry.gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/gitlab-runner-operator:vGITLAB_RUNNER_OPERATOR_VERSION` | | **GitLab Runner** and **GitLab Runner Helper** images | These images are downloaded from the GitLab Runner UBI Images registry and are used when installing the Runner Custom Resources. The version used depends on your requirements. | | **RBAC Proxy** image | `registry.gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/openshift4/ose-kube-rbac-proxy:v4.13.0` | 1. Set up local repositories or registries in the disconnected network environment to host the downloaded software packages and container images. You can use: - A Docker registry for container images. - A local package registry for Kubernetes binaries and dependencies. 1. For GitLab Runner Operator v1.23.2 and later, download the latest version of `operator.k8s.yaml` file: ```shell curl -O "https://gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/-/releases/vGITLAB_RUNNER_OPERATOR_VERSION/downloads/operator.k8s.yaml" ``` 1. In the `operator.k8s.yaml` file, update the following URLs: - `GitLab Runner Operator image` - `RBAC Proxy image` 1. Install the updated version of the `operator.k8s.yaml` file: ```shell kubectl apply -f PATH_TO_UPDATED_OPERATOR_K8S_YAML ``` In these commands, `GITLAB_RUNNER_OPERATOR_VERSION` is 1.23.2 or later. ## Uninstall Operator ### Uninstall on Red Hat OpenShift 1. Delete Runner `CRD`: ```shell kubectl delete -f gitlab-runner.yml ``` 1. Delete `secret`: ```shell kubectl delete -f gitlab-runner-secret.yml ``` 1.
Follow the instructions at the Red Hat documentation for [Deleting Operators from a cluster using the web console](https://docs.redhat.com/en/documentation/openshift_container_platform/4.7/html/operators/administrator-tasks#olm-deleting-operators-from-a-cluster-using-web-console_olm-deleting-operators-from-a-cluster). ### Uninstall on Kubernetes 1. Delete Runner `CRD`: ```shell kubectl delete -f gitlab-runner.yml ``` 1. Delete `secret`: ```shell kubectl delete -f gitlab-runner-secret.yml ``` 1. Delete the Operator subscription: ```shell kubectl delete subscription my-gitlab-runner-operator -n operators ``` 1. Find out the version of the installed `CSV`: ```shell kubectl get clusterserviceversion -n operators NAME DISPLAY VERSION REPLACES PHASE gitlab-runner-operator.v1.7.0 GitLab Runner 1.7.0 Succeeded ``` 1. Delete the `CSV`: ```shell kubectl delete clusterserviceversion gitlab-runner-operator.v1.7.0 -n operators ``` #### Configuration To configure GitLab Runner in OpenShift, see the [Configuring GitLab Runner on OpenShift](../configuration/configuring_runner_operator.md) page. #### Monitoring To enable monitoring and metrics collection for GitLab Runner Operator deployments, see [Monitor GitLab Runner Operator](../monitoring/_index.md#monitor-operator-managed-gitlab-runners). ================================================ FILE: docs/install/osx.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Download, install, and configure GitLab Runner as a user-mode service on Apple Silicon and Intel x86-64 systems. title: Install GitLab Runner on macOS --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} Install GitLab Runner on macOS on Apple Silicon or Intel x86-64 systems. 
GitLab itself typically runs on a container or virtual machine, either locally or remotely. ## macOS service modes On macOS, GitLab Runner runs as a user-mode `LaunchAgent`, not as a system-level `LaunchDaemon`. This is the only supported mode. In user-mode, the runner: - Runs as the currently authenticated user, not as root. - Starts when that user signs in, and stops when they sign out. - Has access to the user's keychain and UI session, which is required to run the iOS Simulator and to perform code signing. - Stores its configuration in `~/.gitlab-runner/config.toml`. A system-level `LaunchDaemon` starts at boot, runs as root, and has no access to a user session. GitLab Runner does not support running as a `LaunchDaemon`. To keep the runner available after a reboot, turn on automatic login on the macOS machine. ## Install GitLab Runner Install GitLab Runner on macOS to run CI/CD jobs on Apple Silicon or Intel x86-64 systems. Prerequisites: - You must be signed in to the macOS machine as the user account that runs the jobs. Do not use an SSH session for this procedure. Use a local GUI terminal. To install GitLab Runner: 1. Download the binary for your system: - For Intel (x86-64): ```shell sudo curl --output /usr/local/bin/gitlab-runner \ "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-darwin-amd64" ``` - For Apple Silicon: ```shell sudo curl --output /usr/local/bin/gitlab-runner \ "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-darwin-arm64" ``` To download a binary for a specific tagged release, see [download any other tagged release](bleeding-edge.md#download-any-other-tagged-release). 1. Make the binary executable: ```shell sudo chmod +x /usr/local/bin/gitlab-runner ``` 1. [Register a runner](../register/_index.md) configuration. Use the [shell executor](../executors/shell.md) for iOS and macOS builds. 
For security details, see [security for shell executor](../security/_index.md#usage-of-shell-executor). 1. Install and start the GitLab Runner service: ```shell cd ~ gitlab-runner install gitlab-runner start ``` 1. Reboot your system. The `gitlab-runner install` command creates a `LaunchAgent` plist at `~/Library/LaunchAgents/gitlab-runner.plist` and registers it with `launchctl`. If you encounter errors, see [troubleshooting](#troubleshooting). ## Configuration file locations | File | Path | |----------------------|--------------------------------------------------| | Configuration | `~/.gitlab-runner/config.toml` | | `LaunchAgent` plist | `~/Library/LaunchAgents/gitlab-runner.plist` | | Standard output log | `~/Library/Logs/gitlab-runner.out.log` | | Standard error log | `~/Library/Logs/gitlab-runner.err.log` | For more information about configuration options, see [advanced configuration](../configuration/advanced-configuration.md). ## Upgrade GitLab Runner To upgrade GitLab Runner to a newer version: 1. Stop the service: ```shell gitlab-runner stop ``` 1. Download the binary to replace the GitLab Runner executable: - For Intel (x86-64): ```shell sudo curl -o /usr/local/bin/gitlab-runner \ "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-darwin-amd64" ``` - For Apple Silicon: ```shell sudo curl -o /usr/local/bin/gitlab-runner \ "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-darwin-arm64" ``` To download a binary for a specific tagged release, see [download any other tagged release](bleeding-edge.md#download-any-other-tagged-release). 1. Make the binary executable: ```shell sudo chmod +x /usr/local/bin/gitlab-runner ``` 1. 
Start the service: ```shell gitlab-runner start ``` ## Upgrade the service file To upgrade the `LaunchAgent` configuration, uninstall and reinstall the service: ```shell gitlab-runner uninstall gitlab-runner install gitlab-runner start ``` ## Use `codesign` with GitLab Runner If you installed GitLab Runner with Homebrew and your build calls `codesign`, you might need to set `SessionCreate` to access the user keychain. > [!note] > GitLab does not maintain the Homebrew formula. Use the official binary to install GitLab Runner. In the following example, the runner runs builds as the `gitlab` user and needs access to that user's signing certificates: ```xml SessionCreate KeepAlive SuccessfulExit RunAtLoad Disabled Label com.gitlab.gitlab-runner UserName gitlab GroupName staff ProgramArguments /usr/local/opt/gitlab-runner/bin/gitlab-runner run --working-directory /Users/gitlab/gitlab-runner --config /Users/gitlab/gitlab-runner/config.toml --service gitlab-runner --syslog EnvironmentVariables PATH /usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin ``` ## Troubleshooting When installing GitLab Runner on macOS, you might encounter the following issues. For general troubleshooting, see [troubleshooting GitLab Runner](../faq/_index.md). ### Error: `killed: 9` On Apple Silicon, you might get this error when you run the `gitlab-runner install`, `gitlab-runner start`, or `gitlab-runner register` commands. To resolve this error, ensure the directories for `StandardOutPath` and `StandardErrorPath` in `~/Library/LaunchAgents/gitlab-runner.plist` exist and are writable. For example: ```xml StandardErrorPath /Users//gitlab-runner-log/gitlab-runner.err.log StandardOutPath /Users//gitlab-runner-log/gitlab-runner.out.log ``` ### Error: `"launchctl" failed: Could not find domain for` This error occurs when you manage the GitLab Runner service over SSH instead of a local GUI terminal. 
To resolve this error, open a terminal application directly on the macOS machine and run the `install` and `start` commands from there. ### Error: `Failed to authorize rights (0x1) with status: -60007` This error has two possible causes. Your user account does not have developer tools access. To grant access: ```shell DevToolsSecurity -enable sudo security authorizationdb remove system.privilege.taskport is-developer ``` Or, the `LaunchAgent` plist has `SessionCreate` set to `true`. To fix this issue, reinstall the service: ```shell gitlab-runner uninstall gitlab-runner install gitlab-runner start ``` Verify that `~/Library/LaunchAgents/gitlab-runner.plist` now has `SessionCreate` set to `false`. ### Error: `Failed to connect to path port 3000: Operation timed out` The runner cannot reach your GitLab instance. Check for firewalls, proxies, routing configuration, or permission issues that might be blocking the connection. ### Error: `FATAL: Failed to start gitlab-runner: exit status 134` This error indicates the GitLab Runner service is not installed correctly. To resolve this error, reinstall the service: ```shell gitlab-runner uninstall gitlab-runner install gitlab-runner start ``` If the error persists, sign in to the macOS GUI desktop instead of using SSH, and run the commands from a terminal there. The `LaunchAgent` requires a graphical login session to bootstrap. For macOS instances on AWS, follow the [AWS documentation](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/connect-to-mac-instance.html) to connect to the GUI, then retry from a terminal in that session. 
### Error: `launchctl failed: Load failed: 5: Input/output error` If you encounter this error when you run the `gitlab-runner start` command, first check if the runner is already running: ```shell gitlab-runner status ``` If the runner is not running, ensure the directories for `StandardOutPath` and `StandardErrorPath` in `~/Library/LaunchAgents/gitlab-runner.plist` exist and that the runner's user account has read and write access to them. Then start the runner: ```shell gitlab-runner start ``` ### Error: `couldn't build CA Chain` This error can occur after upgrading to GitLab Runner v15.5.0. The full error message is: ```plaintext ERROR: Error on fetching TLS Data from API response... error error=couldn't build CA Chain: error while fetching certificates from TLS ConnectionState: error while fetching certificates into the CA Chain: couldn't resolve certificates chain from the leaf certificate: error while resolving certificates chain with verification: error while verifying last certificate from the chain: x509: "Baltimore CyberTrust Root" certificate is not permitted for this usage runner=x7kDEc9Q ``` To resolve this error: 1. Upgrade to GitLab Runner v15.5.1 or later. 1. If you cannot upgrade, set `FF_RESOLVE_FULL_TLS_CHAIN` to `false` in the [`[runners.feature_flags]` configuration](../configuration/feature-flags.md#enable-feature-flag-in-runner-configuration): ```toml [[runners]] name = "example-runner" url = "https://gitlab.com/" token = "TOKEN" executor = "docker" [runners.feature_flags] FF_RESOLVE_FULL_TLS_CHAIN = false ``` ### Homebrew Git credential helper causes fetches to hang If Homebrew installed Git, it may have added a `credential.helper = osxkeychain` entry to `/usr/local/etc/gitconfig`. This caches credentials in the macOS keychain and can cause `git fetch` to hang. 
To remove the credential helper system-wide: ```shell git config --system --unset credential.helper ``` To disable it only for the GitLab Runner user: ```shell git config --global --add credential.helper '' ``` To check the current setting: ```shell git config credential.helper ``` ================================================ FILE: docs/install/requirements.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Software for CI/CD jobs. title: System requirements and supported platforms --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} ## Supported operating systems You can install GitLab Runner on: - Linux from a [GitLab repository](linux-repository.md) or [manually](linux-manually.md) - [FreeBSD](freebsd.md) - [macOS](osx.md) - [Windows](windows.md) - [z/OS](z-os.md) [Bleeding-edge binaries](bleeding-edge.md) are also available. To use a different operating system, ensure the operating system can compile a Go binary. ## Supported containers You can install GitLab Runner with: - [Docker](docker.md) - [The GitLab Helm chart](kubernetes.md) - [The GitLab agent for Kubernetes](kubernetes-agent.md) - [The GitLab Operator](operator.md) ## Supported architectures GitLab Runner is available for the following architectures: - x86 - AMD64 - ARM64 - ARM - s390x - ppc64le - riscv64 - loong64 ## System requirements The system requirements for GitLab Runner depend on the following considerations: - Anticipated CPU load of CI/CD jobs - Anticipated memory usage of CI/CD jobs - Number of concurrent CI/CD jobs - Number of projects in active development - Number of developers expected to work in parallel For more information about the machine types available for GitLab.com, see [GitLab-hosted runners](https://docs.gitlab.com/ci/runners/). 
## FIPS-compliant GitLab Runner A GitLab Runner binary compliant with FIPS 140-2 is available for Red Hat Enterprise Linux (RHEL) distributions and the AMD64 architecture. Support for other distributions and architectures is proposed in [issue 28814](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28814). This binary is built with the [Red Hat Go compiler](https://developers.redhat.com/blog/2019/06/24/go-and-fips-140-2-on-red-hat-enterprise-linux) and calls into a FIPS 140-2 validated cryptographic library. A [UBI-8 minimal image](https://docs.redhat.com/en/documentation/red_hat_enterprise_linux/8/html-single/building_running_and_managing_containers/index#con_understanding-the-ubi-minimal-images_assembly_types-of-container-images) is used as the base for creating the GitLab Runner FIPS image. For more information about using FIPS-compliant GitLab Runner in RHEL, see [Switching RHEL to FIPS mode](https://docs.redhat.com/en/documentation/red_hat_enterprise_linux/8/html/security_hardening/switching-rhel-to-fips-mode_security-hardening). ================================================ FILE: docs/install/step-runner.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Install step runner manually to use GitLab Functions title: Install step runner manually --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} The step runner is a binary that allows GitLab Runner to execute GitLab Functions on executors without native functions support. For these executors, you must install the step runner binary on the host or container where your jobs run before you can use functions in your pipelines. ## Executors that require manual step runner installation Whether you need to install step-runner manually depends on your executor. 
The following table shows which executors require you to install step runner manually: | Executor | Manual installation required | |-------------------|------------------------------| | Shell | Yes | | SSH | Yes | | Kubernetes | Yes | | VirtualBox | Yes | | Parallels | Yes | | Custom | Yes | | Instance | Yes | | Docker | Only on Windows | | Docker Autoscaler | Only on Windows | | Docker Machine | Only on Windows | For executors that don't require manual installation, `gitlab-runner-helper` acts as the step runner. The `step-runner` binary is neither present nor required on these executors. ### Variable access restrictions On executors where you install step runner manually, the step runner has restricted access to job variables and environment variables: | Syntax | Available values | |----------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | `${{ vars. }}` | Job variables with the prefix `CI_`, `DOCKER_`, or `GITLAB_` only. | | `${{ env. }}` | `HTTPS_PROXY`, `HTTP_PROXY`, `NO_PROXY`, `http_proxy`, `https_proxy`, `no_proxy`, `all_proxy`, `LANG`, `LC_ALL`, `LC_CTYPE`, `LOGNAME`, `USER`, `PATH`, `SHELL`, `TERM`, `TMPDIR`, `TZ` | ## Install step runner manually Pre-compiled binaries for multiple platforms are available from the [step runner releases page](https://gitlab.com/gitlab-org/step-runner/-/releases). Supported platforms include Windows, Linux, macOS, and FreeBSD across multiple architectures (amd64, arm64, 386, arm, s390x, ppc64le). ### Verify authenticity of the binary Before you install, verify that the binary hasn't been tampered with and comes from the official GitLab team. 1. 
Download and import the GPG public key: ```shell # All platforms (requires gpg installed: https://gnupg.org/download/) curl -o step-runner.pub.gpg "https://gitlab.com/gitlab-org/step-runner/-/package_files/257922684/download" gpg --import step-runner.pub.gpg gpg --fingerprint ``` Verify the imported key matches the following: | Key attribute | Value | |---------------|------------------------------------------------------| | Name | `GitLab, Inc.` | | Email | `support@gitlab.com` | | Fingerprint | `0FCD 59B1 6F4A 62D0 3839 27A5 42FF CA71 62A5 35F5` | | Expiry | `2029-01-05` | 1. From the [releases page](https://gitlab.com/gitlab-org/step-runner/-/releases), download the following files: - The binary for your platform (for example, `step-runner-linux-amd64` or `step-runner-darwin-arm64`) - `step-runner-release.sha256` - `step-runner-release.sha256.asc` 1. Verify the GPG signature: ```shell # All platforms (requires gpg) gpg --verify step-runner-release.sha256.asc step-runner-release.sha256 ``` The output should include a `Good signature` message. 1. Verify the binary checksum: ```shell # Linux sha256sum -c step-runner-release.sha256 ``` ```shell # macOS shasum -a 256 -c step-runner-release.sha256 ``` ```shell # Windows (PowerShell) — replace 'step-runner-windows-amd64.exe' with your binary name $binary = "step-runner-windows-amd64.exe" $expected = (Select-String -Path "step-runner-release.sha256" -Pattern $binary).Line.Split(" ")[0] $actual = (Get-FileHash -Algorithm SHA256 $binary).Hash.ToLower() if ($actual -eq $expected) { "OK" } else { "FAILED: checksum mismatch" } ``` The output should show `OK` for your binary. ### Add step-runner to PATH After you download and verify the binary, make it available on the `PATH` of the instance where your jobs run. This instance might be the host machine or a container, depending on your executor. 1. Rename the binary to `step-runner` (or `step-runner.exe` on Windows): ```shell mv step-runner-- step-runner ``` 1. 
On Unix-like systems, make the binary executable: ```shell chmod +x step-runner ``` 1. Move the binary to a directory on your `PATH`: ```shell mv step-runner /usr/local/bin/ ``` ================================================ FILE: docs/install/support-policy.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: GitLab Runner support policy --- The support policy by GitLab Runner is determined by the lifecycle policy of the operating system. ## Container images support We follow the support lifecycle of the base distributions (Ubuntu, Alpine, Red Hat Universal Base Image) used for creating the GitLab Runner container images. The end-of-publishing dates for the base distributions will not necessarily align with the GitLab major release cycle. This means we will stop publishing a version of the GitLab Runner container image in a minor release. This ensures that we do not publish images that the upstream distribution no longer updates. 
### Container images and end of publishing date | Base container | Base container version | Vendor EOL date | GitLab EOL date | |--------------------------------|------------------------|-----------------|-----------------| | Ubuntu | 24.04 | 2027-04-30 | 2027-05-20 | | Ubuntu | 20.04 | 2025-05-31 | 2025-06-19 | | Alpine | 3.12 | 2022-05-01 | 2023-05-22 | | Alpine | 3.13 | 2022-11-01 | 2023-05-22 | | Alpine | 3.14 | 2023-05-01 | 2023-05-22 | | Alpine | 3.15 | 2023-11-01 | 2024-01-18 | | Alpine | 3.16 | 2024-05-23 | 2024-06-22 | | Alpine | 3.17 | 2024‑11‑22 | 2024-12-22 | | Alpine | 3.18 | 2025‑05‑09 | 2025-05-22 | | Alpine | 3.19 | 2025‑11‑01 | 2025-11-22 | | Alpine | 3.21 | 2026‑11‑01 | 2026-11-22 | | Alpine | latest | | | | Red Hat Universal Base Image 9 | 9.5 | 2025-04-31 | 2025-05-22 | GitLab Runner versions 17.7 and later support only a single Alpine version (`latest`) instead of specific versions. Alpine versions 3.21 will be supported to the stated EOL date. In contrast, Ubuntu 24.04 will be supported to its EOL date, at which point we will move to the most recent LTS release. ## Windows version support GitLab officially supports LTS versions of Microsoft Windows operating systems and so we follow the Microsoft [Servicing Channels](https://learn.microsoft.com/en-us/windows/deployment/update/waas-overview#servicing-channels) lifecycle policy. This means that we support: - [Long-Term Servicing Channel](https://learn.microsoft.com/en-us/windows/deployment/update/waas-overview#long-term-servicing-channel) versions for five years after their release date. After five years, Microsoft offers extended support for an additional five years. During this extended period, we offer support for as long as is practical. We can end this support, with announcement, on a GitLab major release. - [Semi-Annual Channel](https://learn.microsoft.com/en-us/windows/deployment/update/waas-overview#semi-annual-channel) versions for 18 months after their release date. 
We don't support these versions after mainstream support ends. This support policy applies to the [Windows binaries](windows.md#installation) that we distribute and the [Docker executor](../executors/docker.md#supported-windows-versions). > [!note] > The Docker executor for Windows containers has strict version > requirements, because containers have to match the version of the host > OS. See the [list of supported Windows containers](../executors/docker.md#supported-windows-versions) > for more information. As a single source of truth, we use , which specifies the release, mainstream, and extended support dates. Below is a list of versions that are commonly used and their end of life date: | Operating system | Mainstream support end date | Extended support end date | |----------------------------|-----------------------------|---------------------------| | Windows Server 2019 (1809) | January 2024 | January 2029 | | Windows Server 2022 (21H2) | October 2026 | October 2031 | | Windows Server 2025 (24H2) | October 2029 | October 2034 | ### Future releases Microsoft releases new Windows Server products in the [Semi-Annual Channel](https://learn.microsoft.com/en-us/windows-server/get-started/servicing-channels-comparison#semi-annual-channel) twice a year, and every 2 - 3 years a new major version of Windows Sever is released in the [Long-Term Servicing Channel (LTSC)](https://learn.microsoft.com/en-us/windows-server/get-started/servicing-channels-comparison#long-term-servicing-channel-ltsc). GitLab aims to test and release new GitLab Runner helper images that include the latest Windows Server version (Semi-Annual Channel) within 1 month of the official Microsoft release date on the Google Cloud Platform. Refer to the [Windows Server current versions by servicing option list](https://learn.microsoft.com/en-us/windows/release-health/windows-server-release-info#windows-server-current-versions-by-servicing-option) for availability dates. 
================================================ FILE: docs/install/windows.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Install GitLab Runner on Windows systems. title: Install GitLab Runner on Windows --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} To install and run GitLab Runner on Windows you need: - Git, which can be installed from the [official site](https://git-scm.com/download/win) - A password for your user account, if you want to run it under your user account rather than the Built-in System Account. - The system locale set to English (United States) to avoid character encoding issues. For more information, see [issue 38702](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/38702). ## Installation 1. Create a folder somewhere in your system, for example, `C:\GitLab-Runner`. 1. Download the binary for [x86 64-bit](https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-windows-amd64.exe), [ARM 64-bit](https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-windows-arm64.exe) or [x86 32-bit](https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-windows-386.exe) and put it into the folder you created. The following assumes you have renamed the binary to `gitlab-runner.exe` (optional). You can download a binary for every available version as described in [Bleeding Edge - download any other tagged release](bleeding-edge.md#download-any-other-tagged-release). 1. Make sure to restrict the `Write` permissions on the GitLab Runner directory and executable. If you do not set these permissions, regular users can replace the executable with their own and run arbitrary code with elevated privileges. 
1. Run an [elevated command prompt](https://learn.microsoft.com/en-us/powershell/scripting/windows-powershell/starting-windows-powershell?view=powershell-7.4#with-administrative-privileges-run-as-administrator): 1. [Register a runner](../register/_index.md). 1. Install GitLab Runner as a service and start it. You can either run the service using the Built-in System Account (recommended) or using a user account. > [!note] > Windows services do not provide interactive desktop sessions. To run GUI or desktop automation > tests, see [GUI tests and interactive desktop sessions](#gui-tests-and-interactive-desktop-sessions). **Run service using Built-in System Account** (under the example directory created in step 1, `C:\GitLab-Runner`) ```powershell cd C:\GitLab-Runner .\gitlab-runner.exe install .\gitlab-runner.exe start ``` **Run service using user account** (under the example directory created in step 1, `C:\GitLab-Runner`) You have to enter a valid password for the current user account, because it's required to start the service by Windows: ```powershell cd C:\GitLab-Runner .\gitlab-runner.exe install --user ENTER-YOUR-USERNAME --password ENTER-YOUR-PASSWORD .\gitlab-runner.exe start ``` See the [troubleshooting section](#windows-troubleshooting) if you encounter any errors during the GitLab Runner installation. 1. (Optional) Update the runner's `concurrent` value in `C:\GitLab-Runner\config.toml` to allow multiple concurrent jobs as detailed in [advanced configuration details](../configuration/advanced-configuration.md). Additionally, you can use the advanced configuration details to update your shell executor to use Bash or PowerShell rather than Batch. Voila! Runner is installed, running, and starts again after each system reboot. Logs are stored in Windows Event Log. ## Upgrade 1. 
Stop the service (you need an [elevated command prompt](https://learn.microsoft.com/en-us/powershell/scripting/windows-powershell/starting-windows-powershell?view=powershell-7.4#with-administrative-privileges-run-as-administrator) as before): ```powershell cd C:\GitLab-Runner .\gitlab-runner.exe stop ``` 1. Download the binary for [x86 64-bit](https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-windows-amd64.exe), [ARM 64-bit](https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-windows-arm64.exe) or [x86 32-bit](https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-windows-386.exe) and replace runner's executable. You can download a binary for every available version as described in [Bleeding Edge - download any other tagged release](bleeding-edge.md#download-any-other-tagged-release). 1. Start the service: ```powershell .\gitlab-runner.exe start ``` ## Uninstall From an [elevated command prompt](https://learn.microsoft.com/en-us/powershell/scripting/windows-powershell/starting-windows-powershell?view=powershell-7.4#with-administrative-privileges-run-as-administrator): ```powershell cd C:\GitLab-Runner .\gitlab-runner.exe stop .\gitlab-runner.exe uninstall cd .. rmdir /s GitLab-Runner ``` ## Windows troubleshooting Make sure that you read the [FAQ](../faq/_index.md) section which describes some of the most common problems with GitLab Runner. If you encounter an error like _The account name is invalid_, try: ```powershell # Add \. before the username .\gitlab-runner.exe install --user ".\ENTER-YOUR-USERNAME" --password "ENTER-YOUR-PASSWORD" ``` If you encounter a `The service did not start due to a logon failure` error while starting the service, see the [FAQ section](#error-the-service-did-not-start-due-to-a-logon-failure) to check how to resolve the problem. 
If you don't have a Windows Password, you cannot start the GitLab Runner service but you can use the Built-in System Account. For Built-in System Account issues, see [Configure the Service to Start Up with the Built-in System Account](https://learn.microsoft.com/en-us/troubleshoot/windows-server/system-management-components/service-startup-permissions#resolution-3-configure-the-service-to-start-up-with-the-built-in-system-account) on the Microsoft support website. ### Get runner logs When you run `.\gitlab-runner.exe install` it installs `gitlab-runner` as a Windows service. You can find the logs in the Event Viewer with the provider name `gitlab-runner`. If you don't have access to the GUI, in PowerShell, you can run [`Get-WinEvent`](https://learn.microsoft.com/en-us/powershell/module/microsoft.powershell.diagnostics/get-winevent?view=powershell-7.4). ```shell PS C:\> Get-WinEvent -ProviderName gitlab-runner ProviderName: gitlab-runner TimeCreated Id LevelDisplayName Message ----------- -- ---------------- ------- 2/4/2025 6:20:14 AM 1 Information [session_server].listen_address not defined, session endpoints disabled builds=0... 2/4/2025 6:20:14 AM 1 Information listen_address not defined, metrics & debug endpoints disabled builds=0... 2/4/2025 6:20:14 AM 1 Information Configuration loaded builds=0... 2/4/2025 6:20:14 AM 1 Information Starting multi-runner from C:\config.toml... builds=0... ``` ### GUI tests and interactive desktop sessions Windows GUI test tools (like Ranorex and desktop automation frameworks) require an interactive user session with access to the visible desktop. This is a known platform limitation. For details, see [issue 1046](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/1046). When GitLab Runner runs only as a Windows service: - Jobs execute in a non-interactive session. - Jobs cannot access the visible desktop. - GUI tests fail or hang. To run GUI or desktop automation tests: 1. Use the `shell` executor. 
Docker and Kubernetes executors on Windows do not provide an interactive desktop session. 1. Sign in to Windows with the user account for the interactive session. 1. Start GitLab Runner as a foreground process in that session instead of using the service: ```powershell cd C:\GitLab-Runner .\gitlab-runner.exe run ``` 1. Keep the user session active for as long as GUI tests run. 1. Use tags in your `.gitlab-ci.yml` file to send GUI test jobs to this runner: ```yaml gui_tests: stage: test tags: - windows-gui script: - .\run-gui-tests.ps1 ``` Autoscaled or ephemeral Windows runners cannot run GUI tests because they do not support interactive desktop sessions. Each job runs on a freshly provisioned VM with no logged-in user, so there is no visible desktop for GUI automation to target. ### I get a `PathTooLongException` during my builds on Windows This error is caused by tools like `npm` which sometimes generate directory structures with paths more than 260 characters in length. To solve the problem, adopt one of the following solutions. - Use Git with `core.longpaths` enabled: You can avoid the problem by using Git to clean your directory structure. 1. Run `git config --system core.longpaths true` from the command line. 1. Set your project to use `git fetch` from the GitLab CI project settings page. - Use NTFSSecurity tools for PowerShell: The [NTFSSecurity](https://github.com/raandree/NTFSSecurity) PowerShell module provides a `Remove-Item2` method which supports long paths. GitLab Runner detects it if it is available and automatically make use of it. > A regression introduced in GitLab Runner 16.9.1 is fixed in GitLab Runner 17.10.0. > If you intend to use the GitLab Runner versions with regressions, use one of the following workarounds: > > - Use `pre_get_sources_script` to re-enable Git system-level settings (by unsetting `Git_CONFIG_NOSYSTEM`). > This action enables `core.longpaths` by default on Windows. 
> > ```yaml > build: > hooks: > pre_get_sources_script: > - $env:GIT_CONFIG_NOSYSTEM='' > ``` > > - Build a custom `GitLab-runner-helper` image: > > ```dockerfile > FROM registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:x86_64-v17.8.3-servercore21H2 > ENV GIT_CONFIG_NOSYSTEM= > ``` ### Error with Windows batch scripts: `The system cannot find the batch label specified - buildscript` You need to prepend `call` to your Batch file line in `.gitlab-ci.yml` so that it looks like `call C:\path\to\test.bat`. For example: ```yaml before_script: - call C:\path\to\test.bat ``` For more information, see [issue 1025](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/1025). ### How can I get colored output on the web terminal? **Short answer**: Make sure that you have the ANSI color codes in your program's output. For the purposes of text formatting, assume that you're running in a UNIX ANSI terminal emulator (because it is the web interface output). **Long Answer**: The web interface for GitLab CI emulates a UNIX ANSI terminal (at least partially). The `gitlab-runner` pipes any output from the build directly to the web interface. That means that any ANSI color codes that are present are honored. Older versions of Windows' command prompt terminal (before Windows 10, version 1511) do not support ANSI color codes. They use win32 ([`ANSI.SYS`](https://en.wikipedia.org/wiki/ANSI.SYS)) calls instead which are **not** present in the string to be displayed. When writing cross-platform programs, developers typically use ANSI color codes by default. These codes are converted to win32 calls when running on a Windows system, for example, [Colorama](https://pypi.org/project/colorama/). If your program is doing the above, you must disable that conversion for the CI builds so that the ANSI codes remain in the string. 
For more information, see [GitLab CI YAML documentation](https://docs.gitlab.com/ci/yaml/script/#add-color-codes-to-script-output) for an example using PowerShell and [issue 332](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/332). ### Error: `The service did not start due to a logon failure` When installing and starting the GitLab Runner service on Windows you can meet with such error: ```shell gitlab-runner install --password WINDOWS_MACHINE_PASSWORD gitlab-runner start FATA[0000] Failed to start GitLab Runner: The service did not start due to a logon failure. ``` This error can occur when the user used to execute the service doesn't have the `SeServiceLogonRight` permission. In this case, you need to add this permission for the chosen user and then try to start the service again. 1. Go to **Control Panel > System and Security > Administrative Tools**. 1. Open the **Local Security Policy** tool. 1. Select **Security Settings > Local Policies > User Rights Assignment** on the list on the left. 1. Open the **Log on as a service** on the list on the right. 1. Select **Add User or Group...**. 1. Add the user ("by hand" or using **Advanced...**) and apply the settings. According to [Microsoft documentation](https://learn.microsoft.com/en-us/previous-versions/windows/it-pro/windows-server-2012-R2-and-2012/dn221981(v=ws.11)), this should work for: - Windows Vista - Windows Server 2008 - Windows 7 - Windows 8.1 - Windows Server 2008 R2 - Windows Server 2012 R2 - Windows Server 2012 - Windows 8 The Local Security Policy tool may be not available in some Windows versions, for example in "Home Edition" variant of each version. After adding the `SeServiceLogonRight` for the user used in service configuration, the command `gitlab-runner start` should finish without failures and the service should be started properly. ### Job marked as success or failed incorrectly Most Windows programs output `exit code 0` for success. 
However, some programs don't return an exit code or have a different value for success. An example is the Windows tool `robocopy`. The following `.gitlab-ci.yml` fails, even though it should be successful, due to the exit code output by `robocopy`: ```yaml test: stage: test script: - New-Item -type Directory -Path ./source - New-Item -type Directory -Path ./dest - Write-Output "Hello World!" > ./source/file.txt - robocopy ./source ./dest tags: - windows ``` In the case above, you need to manually add an exit code check to the `script:`. For example, you can create a PowerShell script: ```powershell $exitCodes = 0,1 robocopy ./source ./dest if ( $exitCodes.Contains($LastExitCode) ) { exit 0 } else { exit 1 } ``` And change the `.gitlab-ci.yml` file to: ```yaml test: stage: test script: - New-Item -type Directory -Path ./source - New-Item -type Directory -Path ./dest - Write-Output "Hello World!" > ./source/file.txt - ./robocopyCommand.ps1 tags: - windows ``` Also, be careful of the difference between `return` and `exit` when using PowerShell functions. While `exit 1` marks a job as failed, `return 1` does not. ### Job marked as success and terminated midway using Kubernetes executor For more information, see [Job execution](../executors/kubernetes/_index.md#job-execution). ### Docker executor: `unsupported Windows Version` GitLab Runner checks the version of Windows Server to verify that it's supported. It does this by running `docker info`. If GitLab Runner fails to start and displays an error without specifying a Windows Server version, then the Docker version might be outdated. ```plaintext Preparation failed: detecting base image: unsupported Windows Version: Windows Server Datacenter ``` The error should contain detailed information about the Windows Server version, which is then compared with the versions that GitLab Runner supports. 
```plaintext unsupported Windows Version: Windows Server Datacenter Version (OS Build 18363.720) ``` Docker 17.06.2 on Windows Server returns the following in the output of `docker info`. ```plaintext Operating System: Windows Server Datacenter ``` The fix in this case is to upgrade the Docker version of similar age, or later, than the Windows Server release. ### Kubernetes executor: `unsupported Windows Version` Kubernetes executor on Windows might fail with the following error: ```plaintext Using Kubernetes namespace: gitlab-runner ERROR: Preparation failed: prepare helper image: detecting base image: unsupported Windows Version: Will be retried in 3s ... ERROR: Job failed (system failure): prepare helper image: detecting base image: unsupported Windows Version: ``` To fix it, add `node.kubernetes.io/windows-build` node selector in the section `[runners.kubernetes.node_selector]` of your GitLab Runner configuration file, For example: ```toml [runners.kubernetes.node_selector] "kubernetes.io/arch" = "amd64" "kubernetes.io/os" = "windows" "node.kubernetes.io/windows-build" = "10.0.17763" ``` ### I'm using a mapped network drive and my build cannot find the correct path When GitLab Runner runs under a standard user account instead of an administrator account, it cannot access mapped network drives. When you try to use mapped network drives, you get the `The system cannot find the path specified.` error. This error occurs because service logon sessions have [security limitations](https://learn.microsoft.com/en-us/windows/win32/services/services-and-redirected-drives) when accessing resources. Use the [UNC path](https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#unc-paths) of your drive instead. ### The build container is unable to connect to service containers To use services with Windows containers: - Use the networking mode that [creates a network for each job](../executors/docker.md#create-a-network-for-each-job). 
- Ensure that the `FF_NETWORK_PER_BUILD` feature flag is enabled. ### The job cannot create a build directory and fails with an error When you use the `GitLab-Runner` with the `Docker-Windows` executor, a job might fail with an error like: ```shell fatal: cannot chdir to c:/builds/gitlab/test: Permission denied` ``` When this error occurs, ensure the user the Docker engine is running as has full permissions to `C:\Program Data\Docker`. The Docker engine must be able to write to this directory for certain actions, and without the correct permissions it fails. [Read more about configuring Docker Engine on Windows](https://learn.microsoft.com/en-us/virtualization/windowscontainers/manage-docker/configure-docker-daemon). ### Blank lines for Windows Subsystem for Linux (WSL) STDOUT output in job logs By default the STDOUT output for the Windows Subsystem for Linux (WSL) is not UTF8 encoded and displays as blank lines in the job logs. To display the STDOUT output, you can force UTF8 encoding for WSL by setting the `WSL_UTF8` environment variable. ```yaml job: variables: WSL_UTF8: "1" ``` ### Display resolution is limited to 1024x768 When you run CI/CD Jobs on Windows with GitLab Runner as a system service, the display resolution is limited to 1024x768. This issue is due to Windows Session 0 isolation. For more information, see [Session 0 Isolation](https://learn.microsoft.com/en-us/previous-versions/bb756986(v=msdn.10)?redirectedfrom=MSDN). 
To verify session and display resolution, run the following PowerShell script in a job: ```powershell echo "Current session:" [System.Diagnostics.Process]::GetCurrentProcess().SessionId Add-Type -AssemblyName System.Windows.Forms [System.Windows.Forms.Screen]::AllScreens ``` Here's the output of the script when running in the isolates session 0: ```plaintext Current session: 0 BitsPerPixel : 0 Bounds : {X=0,Y=0,Width=1024,Height=768} DeviceName : WinDisc Primary : True WorkingArea : {X=0,Y=0,Width=1024,Height=768} ``` ================================================ FILE: docs/install/z-os.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Install GitLab Runner manually on z/OS. title: Install GitLab Runner manually on z/OS --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} GitLab Runner for IBM z/OS has been certified by GitLab and can run CI/CD jobs natively on z/OS mainframe environments. You can download and install GitLab Runner on z/OS manually from a [`pax`](https://www.ibm.com/docs/en/aix/7.1.0?topic=p-pax-command) archive. ## Prerequisites - To use GitLab Runner, you need the following authorized program analysis reports (`APARs`) with program temporary fixes (`PTFs`): - z/OS 2.5 - OA62757 - PH45182 - z/OS 3.1 - OA62757 - PH57159 - GitLab Runner expects bash to be installed at `/bin/bash` to execute shell commands. If bash is not installed at this location, create a symlink to the installed version: ```shell ln -s /bin/bash ``` ## Install GitLab Runner To install GitLab Runner: 1. Download the `paxfile` into your chosen install directory. 1. Install the package for your system: ```shell pax -ppx -rf gitlab-runner-.pax.Z ``` The installed files are unpacked to the `gitlab-runner` directory in the install location. 1. 
Give the file permissions to execute: ```shell chmod +x /bin/gitlab-runner ``` 1. Export GitLab Runner and add it to your `PATH`: ```shell export GITLAB_RUNNER=/gitlab-runner/bin export PATH=${GITLAB_RUNNER}:${PATH} ``` 1. [Register a runner](../register/_index.md). ## Run GitLab Runner You can run GitLab Runner directly or as a started task. ### Run GitLab Runner directly To run GitLab Runner by calling the executable: 1. Go to the directory `/bin`. 1. Start the service: ```shell gitlab-runner start ``` ### Run GitLab Runner as a started task To keep the GitLab Runner process available, run it as a started task. 1. Wrap the executable in a shell script `gitlab-runner.sh`: ```shell #! /bin/sh /bin/gitlab-runner start ``` 1. Define a `jcl` started task program and execute it to run as an ongoing process: ```jcl //GLRST PROC CNFG='' //* //GLRST EXEC PGM=BPXBATSL,REGION=0M,TIME=NOLIMIT, // PARM='PGM &CNFG./gitlab-runner.sh' //STDOUT DD SYSOUT=* //STDERR DD SYSOUT=* //* // PEND ``` ================================================ FILE: docs/monitoring/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: Prometheus metrics. title: Monitor GitLab Runner usage --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} GitLab Runner can be monitored using [Prometheus](https://prometheus.io). ## Embedded Prometheus metrics GitLab Runner includes native Prometheus metrics, which you can expose using an embedded HTTP server on the `/metrics` path. The server - if enabled - can be scraped by the Prometheus monitoring system or accessed with any other HTTP client. 
The exposed information includes: - Runner business logic metrics (for example, the number of jobs running at the moment) - Go-specific process metrics (for example, garbage collection stats, goroutines, and memstats) - general process metrics (memory usage, CPU usage, file descriptor usage, etc.) - build version information The metrics format is documented in Prometheus' [Exposition formats](https://prometheus.io/docs/instrumenting/exposition_formats/) specification. These metrics are meant as a way for operators to monitor and gain insight into your runners. For example, you might want to know if an increase in load average on the runner host is related to an increase in processed jobs. Or perhaps you are running a cluster of machines, and you want to track build trends so you can make changes to your infrastructure. ### Learning more about Prometheus To set up Prometheus server to scrape this HTTP endpoint and use the collected metrics, see Prometheus's [getting started](https://prometheus.io/docs/prometheus/latest/getting_started/) guide. For more details on how to configure Prometheus, see the [configuration](https://prometheus.io/docs/prometheus/latest/configuration/configuration/) section. For more details about alert configuration, see [alerting rules](https://prometheus.io/docs/prometheus/latest/configuration/alerting_rules/) and [Alertmanager](https://prometheus.io/docs/alerting/latest/alertmanager/). ## Available metrics To find a full list of all available metrics, `curl` the metrics endpoint after it is configured and enabled. For example, for a local runner configured with listening port `9252`: ```shell $ curl -s "http://localhost:9252/metrics" | grep -E "# HELP" # HELP gitlab_runner_api_request_statuses_total The total number of api requests, partitioned by runner, endpoint and status. # HELP gitlab_runner_autoscaling_machine_creation_duration_seconds Histogram of machine creation time. 
# HELP gitlab_runner_autoscaling_machine_states The current number of machines per state in this provider. # HELP gitlab_runner_concurrent The current value of concurrent setting # HELP gitlab_runner_errors_total The number of caught errors. # HELP gitlab_runner_limit The current value of limit setting # HELP gitlab_runner_request_concurrency The current number of concurrent requests for a new job # HELP gitlab_runner_request_concurrency_exceeded_total Count of excess requests above the configured request_concurrency limit # HELP gitlab_runner_version_info A metric with a constant '1' value labeled by different build stats fields. ... ``` The list includes [Go-specific process metrics](https://github.com/prometheus/client_golang/blob/v1.19.0/prometheus/go_collector.go). For a list of available metrics that do not include Go-specific processes, see [Monitoring runners](../fleet_scaling/_index.md#monitoring-runners). ## `pprof` HTTP endpoints The internal state of the GitLab Runner process through metrics is valuable, but in some cases you must examine the running process in real time. That's why we've introduced the `pprof` HTTP endpoints. `pprof` endpoints are available through an embedded HTTP server on the `/debug/pprof/` path. You can read more about using `pprof` in its [documentation](https://pkg.go.dev/net/http/pprof). ## Configuration of the metrics HTTP server > [!note] > The metrics server exports data about the internal state of the > GitLab Runner process and should not be publicly available! Configure the metrics HTTP server by using one of the following methods: - Use the `listen_address` global configuration option in the `config.toml` file. - Use the `--listen-address` command line option for the `run` command. - For runners using Helm chart, in the `values.yaml`: 1. 
Configure the `metrics` option: ```yaml ## Configure integrated Prometheus metrics exporter ## ## ref: https://docs.gitlab.com/runner/monitoring/#configuration-of-the-metrics-http-server ## metrics: enabled: true ## Define a name for the metrics port ## portName: metrics ## Provide a port number for the integrated Prometheus metrics exporter ## port: 9252 ## Configure a prometheus-operator serviceMonitor to allow automatic detection of ## the scraping target. Requires enabling the service resource below. ## serviceMonitor: enabled: true ... ``` 1. Configure the `service` monitor to retrieve the configured `metrics`: ```yaml ## Configure a service resource to allow scraping metrics by using ## prometheus-operator serviceMonitor service: enabled: true ## Provide additional labels for the service ## labels: {} ## Provide additional annotations for the service ## annotations: {} ... ``` If you add the address to your `config.toml` file, to start the metrics HTTP server, you must restart the runner process. In both cases the option accepts a string with the format `[host]:`, where: - `host` can be an IP address or a hostname, - `port` is a valid TCP port or symbolic service name (like `http`). You should use port `9252` which is already [allocated in Prometheus](https://github.com/prometheus/prometheus/wiki/Default-port-allocations). If the listen address does not contain a port, it defaults to `9252`. Examples of addresses: - `:9252` listens on all interfaces on port `9252`. - `localhost:9252` listens on the loopback interface on port `9252`. - `[2001:db8::1]:http` listens on IPv6 address `[2001:db8::1]` on the HTTP port `80`. Remember that for listening on ports below `1024` - at least on Linux/Unix systems - you need to have root/administrator privileges. The HTTP server is opened on the selected `host:port` **without any authorization**. 
If you bind the metrics server to a public interface, use your firewall to limit access or add an HTTP proxy for authorization and access control. ## Monitor Operator managed GitLab Runners GitLab Runners managed by the GitLab Runner Operator use the same embedded Prometheus metrics server as standalone GitLab Runner instances. The metrics server is preconfigured with `listenAddr` set to `[::]:9252`, which listens on all IPv6 and IPv4 interfaces on port `9252`. ### Expose metrics port To enable monitoring and metrics collection for GitLab Runners managed by the GitLab Runner Operator, configure the metrics port and Prometheus scraping as described in the following sections. #### Configure the metrics port Add the following patch to the `podSpec` field in your runner configuration: ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: gitlab-runner spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret buildImage: alpine podSpec: name: "metrics-config" patch: | { "containers": [ { "name": "runner", "ports": [ { "name": "metrics", "containerPort": 9252, "protocol": "TCP" } ] } ] } patchType: "strategic" ``` This configuration: - `name`: Assigns a name to the custom `PodSpec` for identification. - `patch`: Defines the JSON patch to apply to the `PodSpec`, which exposes port `9252` on the runner container. - `patchType`: Uses the `strategic` merge strategy (default) to apply the patch. - `port`: Named as `metrics` for easy identification in Kubernetes services. 
#### Configure Prometheus scraping For environments using Prometheus Operator, create a `PodMonitor` resource to directly scrape metrics from runner pods: ```yaml apiVersion: monitoring.coreos.com/v1 kind: PodMonitor metadata: name: gitlab-runner-metrics namespace: kube-prometheus-stack labels: release: kube-prometheus-stack spec: selector: matchLabels: app.kubernetes.io/component: runner namespaceSelector: matchNames: - gitlab-runner-system podMetricsEndpoints: - port: metrics interval: 10s path: /metrics ``` Apply the `PodMonitor` configuration: ```shell kubectl apply -f gitlab-runner-podmonitor.yaml ``` The `PodMonitor` configuration: - `selector`: Matches pods with the `app.kubernetes.io/component: runner` label. - `namespaceSelector`: Limits scraping to the `gitlab-runner-system` namespace. - `podMetricsEndpoints`: Defines the metrics port, scrape interval, and path. #### Add runner identification to metrics To add runner identification to all exported metrics, include relabel configuration in the `PodMonitor`: ```yaml podMetricsEndpoints: - port: metrics interval: 10s path: /metrics relabelings: - sourceLabels: [__meta_kubernetes_pod_label_app_kubernetes_io_name] targetLabel: runner_name ``` The relabel configuration: - Extracts the `app.kubernetes.io/name` label from each runner pod (automatically set by GitLab Runner Operator). - Adds it as a `runner_name` label to all metrics from that pod. - Enables filtering and aggregation of metrics by specific runner instances. 
The following is an example of metrics with runner identification: ```prometheus gitlab_runner_concurrent{runner_name="my-gitlab-runner"} 10 gitlab_runner_jobs_running_total{runner_name="my-gitlab-runner"} 3 ``` #### Direct Prometheus scrape configuration If you're not using Prometheus Operator, you can add the relabel configuration directly in the Prometheus scrape configuration: ```yaml scrape_configs: - job_name: 'gitlab-runner-operator' kubernetes_sd_configs: - role: pod namespaces: names: - gitlab-runner-system relabel_configs: - source_labels: [__meta_kubernetes_pod_label_app_kubernetes_io_name] target_label: runner_name metrics_path: /metrics scrape_interval: 10s ``` This configuration: - Uses Kubernetes service discovery to find pods in the `gitlab-runner-system` namespace. - Extracts the `app.kubernetes.io/name` label and adds it as `runner_name` to metrics. ## Monitor GitLab Runner with executors other than Kubernetes For GitLab Runner deployments with executors other than Kubernetes, you can add runner identification through external labels in your Prometheus configuration. ### Static configuration with external labels Configure Prometheus to scrape your GitLab Runner instances and add identifying labels: ```yaml scrape_configs: - job_name: 'gitlab-runner' static_configs: - targets: ['runner1.example.com:9252'] labels: runner_name: 'production-runner-1' - targets: ['runner2.example.com:9252'] labels: runner_name: 'staging-runner-1' metrics_path: /metrics scrape_interval: 30s ``` This configuration adds runner identification to your metrics: ```prometheus gitlab_runner_concurrent{runner_name="production-runner-1"} 10 gitlab_runner_jobs_running_total{runner_name="staging-runner-1"} 3 ``` This configuration enables you to: - Filter metrics by specific runner instances. - Create runner-specific dashboards and alerts. - Track performance across different runner deployments. 
### Available metrics for Operator managed GitLab Runners GitLab Runners managed by the GitLab Runner Operator expose the same metrics as standalone GitLab Runner deployments. To view all available metrics, use `kubectl` to access the metrics endpoint: ```shell kubectl port-forward pod/<runner-pod-name> 9252:9252 curl -s "http://localhost:9252/metrics" | grep -E "# HELP" ``` For a complete list of available metrics, see [Available metrics](#available-metrics). ### Security considerations for Operator managed GitLab Runners When you configure the metrics collection for GitLab Runners managed by the GitLab Runner Operator: - Use Kubernetes `NetworkPolicies` to restrict access to authorized monitoring systems. - Consider using mutual TLS encryption for metric scraping in production environments. ### Troubleshooting Operator managed GitLab Runner monitoring #### Metrics endpoint not accessible If you cannot access the metrics endpoint: 1. Verify that the pod specification includes the metrics port configuration. 1. Ensure that the runner pod is running and healthy: ```shell kubectl get pods -l app.kubernetes.io/component=runner -n gitlab-runner-system kubectl describe pod <runner-pod-name> -n gitlab-runner-system ``` 1. Test the connectivity to the metrics endpoint: ```shell kubectl port-forward pod/<runner-pod-name> 9252:9252 -n gitlab-runner-system curl "http://localhost:9252/metrics" ``` #### Missing metrics in Prometheus If metrics are not appearing in Prometheus: 1. Verify that the `PodMonitor` is correctly configured and applied. 1. Check that the namespace and label selectors match your runner pods. 1. Review Prometheus logs for scraping errors. 1. 
Validate that the `PodMonitor` is discoverable by Prometheus Operator: ```shell kubectl get podmonitor gitlab-runner-metrics -n kube-prometheus-stack kubectl describe podmonitor gitlab-runner-metrics -n kube-prometheus-stack ``` ================================================ FILE: docs/register/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Registering runners --- {{< details >}} - Tier: Free, Premium, Ultimate - Offering: GitLab.com, GitLab Self-Managed, GitLab Dedicated {{< /details >}} {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/3414) in GitLab Runner 15.0, a change to the registration request format prevents the GitLab Runner from communicating with earlier versions of GitLab. You must use a GitLab Runner version that is appropriate for the GitLab version, or upgrade the GitLab application. {{< /history >}} Runner registration is the process that links the runner with one or more GitLab instances. You must register the runner so that it can pick up jobs from the GitLab instance. ## Requirements Before you register a runner: - Install [GitLab Runner](../install/_index.md) on a server separate to where GitLab is installed. - For runner registration with Docker, install [GitLab Runner in a Docker container](../install/docker.md). ## Register with a runner authentication token {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29613) in GitLab 15.10. {{< /history >}} Prerequisites: - Obtain a runner authentication token. You can either: - Create an instance, group, or project runner. For instructions, see [manage runners](https://docs.gitlab.com/ci/runners/runners_scope). - Locate the runner authentication token in the `config.toml` file. Runner authentication tokens have the prefix, `glrt-`. 
After you register the runner, the configuration is saved to the `config.toml`. To register the runner with a [runner authentication token](https://docs.gitlab.com/security/tokens/#runner-authentication-tokens): 1. Run the register command: {{< tabs >}} {{< tab title="Linux" >}} ```shell sudo gitlab-runner register ``` If you are behind a proxy, add an environment variable and then run the registration command: ```shell export HTTP_PROXY=http://yourproxyurl:3128 export HTTPS_PROXY=http://yourproxyurl:3128 sudo -E gitlab-runner register ``` {{< /tab >}} {{< tab title="macOS" >}} ```shell gitlab-runner register ``` {{< /tab >}} {{< tab title="Windows" >}} ```shell .\gitlab-runner.exe register ``` {{< /tab >}} {{< tab title="FreeBSD" >}} ```shell sudo -u gitlab-runner -H /usr/local/bin/gitlab-runner register ``` {{< /tab >}} {{< tab title="Docker" >}} To register with a container, you can either: - Use a short-lived `gitlab-runner` container with the correct configuration volume mount: - For local system volume mounts: ```shell docker run --rm -it -v /srv/gitlab-runner/config:/etc/gitlab-runner gitlab/gitlab-runner register ``` If you used a configuration volume other than `/srv/gitlab-runner/config` during installation, update the command with the correct volume. - For Docker volume mounts: ```shell docker run --rm -it -v gitlab-runner-config:/etc/gitlab-runner gitlab/gitlab-runner:latest register ``` - Use the executable inside an active runner container: ```shell docker exec -it gitlab-runner gitlab-runner register ``` {{< /tab >}} {{< /tabs >}} 1. Enter your GitLab URL: - For runners on GitLab Self-Managed, use the URL for your GitLab instance. For example, if your project is hosted on `gitlab.example.com/yourname/yourproject`, your GitLab instance URL is `https://gitlab.example.com`. - For runners on GitLab.com, the GitLab instance URL is `https://gitlab.com`. 1. Enter the runner authentication token. 1. Enter a description for the runner. 1. 
Enter the job tags, separated by commas. 1. Enter an optional maintenance note for the runner. 1. Enter the type of [executor](../executors/_index.md). - To register multiple runners on the same host machine, each with a different configuration, repeat the `register` command. - To register the same configuration on multiple host machines, use the same runner authentication token for each runner registration. For more information, see [Reusing a runner configuration](../fleet_scaling/_index.md#reusing-a-runner-configuration). You can also use the [non-interactive mode](../commands/_index.md#non-interactive-registration) to use additional arguments to register the runner: {{< tabs >}} {{< tab title="Linux" >}} ```shell sudo gitlab-runner register \ --non-interactive \ --url "https://gitlab.com/" \ --token "$RUNNER_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" ``` {{< /tab >}} {{< tab title="macOS" >}} ```shell gitlab-runner register \ --non-interactive \ --url "https://gitlab.com/" \ --token "$RUNNER_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" ``` {{< /tab >}} {{< tab title="Windows" >}} ```shell .\gitlab-runner.exe register \ --non-interactive \ --url "https://gitlab.com/" \ --token "$RUNNER_TOKEN" \ --executor "docker-windows" \ --docker-image mcr.microsoft.com/windows/servercore:1809_amd64 \ --description "docker-runner" ``` {{< /tab >}} {{< tab title="FreeBSD" >}} ```shell sudo -u gitlab-runner -H /usr/local/bin/gitlab-runner register --non-interactive \ --url "https://gitlab.com/" \ --token "$RUNNER_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" ``` {{< /tab >}} {{< tab title="Docker" >}} ```shell docker run --rm -v /srv/gitlab-runner/config:/etc/gitlab-runner gitlab/gitlab-runner register \ --non-interactive \ --url "https://gitlab.com/" \ --token "$RUNNER_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ 
--description "docker-runner" ``` {{< /tab >}} {{< /tabs >}} ## Register with a runner registration token (deprecated) > [!warning] > Runner registration tokens and several runner configuration arguments were > [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/380872). They are scheduled for removal > in GitLab 20.0. Use runner authentication tokens instead. For more information, see > [Migrating to the new runner registration workflow](https://docs.gitlab.com/ci/runners/new_creation_workflow/). Prerequisites: - Runner registration tokens must be [enabled](https://docs.gitlab.com/administration/settings/continuous_integration/#control-runner-registration) in the Admin Area. - Obtain a runner registration token at the desired instance, group, or project. For instructions, see [manage runners](https://docs.gitlab.com/ci/runners/runners_scope). After you register the runner, the configuration is saved to the `config.toml`. To register the runner with a [runner registration token](https://docs.gitlab.com/security/tokens/#runner-registration-tokens-legacy): 1. 
Run the register command: {{< tabs >}} {{< tab title="Linux" >}} ```shell sudo gitlab-runner register ``` If you are behind a proxy, add an environment variable and then run the registration command: ```shell export HTTP_PROXY=http://yourproxyurl:3128 export HTTPS_PROXY=http://yourproxyurl:3128 sudo -E gitlab-runner register ``` {{< /tab >}} {{< tab title="macOS" >}} ```shell gitlab-runner register ``` {{< /tab >}} {{< tab title="Windows" >}} ```shell .\gitlab-runner.exe register ``` {{< /tab >}} {{< tab title="FreeBSD" >}} ```shell sudo -u gitlab-runner -H /usr/local/bin/gitlab-runner register ``` {{< /tab >}} {{< tab title="Docker" >}} To launch a short-lived `gitlab-runner` container to register the container you created during installation: - For local system volume mounts: ```shell docker run --rm -it -v /srv/gitlab-runner/config:/etc/gitlab-runner gitlab/gitlab-runner register ``` If you used a configuration volume other than `/srv/gitlab-runner/config` during installation, update the command with the correct volume. - For Docker volume mounts: ```shell docker run --rm -it -v gitlab-runner-config:/etc/gitlab-runner gitlab/gitlab-runner:latest register ``` {{< /tab >}} {{< /tabs >}} 1. Enter your GitLab URL: - For runners on GitLab Self-Managed, use the URL for your GitLab instance. For example, if your project is hosted on `gitlab.example.com/yourname/yourproject`, your GitLab instance URL is `https://gitlab.example.com`. - For GitLab.com, the GitLab instance URL is `https://gitlab.com`. 1. Enter the token you obtained to register the runner. 1. Enter a description for the runner. 1. Enter the job tags, separated by commas. 1. Enter an optional maintenance note for the runner. 1. Enter the type of [executor](../executors/_index.md). To register multiple runners on the same host machine, each with a different configuration, repeat the `register` command. 
You can also use the [non-interactive mode](../commands/_index.md#non-interactive-registration) to use additional arguments to register the runner: {{< tabs >}} {{< tab title="Linux" >}} ```shell sudo gitlab-runner register \ --non-interactive \ --url "https://gitlab.com/" \ --registration-token "$PROJECT_REGISTRATION_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" \ --maintenance-note "Free-form maintainer notes about this runner" \ --tag-list "docker,aws" \ --run-untagged="true" \ --locked="false" \ --access-level="not_protected" ``` {{< /tab >}} {{< tab title="macOS" >}} ```shell gitlab-runner register \ --non-interactive \ --url "https://gitlab.com/" \ --registration-token "$PROJECT_REGISTRATION_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" \ --maintenance-note "Free-form maintainer notes about this runner" \ --tag-list "docker,aws" \ --run-untagged="true" \ --locked="false" \ --access-level="not_protected" ``` {{< /tab >}} {{< tab title="Windows" >}} ```shell .\gitlab-runner.exe register \ --non-interactive \ --url "https://gitlab.com/" \ --registration-token "$PROJECT_REGISTRATION_TOKEN" \ --executor "docker-windows" \ --docker-image mcr.microsoft.com/windows/servercore:1809_amd64 \ --description "docker-runner" \ --maintenance-note "Free-form maintainer notes about this runner" \ --tag-list "docker,aws" \ --run-untagged="true" \ --locked="false" \ --access-level="not_protected" ``` {{< /tab >}} {{< tab title="FreeBSD" >}} ```shell sudo -u gitlab-runner -H /usr/local/bin/gitlab-runner register --non-interactive \ --url "https://gitlab.com/" \ --registration-token "$PROJECT_REGISTRATION_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" \ --maintenance-note "Free-form maintainer notes about this runner" \ --tag-list "docker,aws" \ --run-untagged="true" \ --locked="false" \ --access-level="not_protected" ``` {{< /tab >}} {{< tab 
title="Docker" >}} ```shell docker run --rm -v /srv/gitlab-runner/config:/etc/gitlab-runner gitlab/gitlab-runner register \ --non-interactive \ --url "https://gitlab.com/" \ --registration-token "$PROJECT_REGISTRATION_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" \ --maintenance-note "Free-form maintainer notes about this runner" \ --tag-list "docker,aws" \ --run-untagged="true" \ --locked="false" \ --access-level="not_protected" ``` {{< /tab >}} {{< /tabs >}} - `--access-level` creates a [protected runner](https://docs.gitlab.com/ci/runners/configure_runners/#prevent-runners-from-revealing-sensitive-information). - For a protected runner, use the `--access-level="ref_protected"` parameter. - For an unprotected runner, use `--access-level="not_protected"` or leave the value undefined. - `--maintenance-note` allows adding information you might find helpful for runner maintenance. The maximum length is 255 characters. ### Legacy-compatible registration process {{< history >}} - [Introduced](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4157) in GitLab 16.2. {{< /history >}} Runner registration tokens and several runner configuration arguments were [deprecated](https://gitlab.com/gitlab-org/gitlab/-/issues/379743). They are scheduled for removal in GitLab 20.0. To ensure minimal disruption to your automation workflow, the `legacy-compatible registration process` triggers if a runner authentication token is specified in the legacy parameter `--registration-token`. The legacy-compatible registration process ignores the following command-line parameters. These parameters can only be configured when a runner is created in the UI or with the API. 
- `--locked` - `--access-level` - `--run-untagged` - `--maximum-timeout` - `--paused` - `--tag-list` - `--maintenance-note` ## Register with a configuration template You can use a configuration template to register a runner with settings that are not supported by the `register` command. Prerequisites: - The volume for the location of the template file must be mounted on the GitLab Runner container. - A runner authentication or registration token: - Obtain a runner authentication token (recommended). You can either: - Obtain a runner authentication token at the desired instance, group, or project. For instructions, see [manage runners](https://docs.gitlab.com/ci/runners/runners_scope). - Locate the runner authentication token in the `config.toml` file. Runner authentication tokens have the prefix, `glrt-`. - Obtain a runner registration token (deprecated) for an instance, group, or project. For instructions, see [manage runners](https://docs.gitlab.com/ci/runners/runners_scope). The configuration template can be used for automated environments that do not support some arguments in the `register` command due to: - Size limits on environment variables based on the environment. - Command-line options that are not available for executor volumes for Kubernetes. > [!warning] > The configuration template supports only a single [`[[runners]]`](../configuration/advanced-configuration.md#the-runners-section) > section and does not support global options. To register a runner: 1. Create a configuration template file with the `.toml` format and add your specifications. For example: ```toml [[runners]] [runners.kubernetes] [runners.kubernetes.volumes] [[runners.kubernetes.volumes.empty_dir]] name = "empty_dir" mount_path = "/path/to/empty_dir" medium = "Memory" ``` 1. Add the path to the file. 
You can use either: - The [non-interactive mode](../commands/_index.md#non-interactive-registration) in the command line: ```shell $ sudo gitlab-runner register \ --template-config /tmp/test-config.template.toml \ --non-interactive \ --url "https://gitlab.com" \ --token \ "# --registration-token if using the deprecated runner registration token" --name test-runner \ --executor kubernetes --host = "http://localhost:9876/" ``` - The environment variable in the `.gitlab.yaml` file: ```yaml variables: TEMPLATE_CONFIG_FILE = ``` If you update the environment variable, you do not need to add the file path in the `register` command each time you register. After you register the runner, the settings in the configuration template are merged with the `[[runners]]` entry created in the `config.toml`: ```toml concurrent = 1 check_interval = 0 [session_server] session_timeout = 1800 [[runners]] name = "test-runner" url = "https://gitlab.com" token = "glrt-" executor = "kubernetes" [runners.kubernetes] host = "http://localhost:9876/" bearer_token_overwrite_allowed = false image = "" namespace = "" namespace_overwrite_allowed = "" privileged = false service_account_overwrite_allowed = "" pod_labels_overwrite_allowed = "" pod_annotations_overwrite_allowed = "" [runners.kubernetes.volumes] [[runners.kubernetes.volumes.empty_dir]] name = "empty_dir" mount_path = "/path/to/empty_dir" medium = "Memory" ``` Template settings are merged only for options that are: - Empty strings - Null or non-existent entries - Zeroes Command-line arguments or environment variables take precedence over settings in the configuration template. For example, if the template specifies a `docker` executor, but the command line specifies `shell`, the configured executor is `shell`. ## Register a runner for GitLab Community Edition integration tests To test GitLab Community Edition integrations, use a configuration template to register a runner with a confined Docker executor. 1. 
Create a [project runner](https://docs.gitlab.com/ci/runners/runners_scope/#create-a-project-runner-with-a-runner-authentication-token). 1. Create a template with the `[[runners.docker.services]]` section: ```shell $ cat > /tmp/test-config.template.toml << EOF [[runners]] [runners.docker] [[runners.docker.services]] name = "mysql:latest" [[runners.docker.services]] name = "redis:latest" EOF ``` 1. Register the runner: {{< tabs >}} {{< tab title="Linux" >}} ```shell sudo gitlab-runner register \ --non-interactive \ --url "https://gitlab.com" \ --token "$RUNNER_AUTHENTICATION_TOKEN" \ --template-config /tmp/test-config.template.toml \ --description "gitlab-ce-ruby-3.1" \ --executor "docker" \ --docker-image ruby:3.1 ``` {{< /tab >}} {{< tab title="macOS" >}} ```shell gitlab-runner register \ --non-interactive \ --url "https://gitlab.com" \ --token "$RUNNER_AUTHENTICATION_TOKEN" \ --template-config /tmp/test-config.template.toml \ --description "gitlab-ce-ruby-3.1" \ --executor "docker" \ --docker-image ruby:3.1 ``` {{< /tab >}} {{< tab title="Windows" >}} ```shell .\gitlab-runner.exe register \ --non-interactive \ --url "https://gitlab.com" \ --token "$RUNNER_AUTHENTICATION_TOKEN" \ --template-config /tmp/test-config.template.toml \ --description "gitlab-ce-ruby-3.1" \ --executor "docker" \ --docker-image ruby:3.1 ``` {{< /tab >}} {{< tab title="FreeBSD" >}} ```shell sudo -u gitlab-runner -H /usr/local/bin/gitlab-runner register --non-interactive \ --url "https://gitlab.com" \ --token "$RUNNER_AUTHENTICATION_TOKEN" \ --template-config /tmp/test-config.template.toml \ --description "gitlab-ce-ruby-3.1" \ --executor "docker" \ --docker-image ruby:3.1 ``` {{< /tab >}} {{< tab title="Docker" >}} ```shell docker run --rm -v /srv/gitlab-runner/config:/etc/gitlab-runner gitlab/gitlab-runner register \ --non-interactive \ --url "https://gitlab.com" \ --token "$RUNNER_AUTHENTICATION_TOKEN" \ --template-config /tmp/test-config.template.toml \ --description "gitlab-ce-ruby-3.1" 
\ --executor "docker" \ --docker-image ruby:3.1 ``` {{< /tab >}} {{< /tabs >}} For more configuration options, see [Advanced configuration](../configuration/advanced-configuration.md). ## Registering runners with Docker After you register the runner with a Docker container: - The configuration is written to your configuration volume. For example, `/srv/gitlab-runner/config`. - The container uses the configuration volume to load the runner. > [!note] > If `gitlab-runner restart` runs in a Docker container, GitLab Runner starts a new process instead of restarting the existing process. > To apply configuration changes, restart the Docker container instead. ## Troubleshooting ### Error: `Check registration token` The `check registration token` error message displays when the GitLab instance does not recognize the runner registration token entered during registration. This issue can occur when either: - The instance, group, or project runner registration token was changed in GitLab. - An incorrect runner registration token was entered. When this error occurs, you can ask a GitLab administrator to: - Verify that the runner registration token is valid. - Confirm that runner registration in the project or group is [permitted](https://docs.gitlab.com/administration/settings/continuous_integration/#restrict-runner-registration-for-a-specific-group). ### Error: `410 Gone - runner registration disallowed` The `410 Gone - runner registration disallowed` error message displays when runner registration through registration tokens has been disabled. When this error occurs, you can ask a GitLab administrator to: - Verify that the runner registration token is valid. - Confirm that runner registration in the instance is [permitted](https://docs.gitlab.com/administration/settings/continuous_integration/#control-runner-registration). 
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runner Autoscaling
Choose a distribution that Docker and GitLab Runner support, like Ubuntu, Debian, CentOS, or RHEL.
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runner instance group autoscaler
![Overview of GitLab Next Runner Autoscaling](img/next-runner-autoscaling-overview.png) ## Configure the runner manager You must [configure the runner manager](_index.md#configure-the-runner-manager) to use the GitLab Runner instance group autoscaler. 1. Create an instance to host the runner manager. This **must not** be a spot instance (AWS), or spot virtual machine (GCP or Azure). 1. [Install GitLab Runner](../install/linux-repository.md) on the instance. 1. Add the cloud provider credentials to the runner manager host machine. > [!note] > You can host the runner manager in a container. > For GitLab.com and GitLab Dedicated [hosted runners](https://docs.gitlab.com/ci/runners/), the runner manager is hosted on a virtual machine instance. ### Example credentials configuration for GitLab Runner instance group autoscaler You can use an [AWS Identity and Access Management](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2_instance-profiles.html) (IAM) instance profile for the runner manager in the AWS environment. If you do not want to host the runner manager in AWS, you can use a credentials file. For example: ``` toml ## credentials_file [default] aws_access_key_id=__REDACTED__ aws_secret_access_key=__REDACTED__ ``` The credentials file is optional. ## Supported public cloud instances The following autoscaling options are supported for public cloud compute instances: - Amazon Web Services EC2 instances - Google Compute Engine - Microsoft Azure Virtual Machines These cloud instances are supported by the GitLab Runner Docker Machine autoscaler as well. 
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Security for self-managed runners
- Depending on the executor, a job can install malicious code on the virtual machine where the runner is hosted. - Secret variables exposed to jobs running in a compromised environment can be stolen, including but not limited to the `CI_JOB_TOKEN`. - Users with the Developer role have access to submodules associated with the project, even if they don't have access to the upstream projects of the submodule. ## Security risks for different executors Depending on the executor you are using, you can face different security risks. ### Usage of Shell executor **High-security risks exist to your runner host and network when running builds with the `shell` executor**. The jobs are run with the permissions of the GitLab Runner's user and can steal code from other projects that are run on this server. Use it only for running trusted builds. ### Usage of Docker executor **Docker can be considered safe when running in non-privileged mode**. To make such a configuration more secure, run jobs as a non-root user in Docker containers with disabled `sudo` or dropped `SETUID` and `SETGID` capabilities. More granular permissions can be configured in non-privileged mode via the `cap_add`/`cap_drop` settings. > [!warning] > Privileged containers in Docker have all the root capabilities of the host VM. > For more information, check out the official Docker documentation > on [Runtime privilege and Linux capabilities](https://docs.docker.com/engine/containers/run/#runtime-privilege-and-linux-capabilities) It is **not advised** to run containers in privileged mode. When privileged mode is enabled, a user running a CI/CD job could gain full root access to the runner's host system, permission to mount and detach volumes, and run nested containers. By enabling privileged mode, you are effectively disabling all the container's security mechanisms and exposing your host to privilege escalation, which can lead to container breakout. 
In particular, you should use the `always` pull policy if you are hosting a public instance runner with the Docker or Kubernetes executors.
**Parallels executor is the safest possible option** because it uses full system virtualization, with VM machines that are configured to run in isolated mode. It blocks access to all peripherals and shared folders.
## Security hardening options ### Reduce the security risk of using privileged containers If you must run CI/CD jobs that require the use of Docker's `--privileged` flag, you can take these steps to reduce the security risk: - Run Docker containers with the `--privileged` flag enabled only on isolated and ephemeral virtual machines. - Configure dedicated runners that are meant to execute jobs that require the use of Docker's `--privileged` flag. Then configure these runners to execute jobs only on protected branches. ### Network segmentation GitLab Runner is designed to run user-controlled scripts. To reduce the attack surface if a job is malicious, you can consider running them in their own network segment. This would provide network separation from other infrastructure and services. All needs are unique, but for a cloud environment, this could include: - Configuring runner virtual machines in their own network segment - Blocking SSH access from the Internet to runner virtual machines - Restricting traffic between runner virtual machines - Filtering access to cloud provider metadata endpoints > [!note] > All runners will need outbound network connectivity to > GitLab.com or your GitLab instance. > Most jobs will also require outbound network connectivity to > the Internet - for dependency pulling etc. ### Secure the runner host If you are using a static host for a runner, whether bare-metal or virtual machine, you should implement security best practices for the host operating system. Malicious code executed in the context of a CI job could compromise the host, so security protocols can help mitigate the impact. Other points to keep in mind include securing or removing files such as SSH keys from the host system that may enable an attacker to access other endpoints in the environment. 
info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Types of shells supported by GitLab Runner
Default shell for new runner registration on Windows, and for jobs with the `shell` executor. | If you want to select a particular shell to use other than the default, you must [specify the shell](../executors/shell.md#selecting-your-shell) in your `config.toml` file. ## Sh/Bash shells Sh/Bash is the default shell used on all Unix based systems. The bash script used in `.gitlab-ci.yml` is executed by piping the shell script to one of the following commands: ```shell # This command is used if the build should be executed in context # of another user (the shell executor) cat generated-bash-script | su --shell /bin/bash --login user # This command is used if the build should be executed using # the current user, but in a login environment cat generated-bash-script | /bin/bash --login # This command is used if the build should be executed in # a Docker environment cat generated-bash-script | /bin/bash ``` ### Shell profile loading For certain executors, the runner passes the `--login` flag as shown above, which also loads the shell profile. Anything that you have in your `.bashrc`, `.bash_logout`, [or any other dotfile](https://tldp.org/LDP/Bash-Beginners-Guide/html/sect_03_01.html#sect_03_01_02), is executed in your job. If a [job fails on the `Prepare environment`](../faq/_index.md#job-failed-system-failure-preparing-environment) stage, it is likely that something in the shell profile is causing the failure. A common failure is when there is a `.bash_logout` that tries to clear the console. To troubleshoot this error, check `/home/gitlab-runner/.bash_logout`. 
$ErrorActionPreference = "Continue" # This will be set to 'Stop' when targeting PowerShell Core
& { $CI="true" $env:CI=$CI $CI_COMMIT_SHA="db45ad9af9d7af5e61b829442fd893d96e31250c" $env:CI_COMMIT_SHA=$CI_COMMIT_SHA $CI_COMMIT_BEFORE_SHA="d63117656af6ff57d99e50cc270f854691f335ad" $env:CI_COMMIT_BEFORE_SHA=$CI_COMMIT_BEFORE_SHA $CI_COMMIT_REF_NAME="main" $env:CI_COMMIT_REF_NAME=$CI_COMMIT_REF_NAME $CI_JOB_ID="1" $env:CI_JOB_ID=$CI_JOB_ID $CI_REPOSITORY_URL="Z:\Gitlab\tests\test" $env:CI_REPOSITORY_URL=$CI_REPOSITORY_URL $CI_PROJECT_ID="1" $env:CI_PROJECT_ID=$CI_PROJECT_ID $CI_PROJECT_DIR="Z:\Gitlab\tests\test\builds\0\project-1" $env:CI_PROJECT_DIR=$CI_PROJECT_DIR $CI_SERVER="yes" $env:CI_SERVER=$CI_SERVER $CI_SERVER_NAME="GitLab CI" $env:CI_SERVER_NAME=$CI_SERVER_NAME $CI_SERVER_VERSION="" $env:CI_SERVER_VERSION=$CI_SERVER_VERSION $CI_SERVER_REVISION="" $env:CI_SERVER_REVISION=$CI_SERVER_REVISION $GITLAB_CI="true" $env:GITLAB_CI=$GITLAB_CI $GIT_SSL_CAINFO="" New-Item -ItemType directory -Force -Path "C:\GitLab-Runner\builds\0\project-1.tmp" | out-null $GIT_SSL_CAINFO | Out-File "C:\GitLab-Runner\builds\0\project-1.tmp\GIT_SSL_CAINFO" $GIT_SSL_CAINFO="C:\GitLab-Runner\builds\0\project-1.tmp\GIT_SSL_CAINFO" $env:GIT_SSL_CAINFO=$GIT_SSL_CAINFO $CI_SERVER_TLS_CA_FILE="" New-Item -ItemType directory -Force -Path "C:\GitLab-Runner\builds\0\project-1.tmp" | out-null $CI_SERVER_TLS_CA_FILE | Out-File "C:\GitLab-Runner\builds\0\project-1.tmp\CI_SERVER_TLS_CA_FILE" $CI_SERVER_TLS_CA_FILE="C:\GitLab-Runner\builds\0\project-1.tmp\CI_SERVER_TLS_CA_FILE" $env:CI_SERVER_TLS_CA_FILE=$CI_SERVER_TLS_CA_FILE echo "Cloning repository..." 
if( (Get-Command -Name Remove-Item2 -Module NTFSSecurity -ErrorAction SilentlyContinue) -and (Test-Path "C:\GitLab-Runner\builds\0\project-1" -PathType Container) ) { Remove-Item2 -Force -Recurse "C:\GitLab-Runner\builds\0\project-1" } elseif(Test-Path "C:\GitLab-Runner\builds\0\project-1") { Remove-Item -Force -Recurse "C:\GitLab-Runner\builds\0\project-1" } & "git" "clone" "https://gitlab.com/group/project.git" "Z:\Gitlab\tests\test\builds\0\project-1" if(!$?) { Exit $LASTEXITCODE } cd "C:\GitLab-Runner\builds\0\project-1" if(!$?) { Exit $LASTEXITCODE } echo "Checking out db45ad9a as main..." & "git" "checkout" "db45ad9af9d7af5e61b829442fd893d96e31250c" if(!$?) { Exit $LASTEXITCODE } if(Test-Path "..\..\..\cache\project-1\pages\main\cache.tgz" -PathType Leaf) { echo "Restoring cache..." & "gitlab-runner-windows-amd64.exe" "extract" "--file" "..\..\..\cache\project-1\pages\main\cache.tgz" if(!$?) { Exit $LASTEXITCODE } } else { if(Test-Path "..\..\..\cache\project-1\pages\main\cache.tgz" -PathType Leaf) { echo "Restoring cache..." & "gitlab-runner-windows-amd64.exe" "extract" "--file" "..\..\..\cache\project-1\pages\main\cache.tgz" if(!$?) { Exit $LASTEXITCODE } } } } if(!$?) 
{ Exit $LASTEXITCODE } & { $CI="true" $env:CI=$CI $CI_COMMIT_SHA="db45ad9af9d7af5e61b829442fd893d96e31250c" $env:CI_COMMIT_SHA=$CI_COMMIT_SHA $CI_COMMIT_BEFORE_SHA="d63117656af6ff57d99e50cc270f854691f335ad" $env:CI_COMMIT_BEFORE_SHA=$CI_COMMIT_BEFORE_SHA $CI_COMMIT_REF_NAME="main" $env:CI_COMMIT_REF_NAME=$CI_COMMIT_REF_NAME $CI_JOB_ID="1" $env:CI_JOB_ID=$CI_JOB_ID $CI_REPOSITORY_URL="Z:\Gitlab\tests\test" $env:CI_REPOSITORY_URL=$CI_REPOSITORY_URL $CI_PROJECT_ID="1" $env:CI_PROJECT_ID=$CI_PROJECT_ID $CI_PROJECT_DIR="Z:\Gitlab\tests\test\builds\0\project-1" $env:CI_PROJECT_DIR=$CI_PROJECT_DIR $CI_SERVER="yes" $env:CI_SERVER=$CI_SERVER $CI_SERVER_NAME="GitLab CI" $env:CI_SERVER_NAME=$CI_SERVER_NAME $CI_SERVER_VERSION="" $env:CI_SERVER_VERSION=$CI_SERVER_VERSION $CI_SERVER_REVISION="" $env:CI_SERVER_REVISION=$CI_SERVER_REVISION $GITLAB_CI="true" $env:GITLAB_CI=$GITLAB_CI $GIT_SSL_CAINFO="" New-Item -ItemType directory -Force -Path "C:\GitLab-Runner\builds\0\project-1.tmp" | out-null $GIT_SSL_CAINFO | Out-File "C:\GitLab-Runner\builds\0\project-1.tmp\GIT_SSL_CAINFO" $GIT_SSL_CAINFO="C:\GitLab-Runner\builds\0\project-1.tmp\GIT_SSL_CAINFO" $env:GIT_SSL_CAINFO=$GIT_SSL_CAINFO $CI_SERVER_TLS_CA_FILE="" New-Item -ItemType directory -Force -Path "C:\GitLab-Runner\builds\0\project-1.tmp" | out-null $CI_SERVER_TLS_CA_FILE | Out-File "C:\GitLab-Runner\builds\0\project-1.tmp\CI_SERVER_TLS_CA_FILE" $CI_SERVER_TLS_CA_FILE="C:\GitLab-Runner\builds\0\project-1.tmp\CI_SERVER_TLS_CA_FILE" $env:CI_SERVER_TLS_CA_FILE=$CI_SERVER_TLS_CA_FILE cd "C:\GitLab-Runner\builds\0\project-1" if(!$?) { Exit $LASTEXITCODE } echo "`$ echo true" echo true } if(!$?) 
{ Exit $LASTEXITCODE } & { $CI="true" $env:CI=$CI $CI_COMMIT_SHA="db45ad9af9d7af5e61b829442fd893d96e31250c" $env:CI_COMMIT_SHA=$CI_COMMIT_SHA $CI_COMMIT_BEFORE_SHA="d63117656af6ff57d99e50cc270f854691f335ad" $env:CI_COMMIT_BEFORE_SHA=$CI_COMMIT_BEFORE_SHA $CI_COMMIT_REF_NAME="main" $env:CI_COMMIT_REF_NAME=$CI_COMMIT_REF_NAME $CI_JOB_ID="1" $env:CI_JOB_ID=$CI_JOB_ID $CI_REPOSITORY_URL="Z:\Gitlab\tests\test" $env:CI_REPOSITORY_URL=$CI_REPOSITORY_URL $CI_PROJECT_ID="1" $env:CI_PROJECT_ID=$CI_PROJECT_ID $CI_PROJECT_DIR="Z:\Gitlab\tests\test\builds\0\project-1" $env:CI_PROJECT_DIR=$CI_PROJECT_DIR $CI_SERVER="yes" $env:CI_SERVER=$CI_SERVER $CI_SERVER_NAME="GitLab CI" $env:CI_SERVER_NAME=$CI_SERVER_NAME $CI_SERVER_VERSION="" $env:CI_SERVER_VERSION=$CI_SERVER_VERSION $CI_SERVER_REVISION="" $env:CI_SERVER_REVISION=$CI_SERVER_REVISION $GITLAB_CI="true" $env:GITLAB_CI=$GITLAB_CI $GIT_SSL_CAINFO="" New-Item -ItemType directory -Force -Path "C:\GitLab-Runner\builds\0\project-1.tmp" | out-null $GIT_SSL_CAINFO | Out-File "C:\GitLab-Runner\builds\0\project-1.tmp\GIT_SSL_CAINFO" $GIT_SSL_CAINFO="C:\GitLab-Runner\builds\0\project-1.tmp\GIT_SSL_CAINFO" $env:GIT_SSL_CAINFO=$GIT_SSL_CAINFO $CI_SERVER_TLS_CA_FILE="" New-Item -ItemType directory -Force -Path "C:\GitLab-Runner\builds\0\project-1.tmp" | out-null $CI_SERVER_TLS_CA_FILE | Out-File "C:\GitLab-Runner\builds\0\project-1.tmp\CI_SERVER_TLS_CA_FILE" $CI_SERVER_TLS_CA_FILE="C:\GitLab-Runner\builds\0\project-1.tmp\CI_SERVER_TLS_CA_FILE" $env:CI_SERVER_TLS_CA_FILE=$CI_SERVER_TLS_CA_FILE cd "C:\GitLab-Runner\builds\0\project-1" if(!$?) { Exit $LASTEXITCODE } echo "Archiving cache..." & "gitlab-runner-windows-amd64.exe" "archive" "--file" "..\..\..\cache\project-1\pages\main\cache.tgz" "--path" "vendor" if(!$?) { Exit $LASTEXITCODE } } if(!$?) 
{ Exit $LASTEXITCODE } ``` ### Running Windows Batch You can execute Batch scripts from PowerShell using `Start-Process "cmd.exe" "/c C:\Path\file.bat"` for old Batch scripts not ported to PowerShell. ### Access `CMD` shell when PowerShell is the default The [Call `CMD` From Default PowerShell in GitLab CI](https://gitlab.com/guided-explorations/microsoft/windows/call-cmd-from-powershell) project demonstrates how to gain access to the `CMD` shell. This approach works when PowerShell is the default shell on a runner. ### Video walkthrough of working PowerShell examples The [Slicing and Dicing with PowerShell on GitLab CI](https://www.youtube.com/watch?v=UZvtAYwruFc) video is a walkthrough of the [PowerShell Pipelines on GitLab CI](https://gitlab.com/guided-explorations/microsoft/powershell/powershell-pipelines-on-gitlab-ci) Guided Exploration project. It was tested on: - Windows PowerShell and PowerShell Core 7 on [hosted runners on Windows for GitLab.com](https://docs.gitlab.com/ci/runners/hosted_runners/windows/). - PowerShell Core 7 in Linux Containers with the [Docker-Machine runner](../executors/docker_machine.md). The example can be copied to your own group or instance for testing. More details on what other GitLab CI patterns are demonstrated are available at the project page. 
================================================ FILE: docs-locale/.markdownlint/.markdownlint-cli2.yaml ================================================ --- # Base Markdownlint configuration # Extended Markdownlint configuration in docs/.markdownlint/ # See https://github.com/DavidAnson/markdownlint/blob/main/doc/Rules.md for explanations of each rule noInlineConfig: true config: # First, set the default default: true # Per-rule settings in alphabetical order code-block-style: # MD046 style: "fenced" emphasis-style: false # MD049 header-style: # MD003 style: "atx" hr-style: # MD035 style: "---" line-length: false no-duplicate-heading: # MD024 siblings_only: true no-emphasis-as-heading: false # MD036 no-inline-html: false # MD033 no-trailing-punctuation: # MD026 punctuation: ".,;:!。,;:!" no-trailing-spaces: false # MD009 ol-prefix: # MD029 style: "one" reference-links-images: false # MD052 ul-style: # MD004 style: "dash" link-fragments: false # MD051 table-column-style: false # MD060 ================================================ FILE: docs-locale/ja-jp/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runner --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerは、GitLab CI/CDと連携してパイプラインでジョブを実行するアプリケーションです。 開発者はGitLabにコードをプッシュするときに、自動化されたタスクを`.gitlab-ci.yml`ファイルで定義できます。これらのタスクには、テストの実行、アプリケーションのビルド、コードのデプロイなどが含まれる場合があります。GitLab Runnerは、これらのタスクをコンピューティングインフラストラクチャ上で実行するアプリケーションです。 管理者として、これらのCI/CDジョブが実行されるインフラストラクチャの提供と管理を行う責任があります。これには、GitLab Runnerアプリケーションのインストール、それらの設定、組織のCI/CDワークロードを処理するための十分な容量の確保が含まれます。 ## GitLab Runnerの機能 {#what-gitlab-runner-does} GitLab 
Runnerは、GitLabインスタンスに接続し、CI/CDジョブを待機します。パイプラインが実行されると、GitLabは利用可能なrunnerにジョブを送信します。runnerはジョブを実行し、その結果をGitLabに報告します。 GitLab Runnerには次の機能があります。 - 複数のジョブを同時に実行する。 - 複数のサーバーで複数のトークンを使用する(プロジェクトごとにも可能)。 - トークンあたりの同時実行ジョブの数を制限する。 - ジョブを次のいずれかの方法で実行する: - ローカル環境での実行 - Dockerコンテナを使用する - Dockerコンテナを使用し、SSH経由でジョブを実行する - 各種クラウドや仮想マシンハイパーバイザーでオートスケールとDockerコンテナを使用する - リモートSSHサーバーに接続する - Go言語で記述され、他の要件のない単一バイナリとして配布される。 - Bash、PowerShell Core、およびWindows PowerShellをサポートする。 - GNU/Linux、macOS、およびWindows(Dockerを実行できる環境)で動作する。 - ジョブ実行環境のカスタマイズが可能。 - 再起動なしで設定を自動的に再読み込みする。 - Docker、Docker-SSH、Parallels、SSHなどの実行環境に対応したシームレスなセットアップ。 - Dockerコンテナのキャッシュを有効にする。 - GNU/Linux、macOS、およびWindowsで、サービスとしてシームレスにインストールできます。 - PrometheusメトリクスHTTPサーバーを搭載。 - Prometheusメトリクスやその他のジョブ固有のデータをモニタリングし、GitLabに送信するレフェリーワーカー機能。 ## Runnerの実行フロー {#runner-execution-flow} 次の図は、Runnerが登録される仕組みと、ジョブがリクエストおよび処理される仕組みを示しています。また、どのアクションが[登録トークンと認証トークン](https://docs.gitlab.com/api/runners/#registration-and-authentication-tokens) 、および[ジョブトークン](https://docs.gitlab.com/ci/jobs/ci_job_token/)を使用するかについても説明します。 ```mermaid sequenceDiagram participant GitLab participant GitLabRunner participant Executor opt registration GitLabRunner ->>+ GitLab: POST /api/v4/runners with registration_token GitLab -->>- GitLabRunner: Registered with runner_token end loop job requesting and handling GitLabRunner ->>+ GitLab: POST /api/v4/jobs/request with runner_token GitLab -->>+ GitLabRunner: job payload with job_token GitLabRunner ->>+ Executor: Job payload Executor ->>+ GitLab: clone sources with job_token Executor ->>+ GitLab: download artifacts with job_token Executor -->>- GitLabRunner: return job output and status GitLabRunner -->>- GitLab: updating job output and status with job_token end ``` ## Runnerのデプロイオプション {#runner-deployment-options} ### GitLabでホストされるRunner {#gitlab-hosted-runners} 
[GitLabがホストするrunner](https://docs.gitlab.com/ci/runners/)はGitLabによって管理され、GitLab.comで利用可能です。これらのrunnerをインストールまたはメンテナンスする必要はありません。GitLabがサービスとして提供します。ただし、実行環境に対する制御は制限されており、インフラストラクチャをカスタマイズすることはできません。 ### Self-Managed Runner {#self-managed-runners} Self-Managed Runnerは、各自のインフラストラクチャでインストール、設定および管理するGitLab Runnerインスタンスです。すべてのGitLabインストールでSelf-Managed Runnerを[インストール](install/_index.md)して登録できます。管理者は通常、自己管理runnerを使用します。 GitLabがホストおよび管理するGitLab-hosted Runnerとは異なり、セルフマネージドRunnerは完全に制御できます。 ## GitLab Runnerのバージョン {#gitlab-runner-versions} 互換性の理由から、GitLab Runnerの[major.minor](https://en.wikipedia.org/wiki/Software_versioning)バージョンは、GitLabのメジャーバージョンおよびマイナーバージョンと同期している必要があります。古いバージョンのRunnerが、新しいGitLabバージョンでも動作する可能性があります(またはその逆でも動作する可能性があります)。ただし、バージョンが異なる場合、一部の機能が利用できなかったり、正常に動作しなかったりする可能性があります。 マイナーバージョンの更新間では、下位互換性が保証されています。ただし、GitLabのマイナーバージョンアップデートで新機能が追加されると、その機能を利用するにはGitLab Runnerも同じマイナーバージョンにアップデートしなければならない場合もあります。 独自のRunnerをホストしながらGitLab.comでリポジトリをホストしている場合は、GitLab.comが[継続的に更新される](https://gitlab.com/gitlab-org/release/tasks/-/issues)ため、常にGitLab Runnerを最新バージョンに[更新](install/_index.md)してください。 ## トラブルシューティング {#troubleshooting} 一般的な問題を[解決する](faq/_index.md)方法について説明します。 ## 用語集 {#glossary} - **GitLab Runner**: ターゲットコンピューティングプラットフォームで、GitLabパイプラインからCI/CDジョブを実行するアプリケーション。 - **Runner**: ジョブを実行できる、GitLab Runnerの設定済みインスタンス。executorのタイプに応じて、このマシンはRunnerマネージャーのローカル(`shell` executorまたは`docker` executor)であるか、またはオートスケーラーによって作成されたリモートマシン(`docker-autoscaler`または`kubernetes`)になります。 - **Runner設定**: UIに**Runner**として表示される`config.toml`の単一`[[runner]]`エントリ。 - **Runner manager**(Runnerマネージャー): `config.toml`ファイルを読み取り、すべてのrunner設定とジョブ実行を同時に実行するプロセス。 - **Machine**(マシン): Runnerが動作する仮想マシン(VM)またはポッド。GitLab Runnerは、一意の永続的なマシンIDを自動的に生成します。このため、複数のマシンに同じRunner設定が指定されている場合でも、ジョブは個別にルーティングされますが、Runner設定はUIでグループ化されます。 - **Executor**: GitLab Runnerがジョブを実行するために使用する方法(Docker、シェル、Kubernetesなど)。 - **パイプライン**: がGitLabにプッシュされると自動的に実行されるジョブのコレクション。 - **ジョブ**: パイプライン内の単一のタスク。テストの実行やアプリケーションのビルドなど。 - **Runner 
token**(Runnerトークン): runnerがGitLabで認証できるようにする一意の識別子。 - **タグ**: 実行できるジョブを決定するrunnerに割り当てられたラベル。 - **Concurrent jobs**(同時ジョブ): runnerが同時に実行できるジョブの数。 - **Self-managed runner**(セルフマネージドRunner): 独自のインフラストラクチャにインストールおよび管理されるrunner。 - **GitLab-hosted runner**(GitLabホスト型Runner): GitLabによって提供および管理されるrunner。 詳細については、公式の[GitLab用語リスト](https://docs.gitlab.com/development/documentation/styleguide/word_list/#gitlab-runner)と、[GitLab Runner](https://docs.gitlab.com/development/architecture/#gitlab-runner)のGitLabアーキテクチャのエントリも参照してください。 ## コントリビュート {#contributing} コントリビュートを歓迎します。詳細については、[`CONTRIBUTING.md`](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/CONTRIBUTING.md)と[開発ドキュメント](development/_index.md)を参照してください。 GitLab Runnerプロジェクトのレビュアーの方は、[GitLab Runnerのレビュー](development/reviewing-gitlab-runner.md)に関するドキュメントをお読みください。 [GitLab Runnerプロジェクトのリリースプロセス](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/PROCESS.md)を確認することもできます。 ## 変更履歴 {#changelog} 最近の変更を確認するには、[CHANGELOG](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/CHANGELOG.md)を参照してください。 ## ライセンス {#license} このコードは、MITライセンスに従って配布されます。[LICENSE](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/LICENSE)ファイルをご確認ください。 ================================================ FILE: docs-locale/ja-jp/commands/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runnerのコマンド --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerには、ビルドの登録、管理、実行に使用する一連のコマンドがあります。 コマンドのリストは、以下を実行して確認できます: ```shell gitlab-runner --help ``` コマンドの後に`--help`を付加すると、そのコマンドに固有のヘルプページが表示されます: ```shell gitlab-runner --help ``` ## 環境変数を使用する {#using-environment-variables} ほとんどのコマンドは、コマンドへ設定を渡す方法として環境変数をサポートしています。 
特定のコマンドに対して`--help`を呼び出すと、環境変数の名前を確認できます。たとえば、`run`コマンドのヘルプメッセージは次のようになります: ```shell gitlab-runner run --help ``` 出力は次のようになります: ```plaintext NAME: gitlab-runner run - run multi runner service USAGE: gitlab-runner run [command options] [arguments...] OPTIONS: -c, --config "/Users/ayufan/.gitlab-runner/config.toml" Config file [$CONFIG_FILE] ``` ## デバッグモードで実行する {#running-in-debug-mode} 未定義の動作またはエラーの原因を調べる場合は、デバッグモードを使用します。 コマンドをデバッグモードで実行するには、コマンドの先頭に`--debug`を追加します: ```shell gitlab-runner --debug ``` ## スーパーユーザー権限 {#super-user-permission} GitLab Runnerの設定にアクセスするコマンドは、スーパーユーザー(`root`)として実行する場合には動作が異なります。ファイルの場所は、コマンドを実行するユーザーに応じて異なります。 `gitlab-runner`コマンドを実行すると、実行中のモードが表示されます: ```shell $ gitlab-runner run INFO[0000] Starting multi-runner from /Users/ayufan/.gitlab-runner/config.toml ... builds=0 WARN[0000] Running in user-mode. WARN[0000] Use sudo for system-mode: WARN[0000] $ sudo gitlab-runner... ``` `user-mode`が使用するモードであると確信できる場合は、このモードを使用してください。それ以外の場合は、コマンドの先頭に`sudo`を付加します: ```shell $ sudo gitlab-runner run INFO[0000] Starting multi-runner from /etc/gitlab-runner/config.toml ... builds=0 INFO[0000] Running in system-mode. ``` Windowsの場合、コマンドプロンプトを管理者として実行する必要がある場合があります。 ## 設定ファイル {#configuration-file} GitLab Runnerの設定では[TOML](https://github.com/toml-lang/toml)形式が使用されます。 編集するファイルは次の場所にあります: 1. \*nixシステムでGitLab Runnerがスーパーユーザー(`root`)として実行されている場合は`/etc/gitlab-runner/config.toml` 1. \*nixシステムでGitLab Runnerが非rootユーザーとして実行されている場合は`~/.gitlab-runner/config.toml` 1. 
その他のシステムでは`./config.toml` ほとんどのコマンドは、カスタム設定ファイルを指定する引数を受け入れるため、1つのマシンで複数の異なる設定を持つことができます。カスタム設定ファイルを指定するには、`-c`または`--config`フラグを使用するか、`CONFIG_FILE`環境変数を使用します。 ## シグナル {#signals} システムシグナルを使用してGitLab Runnerを操作できます。以下のコマンドは、以下のシグナルをサポートしています: | コマンド | シグナル | アクション | |---------------------|---------------------|--------| | `register` | `SIGINT` | Runnerの登録をキャンセルし、すでに登録されている場合は削除します。 | | `run`、`run-single` | `SIGINT`、`SIGTERM` | 実行中のすべてのビルドを中断し、できるだけ早く終了します。すぐに終了するには2回使用します(**forceful shutdown**(強制シャットダウン))。 | | `run`、`run-single` | `SIGQUIT` | 新しいビルドの受け入れを停止します。実行中のビルドが完了したらすぐに終了します(**graceful shutdown**(正常なシャットダウン))。 | | `run` | `SIGHUP` | 設定ファイルを強制的に再読み込みします。 | たとえばRunnerの設定ファイルを強制的に再読み込みするには、次のように実行します: ```shell sudo kill -SIGHUP ``` [正常なシャットダウン](#gitlab-runner-stop-doesnt-shut-down-gracefully)の場合は次のようになります: ```shell sudo kill -SIGQUIT ``` {{< alert type="warning" >}} `shell`または`docker` executorを使用している場合は、正常なシャットダウンのために`killall`または`pkill`を**not**(使用しないでください)。これによりサブプロセスも強制終了されるため、シグナルが不適切に処理される可能性があります。ジョブを処理するメインプロセスでのみ使用してください。 {{< /alert >}} 一部のオペレーティングシステムは、サービスが失敗すると自動的に再起動するように設定されています(一部のプラットフォームではデフォルトです)。ご使用のオペレーティングシステムでこのように設定されている、上記のシグナルによってRunnerがシャットダウンされると、自動的にRunnerが再起動される可能性があります。 ## コマンドの概要 {#commands-overview} 引数を指定せずに`gitlab-runner`を実行すると、次のように表示されます: ```plaintext NAME: gitlab-runner - a GitLab Runner USAGE: gitlab-runner [global options] command [command options] [arguments...] VERSION: 17.10.1 (ef334dcc) AUTHOR: GitLab Inc. 
COMMANDS: list List all configured runners run run multi runner service register register a new runner reset-token reset a runner's token install install service uninstall uninstall service start start service stop stop service restart restart service status get status of a service run-single start single runner unregister unregister specific runner verify verify all registered runners wrapper start multi runner service wrapped with gRPC manager server fleeting manage fleeting plugins artifacts-downloader download and extract build artifacts (internal) artifacts-uploader create and upload build artifacts (internal) cache-archiver create and upload cache artifacts (internal) cache-extractor download and extract cache artifacts (internal) cache-init changed permissions for cache paths (internal) health-check check health for a specific address proxy-exec execute internal commands (internal) read-logs reads job logs from a file, used by kubernetes executor (internal) help, h Shows a list of commands or help for one command GLOBAL OPTIONS: --cpuprofile value write cpu profile to file [$CPU_PROFILE] --debug debug mode [$RUNNER_DEBUG] --log-format value Choose log format (options: runner, text, json) [$LOG_FORMAT] --log-level value, -l value Log level (options: debug, info, warn, error, fatal, panic) [$LOG_LEVEL] --help, -h show help --version, -v print the version ``` 以下で各コマンドの動作を詳しく説明します。 ## 登録関連コマンド {#registration-related-commands} 新しいRunnerを登録するか、Runnerが登録されている場合にリストして検証するには、次のコマンドを使用します。 - [`gitlab-runner register`](#gitlab-runner-register) - [インタラクティブ登録](#interactive-registration) - [非インタラクティブ登録](#non-interactive-registration) - [`gitlab-runner list`](#gitlab-runner-list) - [`gitlab-runner verify`](#gitlab-runner-verify) - [`gitlab-runner unregister`](#gitlab-runner-unregister) これらのコマンドでは次の引数がサポートされています: | パラメータ | デフォルト | 説明 | |------------|-----------------------------------------------------------|-------------| | `--config` | 
[設定ファイルセクション](#configuration-file)を参照 | 使用するカスタム設定ファイルを指定します | ### `gitlab-runner register` {#gitlab-runner-register} このコマンドは、GitLab [Runners API](https://docs.gitlab.com/api/runners/#register-a-new-runner)を使用して、GitLabにRunnerを登録します。 登録されたRunnerは[設定ファイル](#configuration-file)に追加されます。1つのGitLab Runnerインストールで複数の設定を使用できます。`gitlab-runner register`を実行すると、新しい設定エントリが追加されます。以前のエントリは削除されません。 Runnerは次のいずれかの方法で登録できます: - インタラクティブ - 非インタラクティブ {{< alert type="note" >}} RunnerはGitLab [Runners API](https://docs.gitlab.com/api/runners/#register-a-new-runner)を使用して直接登録できますが、設定は自動的に生成されません。 {{< /alert >}} #### インタラクティブ登録 {#interactive-registration} このコマンドは通常、インタラクティブモード(**デフォルト**)で使用されます。Runnerの登録中に複数の質問が表示されます。 この質問に対する回答を事前に入力するには、登録コマンドの呼び出し時に引数を追加します: ```shell gitlab-runner register --name my-runner --url "http://gitlab.example.com" --token my-authentication-token ``` あるいは`register`コマンドよりも前に環境変数を設定します: ```shell export CI_SERVER_URL=http://gitlab.example.com export RUNNER_NAME=my-runner export CI_SERVER_TOKEN=my-authentication-token gitlab-runner register ``` 設定可能なすべての引数と環境を確認するには、以下を実行します: ```shell gitlab-runner register --help ``` #### 非インタラクティブ登録 {#non-interactive-registration} 非インタラクティブ/無人モードで登録を使用することができます。 登録コマンドの呼び出し時に引数を指定できます: ```shell gitlab-runner register --non-interactive ``` あるいは`register`コマンドよりも前に環境変数を設定します: ```shell export REGISTER_NON_INTERACTIVE=true gitlab-runner register ``` {{< alert type="note" >}} ブール値パラメータは、コマンドラインで`--key={true|false}`を使用して渡す必要があります。 {{< /alert >}} #### `[[runners]]`設定テンプレートファイル {#runners-configuration-template-file} {{< history >}} - GitLab Runner 12.2で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4228)されました。 {{< /history >}} [設定テンプレートファイル](../register/_index.md#register-with-a-configuration-template)機能を使用して、Runnerの登録中に追加のオプションを設定できます。 ### `gitlab-runner list` {#gitlab-runner-list} このコマンドは、[設定ファイル](#configuration-file)に保存されているすべてのRunnerをリストします。 ### `gitlab-runner verify` {#gitlab-runner-verify} 
このコマンドは、登録されたRunnerがGitLabに接続できることを確認します。ただし、RunnerがGitLab Runnerサービスで使用されているかどうかは検証しません。出力例を次に示します: ```plaintext Verifying runner... is alive runner=fee9938e Verifying runner... is alive runner=0db52b31 Verifying runner... is alive runner=826f687f Verifying runner... is alive runner=32773c0f ``` GitLabから削除された古いRunnerを削除するには、次のコマンドを実行します。 {{< alert type="warning" >}} この操作は元に戻すことができません。この操作では設定ファイルが更新されます。このため、実行する前に`config.toml`のバックアップがあることを確認してください。 {{< /alert >}} ```shell gitlab-runner verify --delete ``` ### `gitlab-runner unregister` {#gitlab-runner-unregister} このコマンドは、GitLab [Runners API](https://docs.gitlab.com/api/runners/#delete-a-runner)を使用して、登録されているRunnerを登録解除します。 次のいずれかを指定する必要があります: - 完全なURLとRunnerのトークン。 - Runnerの名前。 `--all-runners`オプションを使用すると、アタッチされているすべてのRunnerの登録が解除されます。 {{< alert type="note" >}} RunnerはGitLab [Runners API](https://docs.gitlab.com/api/runners/#delete-a-runner)で登録解除できますが、ユーザーに対して設定は変更されません。 {{< /alert >}} - Runner登録トークンを使用してRunnerが作成された場合、Runner認証トークンを指定した`gitlab-runner unregister`を実行すると、Runnerが削除されます。 - RunnerがGitLab UIまたはRunners APIで作成された場合、Runner認証トークンを指定して`gitlab-runner unregister`を実行すると、Runnerマネージャーが削除されますが、Runnerは削除されません。Runnerを完全に削除するには、[Runner管理ページでRunnerを削除する](https://docs.gitlab.com/ci/runners/runners_scope/#delete-instance-runners)か、[`DELETE /runners`](https://docs.gitlab.com/api/runners/#delete-a-runner) REST APIエンドポイントを使用します。 1つのRunnerを登録解除するには、まず`gitlab-runner list`を実行してRunnerの詳細を取得します: ```plaintext test-runner Executor=shell Token=t0k3n URL=http://gitlab.example.com ``` 次にこの情報を使用して、次のいずれかのコマンドで登録を解除します。 {{< alert type="warning" >}} この操作は元に戻すことができません。この操作では設定ファイルが更新されます。このため、実行する前に`config.toml`のバックアップがあることを確認してください。 {{< /alert >}} #### URLおよびトークンを指定 {#by-url-and-token} ```shell gitlab-runner unregister --url "http://gitlab.example.com/" --token t0k3n ``` #### 名前を指定 {#by-name} ```shell gitlab-runner unregister --name test-runner ``` {{< alert type="note" >}} 指定された名前のRunnerが複数ある場合、最初のRunnerのみが削除されます。 {{< /alert >}} #### 
すべてのRunner {#all-runners} ```shell gitlab-runner unregister --all-runners ``` ### `gitlab-runner reset-token` {#gitlab-runner-reset-token} このコマンドはGitLab Runners APIを使用して、[Runner ID](https://docs.gitlab.com/api/runners/#reset-runners-authentication-token-by-using-the-runner-id)または[現在のトークン](https://docs.gitlab.com/api/runners/#reset-runners-authentication-token-by-using-the-current-token)のいずれかでRunnerのトークンをリセットします。 Runnerの名前(またはURLとID)が必要です。Runner IDでリセットする場合はオプションのPATが必要です。PATとRunner IDは、トークンがすでに期限切れになっている場合に使用することを目的としています。 `--all-runners`オプションを使用すると、アタッチされているRunnerのすべてのトークンがリセットされます。 #### Runnerの現在のトークンを使用 {#with-runners-current-token} ```shell gitlab-runner reset-token --name test-runner ``` #### PATとRunner名を使用 {#with-pat-and-runner-name} ```shell gitlab-runner reset-token --name test-runner --pat PaT ``` #### PAT、GitLab URL、およびRunner IDを使用 {#with-pat-gitlab-url-and-runner-id} ```shell gitlab-runner reset-token --url "https://gitlab.example.com/" --id 12345 --pat PaT ``` #### すべてのRunner {#all-runners-1} ```shell gitlab-runners reset-token --all-runners ``` ## サービス関連コマンド {#service-related-commands} 次のコマンドを使用すると、Runnerをシステムサービスまたはユーザーサービスとして管理できます。Runnerサービスをインストール、アンインストール、開始、および停止するために使用します。 - [`gitlab-runner install`](#gitlab-runner-install) - [`gitlab-runner uninstall`](#gitlab-runner-uninstall) - [`gitlab-runner start`](#gitlab-runner-start) - [`gitlab-runner stop`](#gitlab-runner-stop) - [`gitlab-runner restart`](#gitlab-runner-restart) - [`gitlab-runner status`](#gitlab-runner-status) - [複数のサービス](#multiple-services) - サービス関連コマンドの実行時に[**アクセスが拒否されました**](#access-denied-when-running-the-service-related-commands) すべてのサービス関連コマンドは、次の引数を受け入れます: | パラメータ | デフォルト | 説明 | |------------------|---------------------------------------------------|-------------| | `--service` | `gitlab-runner` | カスタムサービス名を指定します | | `--config` | [設定ファイル](#configuration-file)を参照 | 使用するカスタム設定ファイルを指定します | | `--user-service` | [ユーザーサービス](#user-service)を参照 | ユーザーサービス(systemd)として実行するようにGitLab 
Runnerを設定します | ### `gitlab-runner install` {#gitlab-runner-install} このコマンドは、GitLab Runnerをサービスとしてインストールします。受け入れられる引数のセットは、実行するシステムに応じて異なります。 **Windows**(Windows)で実行する場合、またはスーパーユーザーとして実行する場合は、`--user`フラグが受け入れられます。このフラグにより、**shell**(Shell) executorで実行されるビルドの権限を削除できます。 | パラメータ | デフォルト | 説明 | |-----------------------|---------------------------------------------------|-------------| | `--service` | `gitlab-runner` | 使用するサービス名を指定します | | `--config` | [設定ファイル](#configuration-file)を参照 | 使用するカスタム設定ファイルを指定します | | `--syslog` | `true`(systemd以外のシステムの場合) | サービスをシステムログ生成サービスと統合するかどうかを指定します | | `--working-directory` | 現在のディレクトリ | **shell**(Shell) executorを使用してビルドを実行するときにすべてのデータを保存するルートディレクトリを指定します | | `--user` | `root` | ビルドを実行するユーザーを指定します | | `--password` | なし | ビルドを実行するユーザーのパスワードを指定します | ### `gitlab-runner uninstall` {#gitlab-runner-uninstall} このコマンドは、GitLab Runnerがサービスとして実行されないようにするため、GitLab Runnerを停止してアンインストールします。 ### `gitlab-runner start` {#gitlab-runner-start} このコマンドは、GitLab Runnerサービスを開始します。 ### `gitlab-runner stop` {#gitlab-runner-stop} このコマンドは、GitLab Runnerサービスを停止します。 ### `gitlab-runner restart` {#gitlab-runner-restart} このコマンドは、GitLab Runnerサービスを停止してから開始します。 ### `gitlab-runner status` {#gitlab-runner-status} このコマンドは、GitLab Runnerサービスの状態を出力します。サービスが実行中の場合の終了コードは0で、サービスが実行されていない場合は0以外です。 ### 複数のサービス {#multiple-services} `--service`フラグを指定することで、複数の個別の設定を使用して複数のGitLab Runnerサービスをインストールできます。 ### ユーザーサービス {#user-service} 一部のinitシステム(`systemd`など)を使用することにより、サービスを[ユーザーサービス](https://wiki.archlinux.org/title/Systemd/User)として管理できます。initシステムにこの機能が含まれている場合、`gitlab-runner`サービスをユーザーサービスとして管理するには、サービス関連のコマンドを実行する際に`--user-service`フラグを指定します。 ## 実行関連コマンド {#run-related-commands} このコマンドを使用すると、GitLabからビルドをフェッチして処理できます。 ### `gitlab-runner run` {#gitlab-runner-run} `gitlab-runner run`コマンドは、GitLab Runnerがサービスとして開始されたときに実行されるメインコマンドです。`config.toml`から定義されているすべてのRunnerを読み取り、それらすべてを実行しようとします。 コマンドは実行され、[シグナルを受信する](#signals)まで動作します。 次のパラメータを受け入れます。 | パラメータ | デフォルト | 説明 | 
|-----------------------|-----------------------------------------------|-------------| | `--config` | [設定ファイル](#configuration-file)を参照 | 使用するカスタム設定ファイルを指定します | | `--working-directory` | 現在のディレクトリ | **shell**(Shell) executorを使用してビルドを実行するときにすべてのデータを保存するルートディレクトリを指定します | | `--user` | 現在のユーザー | ビルドを実行するユーザーを指定します | | `--syslog` | `false` | すべてのログをSysLog(Unix)またはEventLog(Windows)に送信します | | `--listen-address` | 空 | PrometheusメトリクスHTTPサーバーがリッスンするアドレス(`:`) | ### `gitlab-runner run-single` {#gitlab-runner-run-single} {{< history >}} - GitLab Runner 17.1で設定ファイルを使用する機能が[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/37670)されました。 {{< /history >}} 1つのGitLabインスタンスから1つのビルドを実行するには、この補助コマンドを使用します。このコマンドでは次のことができます: - GitLab URLやRunnerトークンなど、すべてのオプションをCLIパラメータまたは環境変数として取ります。たとえば、すべてのパラメータが明示的に指定されたシングルジョブの場合は次のようになります: ```shell gitlab-runner run-single -u http://gitlab.example.com -t my-runner-token --executor docker --docker-image ruby:3.3 ``` - 設定ファイルを読み取って、特定のRunnerの設定を使用します。たとえば、設定ファイルが指定されたシングルジョブの場合は次のようになります: ```shell gitlab-runner run-single -c ~/.gitlab-runner/config.toml -r runner-name ``` `--help`フラグを使用すると、使用可能なすべての設定オプションを確認できます: ```shell gitlab-runner run-single --help ``` `--max-builds`オプションを使用して、Runnerが終了するまでに実行するビルドの数を制御できます。デフォルトの`0`は、Runnerにビルド制限がなく、ジョブが永久に実行されることを意味します。 `--wait-timeout`オプションを使用して、Runnerが終了するまでにジョブを待機する時間を制御することもできます。デフォルトの`0`は、Runnerにタイムアウトがなく、ジョブ間で永久に待機することを意味します。 ## 内部コマンド {#internal-commands} GitLab Runnerは単一バイナリとして配布され、ビルド中に使用されるいくつかの内部コマンドが含まれています。 ### `gitlab-runner artifacts-downloader` {#gitlab-runner-artifacts-downloader} GitLabからアーティファクトアーカイブをダウンロードします。 ### `gitlab-runner artifacts-uploader` {#gitlab-runner-artifacts-uploader} アーティファクトアーカイブをGitLabにアップロードします。 ### `gitlab-runner cache-archiver` {#gitlab-runner-cache-archiver} キャッシュアーカイブを作成し、ローカルに保存するか、外部サーバーにアップロードします。 ### `gitlab-runner cache-extractor` {#gitlab-runner-cache-extractor} ローカルまたは外部に保存されたファイルからキャッシュアーカイブを復元します。 ## トラブルシューティング {#troubleshooting} 
よくある落とし穴のいくつかについて説明します。 ### サービス関連コマンドの実行時に**アクセスが拒否されました** {#access-denied-when-running-the-service-related-commands} 通常、[サービス関連コマンド](#service-related-commands)を実行するには管理者権限が必要です: - Unix(Linux、macOS、FreeBSD)システムでは、`gitlab-runner`の前に`sudo`を付加します - Windowsシステムでは、管理者権限でのコマンドプロンプトを使用します。`Administrator`コマンドプロンプトを実行します。Windowsの検索ボックスに`Command Prompt`を書き込むには、右クリックして`Run as administrator`を選択します。管理者権限でのコマンドプロンプトを実行することを確認します。 ## `gitlab-runner stop`が正常にシャットダウンしない {#gitlab-runner-stop-doesnt-shut-down-gracefully} GitLab Runnerがホストにインストールされており、ローカルexecutorを実行すると、アーティファクトのダウンロードやアップロード、キャッシュの処理などの操作のために追加のプロセスが開始されます。これらのプロセスは`gitlab-runner`コマンドとして実行されます。つまり、`pkill -QUIT gitlab-runner`または`killall QUIT gitlab-runner`を使用してプロセスを強制終了できます。プロセスを強制終了すると、プロセスが担当するオペレーションが失敗します。 これを防ぐには、次の2つの方法があります: - kill(強制終了)シグナルとして`SIGQUIT`を使用して、Runnerをローカルサービス(`systemd`など)として登録し、`gitlab-runner stop`または`systemctl stop gitlab-runner.service`を使用します。この動作を有効にするための設定例を次に示します: ```ini ; /etc/systemd/system/gitlab-runner.service.d/kill.conf [Service] KillSignal=SIGQUIT TimeoutStopSec=infinity ``` - 設定の変更を適用するには、このファイルを作成した後、`systemctl daemon-reload`を使用して`systemd`を再読み込みします。 - `kill -SIGQUIT `を使用してプロセスを手動で強制終了します。メインの`gitlab-runner`プロセスの`pid`を確認する必要があります。これを確認するには、起動時に表示されるログを調べます: ```shell $ gitlab-runner run Runtime platform arch=arm64 os=linux pid=8 revision=853330f9 version=16.5.0 ``` ### システムIDステートファイルの保存: アクセスが拒否される {#saving-system-id-state-file-access-denied} GitLab Runner 15.7および15.8は、`config.toml`ファイルを含むディレクトリに対する書き込み権限がない場合、起動しない可能性があります。 GitLab Runnerは起動時に、`config.toml`を含むディレクトリにある`.runner_system_id`ファイルを検索します。`.runner_system_id`ファイルが見つからない場合、新しいファイルを作成します。GitLab Runnerに書き込み権限がない場合、起動が失敗します。 この問題を解決するには、一時的にファイル書き込み権限を許可して`gitlab-runner run`を実行します。`.runner_system_id`ファイルが作成されたら、権限を読み取り専用にリセットできます。 ================================================ FILE: docs-locale/ja-jp/configuration/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine 
the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments description: Config.toml、証明書、オートスケール、プロキシ設定 title: GitLab Runnerを設定する --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerの設定方法について説明します。 - [高度な設定オプション](advanced-configuration.md): [`config.toml`](https://github.com/toml-lang/toml)設定ファイルを使用してRunnerの設定を編集します。 - [自己署名証明書を使用する](tls-self-signed.md): GitLabサーバーへの接続時にTLSピアを検証する証明書を設定します。 - [Docker Machineでオートスケールする](autoscale.md): Docker Machineによって自動的に作成されたマシンでジョブを実行します。 - [AWS EC2でGitLab Runnerをオートスケールする](runner_autoscale_aws/_index.md): オートスケールされたAWS EC2インスタンスでジョブを実行します。 - [AWS FargateでGitLab CIをオートスケールする](runner_autoscale_aws_fargate/_index.md): GitLabカスタムexecutorでAWS Fargateドライバーを使用して、AWS ECSでジョブを実行します。 - [グラフィカルプロセッシングユニット](gpus.md): GPUを使用してジョブを実行します。 - [initシステム](init.md): GitLab Runnerは、オペレーティングシステムに基づいてinitサービスファイルをインストールします。 - [サポートされているShell](../shells/_index.md): Shellスクリプトジェネレーターを使用して、さまざまなシステムでビルドを実行します。 - [セキュリティに関する考慮事項](../security/_index.md): GitLab Runnerでジョブを実行する際のセキュリティへの潜在的な影響に注意してください。 - [Runnerのモニタリング](../monitoring/_index.md): Runnerの動作をモニタリングします。 - [Dockerキャッシュを自動的にクリーンアップする](../executors/docker.md#clear-the-docker-cache): ディスク容量が少なくなっている場合は、cronジョブを使用して古いコンテナとボリュームをクリーンアップします。 - [プロキシの背後で実行するようにGitLab Runnerを設定する](proxy.md): Linuxプロキシをセットアップし、GitLab Runnerを設定します。このセットアップは、Docker executorと適切に連携します。 - [Oracle Cloud Infrastructure ( OCI ) 用のGitLab Runnerを設定する](oracle_cloud_performance.md): OCIでGitLab Runnerのパフォーマンスを最適化します。 - [レート制限されたリクエストを処理する](proxy.md#handling-rate-limited-requests)。 - [GitLab Runner Operatorを設定する](configuring_runner_operator.md)。 ================================================ FILE: docs-locale/ja-jp/configuration/advanced-configuration.md ================================================ --- stage: Verify group: Runner Core info: To 
determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: 高度な設定 --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerと個別に登録されたRunnerの動作を変更するには、`config.toml`ファイルを修正します。 `config.toml`ファイルは次の場所にあります。 - GitLab Runnerがrootとして実行される場合、\*nixシステムでは`/etc/gitlab-runner/`にあります。このディレクトリは、サービス設定のパスでもあります。 - GitLab Runnerが非rootユーザーとして実行される場合、\*nixシステムでは`~/.gitlab-runner/`にあります。 - その他のシステムの`./`。 ほとんどのオプションでは、オプションを変更した場合にGitLab Runnerを再起動する必要はありません。これには、`[[runners]]`セクションのパラメータと`listen_address`を除くグローバルセクションのほとんどのパラメータが含まれます。Runnerがすでに登録されている場合は、再度登録する必要はありません。 GitLab Runnerは、設定の変更を3秒ごとに確認し、必要に応じて再読み込みします。またGitLab Runnerは、`SIGHUP`シグナルに応答して設定を再読み込みします。 ## 設定検証 {#configuration-validation} {{< history >}} - GitLab Runner 15.10で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/3924)されました。 {{< /history >}} 設定検証は、`config.toml`ファイルの構造をチェックするプロセスです。設定バリデーターからの出力は、`info`レベルのメッセージのみを示します。 設定検証プロセスは、情報提供のみを目的としています。この出力から、Runner設定に関する潜在的な問題を特定できます。設定検証では、起こり得るすべての問題を検出できるとは限りません。また、メッセージがないからといって、`config.toml`ファイルに欠陥がないことが保証されるわけではありません。 ## グローバルセクション {#the-global-section} これらの設定はグローバルなものです。すべてのRunnerに適用されます。 | 設定 | 説明 | |----------------------|-------------| | `concurrent` | 登録されているすべてのRunnerで同時に実行できるジョブ数を制限します。各`[[runners]]`セクションで独自の制限を定義できますが、この値はそれらのすべての値を合計した最大値を設定します。たとえば、値が`10`の場合、同時に実行できるジョブは最大10個までとなります。`0`は禁止されています。この値を使用すると、Runnerプロセスは重大なエラーで終了します。[Docker Machine executor](autoscale.md#limit-the-number-of-vms-created-by-the-docker-machine-executor)、[インスタンスexecutor](../executors/instance.md)、[Docker Autoscaler executor](../executors/docker_autoscaler.md#example-aws-autoscaling-for-1-job-per-instance)、[`runners.custom_build_dir`設定](#the-runnerscustom_build_dir-section)でこの設定がどのように機能するかをご確認ください。 | | `log_level` | 
ログレベルを定義します。オプションには、`debug`、`info`、`warn`、`error`、`fatal`、`panic`があります。この設定は、コマンドライン引数の`--debug`、`-l`、または`--log-level`で設定されるレベルよりも優先度が低くなります。 | | `log_format` | ログ形式を指定します。オプションには、`runner`、`text`、`json`があります。この設定は、コマンドライン引数の`--log-format`で設定される形式よりも優先度が低くなります。デフォルト値は`runner`で、色分けのためのANSIエスケープコードが含まれています。 | | `check_interval` | Runnerが新しいジョブを確認する間隔を秒単位で定義します。デフォルト値は`3`です。`0`以下に設定すると、デフォルト値が使用されます。 | | `sentry_dsn` | Sentryへのすべてのシステムレベルのエラーの追跡を有効にします。 | | `connection_max_age` | GitLabサーバーへのTLSキープアライブ接続を再接続するまでの最大時間を指定します。デフォルト値は`15m`(15分)です。`0`以下に設定すると、接続は可能な限り持続します。 | | `listen_address` | Prometheusメトリクス用HTTPサーバーがリッスンするアドレス(`:`)を定義します。 | | `shutdown_timeout` | [強制シャットダウン操作](../commands/_index.md#signals)がタイムアウトになりプロセスが終了するまでの秒数を示します。デフォルト値は`30`です。`0`以下に設定すると、デフォルト値が使用されます。 | ### 設定の警告 {#configuration-warnings} #### ロングポーリングのイシュー {#long-polling-issues} GitLab Runnerは、GitLabのロングポーリングがGitLab Workhorseを介してオンになっている場合、いくつかの設定シナリオでロングポーリングのイシューが発生する可能性があります。これらは、設定に応じて、パフォーマンスのボトルネックから重大な処理遅延まで多岐にわたります。GitLab Runnerのワーカーは、長時間(GitLab Workhorseの設定である`-apiCiLongPollingDuration`(デフォルトは50秒)と一致)ロングポーリングリクエストで停止し、他のジョブが迅速に処理されるのを妨げる可能性があります。 このイシューは、GitLab Workhorseの`-apiCiLongPollingDuration`設定によって制御されるGitLab CI/CDのロングポーリング機能に関連しています。オンにすると、ジョブリクエストは、ジョブが利用可能になるのを待機している間、設定された時間までブロックされる可能性があります。 デフォルトのGitLab Workhorseのロングポーリングの設定値は50秒です(最近のGitLabバージョンではデフォルトでオンになっています)。 次に、設定例をいくつか示します: - Omnibus:`gitlab_workhorse['api_ci_long_polling_duration'] = "50s"` in `/etc/gitlab/gitlab.rb` - Helmチャート: `gitlab.webservice.workhorse.extraArgs`設定を使用 - CLI:`gitlab-workhorse -apiCiLongPollingDuration 50s` 詳細については、以下を参照してください: - [Runnerのロングポーリング](https://docs.gitlab.com/ci/runners/long_polling/) - [Workhorse](https://docs.gitlab.com/development/workhorse/configuration/)の設定 **Symptoms:** - 一部のプロジェクトからのジョブは、開始前に遅延が発生します(時間は、GitLabインスタンスのロングポーリングのタイムアウトと一致します)。 - 他のプロジェクトからのジョブはすぐに実行されます - Runnerログの警告メッセージ:`CONFIGURATION: Long polling issues detected` **Common problematic scenarios:** - 
ワーカーのスターベーションボトルネック: `concurrent`設定がRunnerの数よりも少ない(重大なボトルネック) - リクエストのボトルネック: `request_concurrency=1`のRunnerは、ロングポーリング中にジョブの遅延を引き起こします - ビルド制限のボトルネック: `limit`設定(≤2)が低いRunnerと`request_concurrency=1`の組み合わせ **Solution options:** GitLab Runnerは、問題のあるシナリオを自動的に検出し、警告メッセージで調整されたソリューションを提供します。一般的な解決策は次のとおりです: - Runnerの数を超えるように`concurrent`設定を増やします。 - 高ボリュームのRunnerの`request_concurrency`値を1より大きい値に設定します(デフォルトは1)。システムのステートを理解し、設定に最適な値を見つけるために、[Runnerのモニタリング](../monitoring/_index.md)をオンにすることを検討してください。ワークロードに基づいて`request_concurrency`を自動的に調整するには、`FF_USE_ADAPTIVE_REQUEST_CONCURRENCY`機能フラグを使用することを検討してください。適応的な並行処理については、[機能フラグ](feature-flags.md)のドキュメントを参照してください。 - `limit`設定と予想されるジョブボリュームのバランスを取ります。 **Example problematic configurations:** **シナリオ1: ワーカーのスターベーションボトルネック** ```toml concurrent = 2 # Only 2 concurrent workers [[runners]] name = "runner-1" [[runners]] name = "runner-2" [[runners]] name = "runner-3" # 3 runners, only 2 workers - severe bottleneck ``` **シナリオ2: リクエストのボトルネック** ```toml concurrent = 4 # 4 workers available [[runners]] name = "high-volume-runner" request_concurrency = 1 # Default: only 1 request at a time limit = 10 # Can handle 10 jobs, but only 1 request slot ``` **シナリオ3: ビルド制限のボトルネック** ```toml concurrent = 4 [[runners]] name = "limited-runner" limit = 2 # Only 2 builds allowed request_concurrency = 1 # Only 1 request at a time # Creates severe bottleneck: builds at capacity + request slot blocked by long polling ``` **Example corrected configuration:** ```toml concurrent = 4 # Adequate worker capacity [[runners]] name = "high-volume-runner" request_concurrency = 3 # Allow multiple simultaneous requests limit = 10 [[runners]] name = "balanced-runner" request_concurrency = 2 limit = 5 ``` 設定例 ```toml # Example `config.toml` file concurrent = 100 # A global setting for job concurrency that applies to all runner sections defined in this `config.toml` file log_level = "warning" log_format = "text" check_interval = 3 # Value in seconds [[runners]] name = "first" url = 
"Your Gitlab instance URL (for example, `https://gitlab.com`)" executor = "shell" (...) [[runners]] name = "second" url = "Your Gitlab instance URL (for example, `https://gitlab.com`)" executor = "docker" (...) [[runners]] name = "third" url = "Your Gitlab instance URL (for example, `https://gitlab.com`)" executor = "docker-autoscaler" (...) ``` ### `log_format`の例(一部) {#log_format-examples-truncated} #### `runner` {#runner} ```shell Runtime platform arch=amd64 os=darwin pid=37300 revision=HEAD version=development version Starting multi-runner from /etc/gitlab-runner/config.toml... builds=0 WARNING: Running in user-mode. WARNING: Use sudo for system-mode: WARNING: $ sudo gitlab-runner... Configuration loaded builds=0 listen_address not defined, metrics & debug endpoints disabled builds=0 [session_server].listen_address not defined, session endpoints disabled builds=0 ``` #### `text` {#text} ```shell INFO[0000] Runtime platform arch=amd64 os=darwin pid=37773 revision=HEAD version="development version" INFO[0000] Starting multi-runner from /etc/gitlab-runner/config.toml... builds=0 WARN[0000] Running in user-mode. WARN[0000] Use sudo for system-mode: WARN[0000] $ sudo gitlab-runner... 
INFO[0000] INFO[0000] Configuration loaded builds=0 INFO[0000] listen_address not defined, metrics & debug endpoints disabled builds=0 INFO[0000] [session_server].listen_address not defined, session endpoints disabled builds=0 ``` #### `json` {#json} ```shell {"arch":"amd64","level":"info","msg":"Runtime platform","os":"darwin","pid":38229,"revision":"HEAD","time":"2025-06-05T15:57:35+02:00","version":"development version"} {"builds":0,"level":"info","msg":"Starting multi-runner from /etc/gitlab-runner/config.toml...","time":"2025-06-05T15:57:35+02:00"} {"level":"warning","msg":"Running in user-mode.","time":"2025-06-05T15:57:35+02:00"} {"level":"warning","msg":"Use sudo for system-mode:","time":"2025-06-05T15:57:35+02:00"} {"level":"warning","msg":"$ sudo gitlab-runner...","time":"2025-06-05T15:57:35+02:00"} {"level":"info","msg":"","time":"2025-06-05T15:57:35+02:00"} {"builds":0,"level":"info","msg":"Configuration loaded","time":"2025-06-05T15:57:35+02:00"} {"builds":0,"level":"info","msg":"listen_address not defined, metrics \u0026 debug endpoints disabled","time":"2025-06-05T15:57:35+02:00"} {"builds":0,"level":"info","msg":"[session_server].listen_address not defined, session endpoints disabled","time":"2025-06-05T15:57:35+02:00"} ``` ### `check_interval`の仕組み {#how-check_interval-works} `config.toml`に複数の`[[runners]]`セクションが含まれている場合、GitLab Runnerは設定されているGitlabインスタンスに対して、ジョブリクエストを継続的にスケジュールするループ処理を行います。 次の例では、`check_interval`が10秒で、2つの`[[runners]]`セクション(`runner-1`と`runner-2`)があります。GitLab Runnerは10秒ごとにリクエストを送信し、5秒間スリープします。 1. `check_interval`の値(`10s`)を取得します。 1. Runnerのリスト(`runner-1`、`runner-2`)を取得します。 1. スリープ間隔(`10s / 2 = 5s`)を計算します。 1. 無限ループを開始します。 1. `runner-1`のジョブをリクエストします。 1. `5s`(5秒間)スリープします。 1. `runner-2`のジョブをリクエストします。 1. `5s`(5秒間)スリープします。 1. 繰り返します。 `check_interval`設定例 ```toml # Example `config.toml` file concurrent = 100 # A global setting for job concurrency that applies to all runner sections defined in this `config.toml` file. 
log_level = "warning" log_format = "json" check_interval = 10 # Value in seconds [[runners]] name = "runner-1" url = "Your Gitlab instance URL (for example, `https://gitlab.com`)" executor = "shell" (...) [[runners]] name = "runner-2" url = "Your Gitlab instance URL (for example, `https://gitlab.com`)" executor = "docker" (...) ``` この例では、Runnerのプロセスからのジョブリクエストが5秒ごとに行われます。`runner-1`と`runner-2`が同じGitlabインスタンスに接続されている場合、このGitlabインスタンスも5秒ごとにこのRunnerから新しいリクエストを受信します。 `runner-1`の最初のリクエストから次のリクエストまでの間に、合計で2回のスリープ期間が発生します。各期間の長さは5秒であるため、`runner-1`のリクエストの間隔は約10秒です。`runner-2`にも同じことが当てはまります。 定義するRunnerが多いと、スリープ間隔は短くなります。ただし、Runnerに対するリクエストが繰り返されるのは、他のすべてのRunnerに対するリクエストとそれぞれのスリープ期間が実行された後になります。 ## `[session_server]`セクション {#the-session_server-section} ジョブを操作するには、`[[runners]]`セクションの外側のルートレベルで`[session_server]`セクションを指定します。このセクションは、個々のRunnerごとではなく、すべてのRunnerに対して1回だけ設定を行います。 ```toml # Example `config.toml` file with session server configured concurrent = 100 # A global setting for job concurrency that applies to all runner sections defined in this `config.toml` file log_level = "warning" log_format = "runner" check_interval = 3 # Value in seconds [session_server] listen_address = "[::]:8093" # Listen on all available interfaces on port `8093` advertise_address = "runner-host-name.tld:8093" session_timeout = 1800 ``` `[session_server]`セクションを設定する場合 - `listen_address`と`advertise_address`には、`host:port`という形式を使用します。ここで、`host`はIPアドレス(`127.0.0.1:8093`)またはドメイン(`my-runner.example.com:8093`)です。Runnerはこの情報を使用して、セキュアな接続のためのTLS証明書を作成します。 - `listen_address`または`advertise_address`で定義されているIPアドレスとポートにGitLabが接続できることを確認します。 - アプリケーション設定[`allow_local_requests_from_web_hooks_and_services`](https://docs.gitlab.com/api/settings/#available-settings)を有効にしていない場合は、`advertise_address`がパブリックIPアドレスであることを確認してください。 | 設定 | 説明 | |---------------------|-------------| | `listen_address` | セッションサーバーの内部URL。 | | `advertise_address` | セッションサーバーにアクセスするためのURL。GitLab 
RunnerはこのURLをGitlabに公開します。定義されていない場合は、`listen_address`が使用されます。 | | `session_timeout` | ジョブの完了後、セッションがアクティブな状態を維持できる秒数。タイムアウトによってジョブの終了がブロックされます。デフォルトは`1800`(30分)です。 | セッションサーバーとターミナルサポートを無効にするには、`[session_server]`セクションを削除します。 {{< alert type="note" >}} Runnerインスタンスがすでに実行中の場合は、`[session_server]`セクションの変更を有効にするために`gitlab-runner restart`を実行する必要があることがあります。 {{< /alert >}} GitLab Runner Dockerイメージを使用している場合は、[`docker run`コマンド](../install/docker.md)に`-p 8093:8093`を追加して、ポート`8093`を公開する必要があります。 ## `[[runners]]`セクション {#the-runners-section} 各`[[runners]]`セクションは1つのRunnerを定義します。 | 設定 | 説明 | |---------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | `name` | Runnerの説明(情報提供のみを目的としています) | | `url` | GitLabインスタンスのURL。 | | `token` | Runner認証トークン。Runnerの登録中に取得されます。[登録トークンとは異なります](https://docs.gitlab.com/api/runners/#registration-and-authentication-tokens)。 | | `tls-ca-file` | HTTPSを使用する場合に、ピアを検証するための証明書を含むファイル。[自己署名証明書またはカスタム認証局のドキュメント](tls-self-signed.md)を参照してください。 | | `tls-cert-file` | HTTPSを使用する場合に、ピアとの認証に使用する証明書を含むファイル。 | | `tls-key-file` | HTTPSを使用する場合に、ピアとの認証に使用する秘密キーを含むファイル。 | | `limit` | この登録済みRunnerが同時に処理できるジョブ数の制限を設定します。`0`(デフォルト)は、制限なしを意味します。この設定が[Docker Machine](autoscale.md#limit-the-number-of-vms-created-by-the-docker-machine-executor)、[Instance](../executors/instance.md)、[Docker Autoscaler](../executors/docker_autoscaler.md#example-aws-autoscaling-for-1-job-per-instance)の各executorでどのように機能するかについては、関連ドキュメントを参照してください。 | | `executor` | RunnerがCI/CDジョブを実行するために使用するホストのオペレーティングシステムの環境またはコマンドプロセッサ。詳細については、[executor](../executors/_index.md)を参照してください。 | | `shell` | 
スクリプトを生成するShellの名前。デフォルト値は[プラットフォームに応じて異なります](../shells/_index.md)。 | | `builds_dir` | 選択したexecutorのコンテキストでビルドが保存されるディレクトリの絶対パス。たとえば、ローカル、Docker、またはSSH環境で使用します。 | | `cache_dir` | 選択したexecutorのコンテキストでビルドキャッシュが保存されるディレクトリの絶対パス。たとえば、ローカル、Docker、またはSSH環境で使用します。`docker` executorが使用されている場合、このディレクトリを`volumes`パラメータに含める必要があります。 | | `environment` | 環境変数を追加または上書きします。 | | `request_concurrency` | GitLabからの新しいジョブに対する同時リクエスト数を制限します。デフォルトは`1`です。ジョブフローを制御するために`concurrency`、`limit`、および`request_concurrency`がどのように相互作用するかについて詳しくは、[GitLab Runnerの並行処理チューニングに関するKB記事](https://support.gitlab.com/hc/en-us/articles/21324350882076-GitLab-Runner-Concurrency-Tuning-Understanding-request-concurrency)をご覧ください。 | | `output_limit` | ビルドログの最大サイズ(KB単位)。デフォルトは`4096`(4 MB)です。 | | `pre_get_sources_script` | Gitリポジトリの更新とサブモジュールの更新の前にRunnerで実行されるコマンド。たとえば、最初にGitクライアントの設定を調整するために使用します。複数のコマンドを挿入するには、(三重引用符で囲まれた)複数行の文字列または`\n`文字を使用します。 | | `post_get_sources_script` | Gitリポジトリの更新とサブモジュールの更新の後にRunnerで実行されるコマンド。複数のコマンドを挿入するには、(三重引用符で囲まれた)複数行の文字列または`\n`文字を使用します。 | | `pre_build_script` | ジョブの実行前にRunnerで実行されるコマンド。複数のコマンドを挿入するには、(三重引用符で囲まれた)複数行の文字列または`\n`文字を使用します。 | | `post_build_script` | ジョブの実行直後、`after_script`の実行前にRunnerで実行されるコマンド。複数のコマンドを挿入するには、(三重引用符で囲まれた)複数行の文字列または`\n`文字を使用します。 | | `clone_url` | GitLabインスタンスのURLを上書きします。RunnerがGitlab URLに接続できない場合にのみ使用されます。 | | `debug_trace_disabled` | [デバッグトレーシング](https://docs.gitlab.com/ci/variables/#enable-debug-logging)を無効にします。`true`に設定すると、`CI_DEBUG_TRACE`が`true`に設定されていても、デバッグログ(トレース)は無効のままになります。 | | `clean_git_config` | Git設定をクリーンアップします。詳しくは、[Git設定をクリーンアップする](#cleaning-git-configuration)を参照してください。 | | `referees` | 結果をジョブアーティファクトとしてGitLabに渡す追加のジョブモニタリングワーカー。 | | `unhealthy_requests_limit` | 新規ジョブリクエストの`unhealthy`応答の数。この数を超えると、Runnerワーカーは無効になります。 | | `unhealthy_interval` | 異常なリクエストの制限を超えた後に、Runnerワーカーが無効になる期間。`3600 s`、`1 h 30 min`などの構文をサポートしています。 | | `job_status_final_update_retry_limit` | GitLab Runnerが最終ジョブ状態をGitLabインスタンスにプッシュする操作を再試行できる最大回数。 | 例: ```toml [[runners]] name = 
"example-runner" url = "http://gitlab.example.com/" token = "TOKEN" limit = 0 executor = "docker" builds_dir = "" shell = "" environment = ["ENV=value", "LC_ALL=en_US.UTF-8"] clone_url = "http://gitlab.example.local" ``` ### 従来の`/ci` URLサフィックス {#legacy-ci-url-suffix} {{< history >}} - [GitLab Runner 1.0.0](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/289)で非推奨になりました。 - 警告がGitLab Runner 18.7.0で追加されました。 {{< /history >}} 1.0.0より前のバージョンのGitLab Runnerでは、RunnerのURLは`/ci`サフィックスで設定されていました(例:`url = "https://gitlab.example.com/ci"`)。このサフィックスは不要になったため、設定から削除する必要があります。 `config.toml`に`/ci`サフィックスを含むURLが含まれている場合、GitLab Runnerは設定を処理するときに自動的にそれを削除します。ただし、イシューの可能性を回避するために、設定ファイルを更新してサフィックスを削除する必要があります。 #### 既知の問題 {#known-issues} - Gitサブモジュールの認証の失敗: `GIT_SUBMODULE_FORCE_HTTPS=true`が設定されている場合、サブモジュールは`fatal: could not read Username for 'https://gitlab.example.com': terminal prompts disabled`のような認証エラーでクローンに失敗する可能性があります。このイシューは、`/ci`サフィックスがGit URLの書き換えルールを妨げるために発生します。詳しくは、[issue 581678](https://gitlab.com/gitlab-org/gitlab/-/work_items/581678#note_2934077238)をご覧ください。 **Problematic configuration**: ```toml [[runners]] name = "legacy-runner" url = "https://gitlab.example.com/ci" # Remove the /ci suffix token = "TOKEN" executor = "docker" ``` **Corrected configuration**: ```toml [[runners]] name = "legacy-runner" url = "https://gitlab.example.com" # /ci suffix removed token = "TOKEN" executor = "docker" ``` GitLab Runnerが`/ci`サフィックスを含むURLで起動すると、警告メッセージをログに記録します: ```plaintext WARNING: The runner URL contains a legacy '/ci' suffix. This suffix is deprecated and should be removed from the configuration. Git submodules may fail to clone with authentication errors if this suffix is present. Please update the 'url' field in your config.toml to remove the '/ci' suffix. See https://docs.gitlab.com/runner/configuration/advanced-configuration.html#legacy-ci-url-suffix for more information. 
``` この警告を解決するには、`config.toml`ファイルを編集し、`url`フィールドから`/ci`サフィックスを削除します。 ### `clone_url`の仕組み {#how-clone_url-works} Runnerが使用できないURLでGitLabインスタンスが利用可能な場合は、`clone_url`を設定できます。 たとえば、ファイアウォールが原因でRunnerがURLにアクセスできない場合があります。Runnerが`192.168.1.23`上のノードに接続できる場合は、`clone_url`を`http://192.168.1.23`に設定します。 `clone_url`が設定されると、Runnerは`http://gitlab-ci-token:s3cr3tt0k3n@192.168.1.23/namespace/project.git`の形式でクローンURLを作成します。 {{< alert type="note" >}} `clone_url`は、Git LFSエンドポイントまたはアーティファクトのアップロードとダウンロードには影響しません。 {{< /alert >}} #### Git LFSエンドポイントを変更する {#modify-git-lfs-endpoints} [Git LFS](https://docs.gitlab.com/topics/git/lfs/)エンドポイントを変更するには、次のいずれかのファイルで`pre_get_sources_script`を設定します。 - `config.toml`: ```toml pre_get_sources_script = "mkdir -p $RUNNER_TEMP_PROJECT_DIR/git-template; git config -f $RUNNER_TEMP_PROJECT_DIR/git-template/config lfs.url https://" ``` - `.gitlab-ci.yml`: ```yaml default: hooks: pre_get_sources_script: - mkdir -p $RUNNER_TEMP_PROJECT_DIR/git-template - git config -f $RUNNER_TEMP_PROJECT_DIR/git-template/config lfs.url https://localhost ``` ### `unhealthy_requests_limit`と`unhealthy_interval`の仕組み {#how-unhealthy_requests_limit-and-unhealthy_interval-works} GitLabインスタンスが長期間使用できない場合(バージョンのアップグレード中など)、そのRunnerはアイドル状態になります。GitLabインスタンスが再び使用可能になっても、Runnerは後の30~60分間は、ジョブ処理を再開しません。 Runnerがアイドル状態になる期間を増減するには、`unhealthy_interval`設定を変更します。 RunnerのGitLabサーバーへの接続試行回数を変更し、アイドル状態になる前に異常なスリープを受信するには、`unhealthy_requests_limit`設定を変更します。詳細については、[`check_interval`の仕組み](advanced-configuration.md#how-check_interval-works)を参照してください。 ## executor {#the-executors} 次のexecutorを使用できます。 | executor | 必要な設定 | ジョブの実行場所 | |---------------------|-------------------------------------------------------------------------|----------------| | `shell` | | ローカルShell。デフォルトのexecutor。 | | `docker` | `[runners.docker]`と[Docker Engine](https://docs.docker.com/engine/) | Dockerコンテナ。 | | `docker-windows` | `[runners.docker]`と[Docker Engine](https://docs.docker.com/engine/) | Windows Dockerコンテナ。 | | `ssh` | 
`[runners.ssh]` | SSH、リモート。 | | `parallels` | `[runners.parallels]`と`[runners.ssh]` | Parallels VM、SSHで接続。 | | `virtualbox` | `[runners.virtualbox]`と`[runners.ssh]` | VirtualBox VM、SSHで接続。 | | `docker+machine` | `[runners.docker]`と`[runners.machine]` | `docker`と同じ。ただし、[オートスケールDocker Machine](autoscale.md)を使用。 | | `kubernetes` | `[runners.kubernetes]` | Kubernetesポッド。 | | `docker-autoscaler` | `[docker-autoscaler]`と`[runners.autoscaler]` | `docker`と同じ。ただし、オートスケールインスタンスを使用してCI/CDジョブをコンテナ内で実行。 | | `instance` | `[docker-autoscaler]`と`[runners.autoscaler]` | `shell`と同じ。ただし、オートスケールインスタンスを使用してCI/CDジョブをホストインスタンス上で直接実行。 | ## Shell {#the-shells} Shell executorを使用するように設定されている場合、CI/CDジョブはホストマシンでローカルに実行されます。サポートされているオペレーティングシステムのShellは次のとおりです。 | Shell | 説明 | |--------------|-------------| | `bash` | Bash(Bourne-shell)スクリプトを生成します。すべてのコマンドはBashコンテキストで実行されます。すべてのUnixシステムのデフォルトです。 | | `sh` | Sh(Bourne-shell)スクリプトを生成します。すべてのコマンドはShコンテキストで実行されます。すべてのUnixシステムで`bash`のフォールバックとして使用されます。 | | `powershell` | PowerShellスクリプトを生成します。すべてのコマンドはPowerShell Desktopのコンテキストで実行されます。 | | `pwsh` | PowerShellスクリプトを生成します。すべてのコマンドはPowerShell Coreのコンテキストで実行されます。これは、WindowsのデフォルトShellです。 | `shell`オプションが`bash`または`sh`に設定されている場合、Bashの[ANSI-C引用符の処理方法](https://www.gnu.org/software/bash/manual/html_node/ANSI_002dC-Quoting.html)を使用して、ジョブスクリプトがShellエスケープされます。 ### POSIX準拠のShellを使用する {#use-a-posix-compliant-shell} GitLab Runner 14.9以降では、`dash`などのPOSIX準拠のShellを使用するには、`FF_POSIXLY_CORRECT_ESCAPES`[機能フラグを有効にします](feature-flags.md)。有効にすると、POSIX準拠のShellエスケープメカニズムである[二重引用符](https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_02)が使用されます。 ## `[runners.docker]`セクション {#the-runnersdocker-section} 次の設定は、Dockerコンテナのパラメータを定義します。これらの設定は、Docker executorを使用するようにRunnerが設定されている場合に適用されます。 サービスとしての[Docker-in-Docker](https://docs.gitlab.com/ci/docker/using_docker_build/#use-docker-in-docker)、またはジョブ内で設定されているコンテナランタイムは、これらのパラメータを継承しません。 | パラメータ | 例 | 説明 | 
|------------------------------------|--------------------------------------------------|-------------| | `allowed_images` | `["ruby:*", "python:*", "php:*"]` | `.gitlab-ci.yml`ファイルで指定できるイメージのワイルドカードリスト。この設定がない場合は、すべてのイメージが許可されます(`["*/*:*"]`と同等)。[Docker](../executors/docker.md#restrict-docker-images-and-services) executorまたは[Kubernetes](../executors/kubernetes/_index.md#restrict-docker-images-and-services) executorで使用します。 | | `allowed_privileged_images` | | `privileged`が有効になっている場合に、特権モードで実行される`allowed_images`のワイルドカードサブセット。この設定がない場合は、すべてのイメージが許可されます(`["*/*:*"]`と同等)。[Docker](../executors/docker.md#restrict-docker-images-and-services) executorで使用します。 | | `allowed_pull_policies` | | `.gitlab-ci.yml`ファイルまたは`config.toml`ファイルで指定できるプルポリシーのリスト。指定されていない場合、`pull-policy`で指定されたプルポリシーのみが許可されます。[Docker](../executors/docker.md#allow-docker-pull-policies) executorで使用します。 | | `allowed_services` | `["postgres:9", "redis:*", "mysql:*"]` | `.gitlab-ci.yml`ファイルで指定できるサービスのワイルドカードリスト。この設定がない場合は、すべてのイメージが許可されます(`["*/*:*"]`と同等)。[Docker](../executors/docker.md#restrict-docker-images-and-services) executorまたは[Kubernetes](../executors/kubernetes/_index.md#restrict-docker-images-and-services) executorで使用します。 | | `allowed_privileged_services` | | `privileged`または`services_privileged`が有効になっている場合に、特権モードで実行できる`allowed_services`のワイルドカードサブセット。この設定がない場合は、すべてのイメージが許可されます(`["*/*:*"]`と同等)。[Docker](../executors/docker.md#restrict-docker-images-and-services) executorで使用します。 | | `cache_dir` | | Dockerキャッシュを保存するディレクトリ。絶対パス、または現在の作業ディレクトリを基準にした相対パスを指定できます。詳細については、`disable_cache`を参照してください。 | | `cap_add` | `["NET_ADMIN"]` | コンテナにLinux機能を追加します。 | | `cap_drop` | `["DAC_OVERRIDE"]` | コンテナから追加のLinux機能を削除します。 | | `cpuset_cpus` | `"0,1"` | コントロールグループの`CpusetCpus`。文字列。 | | `cpuset_mems` | `"0,1"` | コントロールグループの`CpusetMems`。文字列。 | | `cpu_shares` | | 相対CPU使用率を設定するために使用されるCPU共有の数。デフォルトは`1024`です。 | | `cpus` | `"2"` | CPUの数(Docker 1.13以降で利用可能)。文字列。 | | `devices` | `["/dev/net/tun"]` | 追加のホストデバイスをコンテナと共有します。 | | 
`device_cgroup_rules` | | カスタムデバイスの`cgroup`ルール(Docker 1.28以降で利用可能)。 | | `disable_cache` | | Docker executorには、グローバルキャッシュ(他のexecutorと同様)とDockerボリュームに基づくローカルキャッシュという2つのレベルのキャッシュがあります。この設定フラグは、自動的に作成された(ホストディレクトリにマップされていない)キャッシュボリュームの使用を無効にするローカルキャッシュでのみ機能します。つまり、ビルドの一時ファイルを保持するコンテナの作成を防ぐだけであり、Runnerが[分散キャッシュモード](autoscale.md#distributed-runners-caching)で設定されている場合は、キャッシュを無効にしません。 | | `disable_entrypoint_overwrite` | | イメージエントリポイントの上書きを無効にします。 | | `dns` | `["8.8.8.8"]` | コンテナが使用するDNSサーバーのリスト。 | | `dns_search` | | DNS検索ドメインのリスト。 | | `extra_hosts` | `["other-host:127.0.0.1"]` | コンテナ環境で定義する必要があるホスト。 | | `gpus` | | Dockerコンテナ用のGPUデバイス。`docker` CLIと同じ形式を使用します。詳細については、[Dockerのドキュメント](https://docs.docker.com/engine/containers/resource_constraints/#gpu)を参照してください。[GPUを有効にするための設定](gpus.md#docker-executor)が必要です。 | | `group_add` | `["docker"]` | コンテナプロセスを実行するためのグループをさらに追加します。 | | `helper_image` | | (高度)リポジトリのクローンやアーティファクトのアップロードに使用される[デフォルトのヘルパーイメージ](#helper-image)。 | | `helper_image_flavor` | | ヘルパーイメージフレーバー(`alpine`、`alpine3.21`、`alpine-latest`、`ubi-fips`、または`ubuntu`)を設定します。`alpine`がデフォルトです。`alpine`フレーバーは`alpine-latest`と同じバージョンを使用します。 | | `helper_image_autoset_arch_and_os` | | 基盤となるOSを使用して、ヘルパーイメージのアーキテクチャとOSを設定します。 | | `host` | | カスタムDockerエンドポイント。デフォルトは`DOCKER_HOST`環境変数または`unix:///var/run/docker.sock`です。 | | `hostname` | | Dockerコンテナのカスタムホスト名。 | | `image` | `"ruby:3.3"` | ジョブを実行するイメージ。 | | `links` | `["mysql_container:mysql"]` | ジョブを実行するコンテナにリンクする必要があるコンテナ。 | | `memory` | `"128m"` | メモリ制限。文字列。 | | `memory_swap` | `"256m"` | 合計メモリ制限。文字列。 | | `memory_reservation` | `"64m"` | メモリのソフト制限。文字列。 | | `network_mode` | | コンテナをカスタムネットワークに追加します。 | | `mac_address` | `92:d0:c6:0a:29:33` | コンテナのMACアドレス。 | | `oom_kill_disable` | | メモリ不足(`OOM`)エラーが発生した場合に、コンテナ内のプロセスを終了しません。 | | `oom_score_adjust` | | `OOM`スコアの調整。正の値は、プロセスを早期に終了することを意味します。 | | `privileged` | `false` | コンテナを特権モードで実行します。安全ではありません。 | | `services_privileged` | | 
サービスを特権モードで実行できるようにします。設定されていない場合(デフォルト)、代わりに`privileged`の値が使用されます。[Docker](../executors/docker.md#allow-docker-pull-policies) executorで使用します。安全ではありません。 | | `pull_policy` | | イメージプルポリシー(`never`、`if-not-present`、または`always`(デフォルト))。詳細については、[プルポリシーのドキュメント](../executors/docker.md#configure-how-runners-pull-images)を参照してください。[複数のプルポリシー](../executors/docker.md#set-multiple-pull-policies)の追加、[失敗したプルの再試行](../executors/docker.md#retry-a-failed-pull)、[プルポリシーの制限](../executors/docker.md#allow-docker-pull-policies)も可能です。 | | `runtime` | | Dockerコンテナのランタイム。 | | `isolation` | | コンテナ分離テクノロジー(`default`、`hyperv`、および`process`)。Windowsのみ。 | | `security_opt` | | セキュリティオプション(`docker run`の--security-opt)。`:`で区切られたキー/値のリストを取得します。`systempaths`仕様はサポートされていません。詳細については、[issue 36810](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/36810)をご覧ください。 | | `shm_size` | `300000` | イメージの共有メモリサイズ(バイト単位)。 | | `sysctls` | | `sysctl`のオプション。 | | `tls_cert_path` | macOSの場合: `/Users//.boot2docker/certs` | `ca.pem`、`cert.pem`、または`key.pem`が保存され、Dockerへの安全なTLS接続を確立するために使用されるディレクトリ。この設定は`boot2docker`で使用します。 | | `tls_verify` | | Dockerデーモンへの接続のTLS検証を有効または無効にします。デフォルトでは無効になっています。デフォルトでは、GitLab RunnerはSSH経由でDocker Unixソケットに接続します。UnixソケットはRTLSをサポートしておらず、暗号化と認証を提供するためにSSHを使用してHTTP経由で通信します。通常、`tls_verify`を有効にする必要はありません。有効にする場合には、追加の設定が必要です。`tls_verify`を有効にするには、デーモンが(デフォルトのUnixソケットではなく)ポートでリッスンする必要があり、GitLab Runner Dockerホストはデーモンがリッスンしているアドレスを使用する必要があります。 | | `user` | | コンテナ内のすべてのコマンドを、指定されたユーザーとして実行します。 | | `userns_mode` | | ユーザーネームスペースの再マッピングオプションが有効になっている場合の、コンテナおよびDockerサービス用のユーザーネームスペースモード。Docker 1.10以降で利用可能です。詳細については、[Dockerのドキュメント](https://docs.docker.com/engine/security/userns-remap/#disable-namespace-remapping-for-a-container)を参照してください。 | | `ulimit` | | コンテナに渡されるUlimit値。Docker `--ulimit`フラグと同じ構文を使用します。 | | `volumes` | `["/data", "/home/project/cache"]` | マウントする必要がある追加ボリューム。Docker `-v`フラグと同じ構文。 | | `volumes_from` | `["storage_container:ro"]` | 
別のコンテナから継承するボリュームのリスト。形式は`[:]`です。アクセスレベルはデフォルトで読み取り/書き込みですが、手動で`ro`(読み取り専用)または`rw`(読み取り/書き込み)に設定できます。 | | `volume_driver` | | コンテナに使用するボリュームドライバー。 | | `wait_for_services_timeout` | `30` | Dockerサービスを待機する時間。無効にするには`-1`に設定します。デフォルトは`30`です。 | | `container_labels` | | Runnerによって作成された各コンテナに追加するラベルのセット。ラベルの値には、展開用の環境変数を含めることができます。 | | `services_limit` | | ジョブごとに許可されるサービスの最大数を設定します。`-1`(デフォルト)は、制限がないことを意味します。 | | `service_cpuset_cpus` | | サービスに使用する`cgroups CpusetCpus`を含む文字列値。 | | `service_cpu_shares` | | サービスの相対CPU使用率を設定するために使用されるCPUシェア数(デフォルトは[`1024`](https://docs.docker.com/engine/containers/resource_constraints/#cpu))。 | | `service_cpus` | | サービスのCPU数を表す文字列値。Docker 1.13以降で利用可能です。 | | `service_gpus` | | Dockerコンテナ用のGPUデバイス。`docker` CLIと同じ形式を使用します。詳細については、[Dockerのドキュメント](https://docs.docker.com/engine/containers/resource_constraints/#gpu)を参照してください。[GPUを有効にするための設定](gpus.md#docker-executor)が必要です。 | | `service_memory` | | サービスのメモリ制限を表す文字列値。 | | `service_memory_swap` | | サービスの合計メモリ制限を表す文字列値。 | | `service_memory_reservation` | | サービスのメモリのソフト制限を表す文字列値。 | ### `[[runners.docker.services]]`セクション {#the-runnersdockerservices-section} ジョブと実行する追加の[サービス](https://docs.gitlab.com/ci/services/)を指定します。利用可能なイメージのリストについては、[Docker Registry](https://hub.docker.com)を参照してください。各サービスは個別のコンテナで実行され、ジョブにリンクされます。 | パラメータ | 例 | 説明 | |---------------|------------------------------------|-------------| | `name` | `"registry.example.com/svc1"` | サービスとして実行されるイメージの名前。 | | `alias` | `"svc1"` | サービスへのアクセスに使用できる追加の[エイリアス名](https://docs.gitlab.com/ci/services/#available-settings-for-services)。 | | `entrypoint` | `["entrypoint.sh"]` | コンテナのエントリポイントとして実行されるコマンドまたはスクリプト。構文は[Dockerfile ENTRYPOINT](https://docs.docker.com/reference/dockerfile/#entrypoint)ディレクティブに似ており、各Shellトークンは配列内の個別の文字列です。[GitLab Runner 13.6](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27173)で導入されました。 | | `command` | `["executable","param1","param2"]` | コンテナのコマンドとして使用されるコマンドまたはスクリプト。構文は[Dockerfile 
CMD](https://docs.docker.com/reference/dockerfile/#cmd)ディレクティブに似ており、各Shellトークンは配列内の個別の文字列です。[GitLab Runner 13.6](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27173)で導入されました。 | | `environment` | `["ENV1=value1", "ENV2=value2"]` | サービスコンテナの環境変数を付加または上書きします。 | 例: ```toml [runners.docker] host = "" hostname = "" tls_cert_path = "/Users/ayufan/.boot2docker/certs" image = "ruby:3.3" memory = "128m" memory_swap = "256m" memory_reservation = "64m" oom_kill_disable = false cpuset_cpus = "0,1" cpuset_mems = "0,1" cpus = "2" dns = ["8.8.8.8"] dns_search = [""] service_memory = "128m" service_memory_swap = "256m" service_memory_reservation = "64m" service_cpuset_cpus = "0,1" service_cpus = "2" services_limit = 5 privileged = false group_add = ["docker"] cap_add = ["NET_ADMIN"] cap_drop = ["DAC_OVERRIDE"] devices = ["/dev/net/tun"] disable_cache = false wait_for_services_timeout = 30 cache_dir = "" volumes = ["/data", "/home/project/cache"] extra_hosts = ["other-host:127.0.0.1"] shm_size = 300000 volumes_from = ["storage_container:ro"] links = ["mysql_container:mysql"] allowed_images = ["ruby:*", "python:*", "php:*"] allowed_services = ["postgres:9", "redis:*", "mysql:*"] [runners.docker.ulimit] "rtprio" = "99" [[runners.docker.services]] name = "registry.example.com/svc1" alias = "svc1" entrypoint = ["entrypoint.sh"] command = ["executable","param1","param2"] environment = ["ENV1=value1", "ENV2=value2"] [[runners.docker.services]] name = "redis:2.8" alias = "cache" [[runners.docker.services]] name = "postgres:9" alias = "postgres-db" [runners.docker.sysctls] "net.ipv4.ip_forward" = "1" ``` ### `[runners.docker]`セクションのボリューム {#volumes-in-the-runnersdocker-section} ボリュームの詳細については、[Dockerのドキュメント](https://docs.docker.com/engine/storage/volumes/)を参照してください。 次の例は、`[runners.docker]`セクションでボリュームを指定する方法を示しています。 #### 例1: データボリュームを追加する {#example-1-add-a-data-volume} データボリュームは、1つ以上のコンテナ内で特別に指定されたディレクトリで、Union File Systemをバイパスします。データボリュームは、コンテナのライフサイクルに依存せず、データを永続化するように設計されています。 
```toml [runners.docker] host = "" hostname = "" tls_cert_path = "/Users/ayufan/.boot2docker/certs" image = "ruby:3.3" privileged = false disable_cache = true volumes = ["/path/to/volume/in/container"] ``` この例では、コンテナ内の`/path/to/volume/in/container`という場所に新しいボリュームが作成されます。 #### 例2: ホストディレクトリをデータボリュームとしてマウントする {#example-2-mount-a-host-directory-as-a-data-volume} コンテナの外部にディレクトリを保存する場合は、Dockerデーモンのホストからコンテナにディレクトリをマウントできます。 ```toml [runners.docker] host = "" hostname = "" tls_cert_path = "/Users/ayufan/.boot2docker/certs" image = "ruby:3.3" privileged = false disable_cache = true volumes = ["/path/to/bind/from/host:/path/to/bind/in/container:rw"] ``` この例では、CI/CDホストの`/path/to/bind/from/host`をコンテナ内の`/path/to/bind/in/container`で使用します。 GitLab Runner 11.11以降では、定義された[サービス](https://docs.gitlab.com/ci/services/)についても[同様にホストディレクトリをマウント](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/1261)します。 ### プライベートコンテナレジストリを使用する {#use-a-private-container-registry} ジョブのイメージのソースとしてプライベートレジストリを使用するには、[CI/CD変数](https://docs.gitlab.com/ci/variables/)`DOCKER_AUTH_CONFIG`を使用して認証を設定します。次のいずれかで変数を設定できます。 - プロジェクトのCI/CD設定内で[`file`タイプ](https://docs.gitlab.com/ci/variables/#use-file-type-cicd-variables)として設定 - `config.toml`ファイル内で設定 `if-not-present`プルポリシーでプライベートレジストリを使用すると、[セキュリティ上の影響](../security/_index.md#usage-of-private-docker-images-with-if-not-present-pull-policy)が生じる可能性があります。プルポリシーの仕組みの詳細については、[Runnerがイメージをプルする方法を設定する](../executors/docker.md#configure-how-runners-pull-images)を参照してください。 プライベートコンテナレジストリの使用に関する詳細については、以下を参照してください。 - [プライベートコンテナレジストリからのイメージへのアクセス](https://docs.gitlab.com/ci/docker/using_docker_images/#access-an-image-from-a-private-container-registry) - [`.gitlab-ci.yml`キーワードリファレンス](https://docs.gitlab.com/ci/yaml/#image) Runnerによって実行されるステップの要約を次に示します。 1. レジストリ名がイメージ名から検出されます。 1. 値が空でない場合、executorはこのレジストリに対する認証設定を検索します。 1. 
最後に、指定されたレジストリに対応する認証が見つかった場合、以降のプルではその認証が使用されます。 #### GitLab統合レジストリのサポート {#support-for-gitlab-integrated-registry} GitLabは、ジョブのデータとともに、統合レジストリの認証情報を送信します。これらの認証情報は、レジストリの認証パラメータリストに自動的に追加されます。 このステップの後、レジストリに対する認証は、`DOCKER_AUTH_CONFIG`変数で追加された設定と同様に進みます。 ジョブでは、GitLab統合レジストリのイメージがプライベートまたは保護されている場合でも、任意のイメージを使用できます。ジョブがアクセスできるイメージの詳細については、[CI/CDジョブトークンのドキュメント](https://docs.gitlab.com/ci/jobs/ci_job_token/)を参照してください。 #### Docker認証解決の優先順位 {#precedence-of-docker-authorization-resolving} 前述のように、GitLab Runnerはさまざまな方法で送信される認証情報を使用して、レジストリに対してDockerを認証できます。適切なレジストリを見つけるために、次の優先順位が考慮されます。 1. `DOCKER_AUTH_CONFIG`で設定された認証情報 1. GitLab Runnerホストでローカルに設定された認証情報(`~/.docker/config.json`または`~/.dockercfg`ファイルに保存)(例: ホストで`docker login`を実行した場合)。 1. ジョブのペイロードとともにデフォルトで送信される認証情報(例: 前述の*統合レジストリ*の認証情報)。 レジストリに対して最初に検出された認証情報が使用されます。たとえば、`DOCKER_AUTH_CONFIG`変数を使用して*統合レジストリ*の認証情報を追加すると、デフォルトの認証情報が上書きされます。 ## `[runners.parallels]`セクション {#the-runnersparallels-section} 次にParallelsのパラメータを示します。 | パラメータ | 説明 | |---------------------|-------------| | `base_name` | クローンされるParallels VMの名前。 | | `template_name` | Parallels VMにリンクされたテンプレートのカスタム名。オプション。 | | `disable_snapshots` | 無効にした場合、ジョブが完了するとVMは破棄されます。 | | `allowed_images` | 許可される`image`/`base_name`値のリスト。これらの値は正規表現として表されます。詳細については、[ベースVMイメージを上書きする](#overriding-the-base-vm-image)セクションを参照してください。 | 例: ```toml [runners.parallels] base_name = "my-parallels-image" template_name = "" disable_snapshots = false ``` ## `[runners.virtualbox]`セクション {#the-runnersvirtualbox-section} 次にVirtualBoxのパラメータを示します。このexecutorは、VirtualBoxマシンを制御するために`vboxmanage`実行可能ファイルに依存しています。そのため、Windowsホストでは`PATH`環境変数を調整する必要があります(`PATH=%PATH%;C:\Program Files\Oracle\VirtualBox`)。 | パラメータ | 説明 | |---------------------|-------------| | `base_name` | クローンされるVirtualBox VMの名前。 | | `base_snapshot` | リンクされたクローンを作成する際の特定のVMスナップショットの名前またはUUID。この値が空であるか省略されている場合は、現在のスナップショットが使用されます。現在のスナップショットが存在しない場合は、スナップショットが作成されます。ただし、`disable_snapshots`がtrueでない場合は、ベースVMの完全なクローンが作成されます。 | | `base_folder` | 
新しいVMを保存するフォルダー。この値が空であるか省略されている場合は、デフォルトのVMフォルダーが使用されます。 | | `disable_snapshots` | 無効にした場合、ジョブが完了するとVMは破棄されます。 | | `allowed_images` | 許可される`image`/`base_name`値のリスト。これらの値は正規表現として表されます。詳細については、[ベースVMイメージを上書きする](#overriding-the-base-vm-image)セクションを参照してください。 | | `start_type` | VMの起動時のグラフィカルフロントエンドタイプ。 | 例: ```toml [runners.virtualbox] base_name = "my-virtualbox-image" base_snapshot = "my-image-snapshot" disable_snapshots = false start_type = "headless" ``` `start_type`パラメータは、仮想イメージの起動時に使用されるグラフィカルフロントエンドを決定します。有効な値は、ホストとゲストの組み合わせでサポートされている`headless`(デフォルト)、`gui`、または`separate`です。 ## ベースVMイメージを上書きする {#overriding-the-base-vm-image} Parallels executorとVirtualBox executorの両方で、`base_name`で指定されたベースVM名を上書きできます。そのためには、`.gitlab-ci.yml`ファイルの[image](https://docs.gitlab.com/ci/yaml/#image)パラメータを使用します。 下位互換性のため、デフォルトではこの値を上書きできません。`base_name`で指定されたイメージのみが許可されます。 ユーザーが`.gitlab-ci.yml`の[image](https://docs.gitlab.com/ci/yaml/#image)パラメータを使用してVMイメージを選択できるようにするには、次のようにします。 ```toml [runners.virtualbox] ... allowed_images = [".*"] ``` この例では、既存のVMイメージであればどれでも使用できます。 `allowed_images`パラメータは、正規表現のリストです。必要な精度に応じて設定を細かく指定できます。たとえば、特定のVMイメージのみを許可したい場合は、次のような正規表現を使用できます。 ```toml [runners.virtualbox] ... 
allowed_images = ["^allowed_vm[1-2]$"] ``` この例では、`allowed_vm1`と`allowed_vm2`のみが許可されます。その他の試行はすべてエラーになります。 ## `[runners.ssh]`セクション {#the-runnersssh-section} 次のパラメータは、SSH接続を定義します。 | パラメータ | 説明 | |------------------------------------|-------------| | `host` | 接続先 | | `port` | ポートデフォルトは`22`です。 | | `user` | ユーザー名。 | | `password` | パスワード。 | | `identity_file` | SSH秘密キーのファイルパス(`id_rsa`、`id_dsa`、または`id_edcsa`)。ファイルは暗号化されていない状態で保存する必要があります。 | | `disable_strict_host_key_checking` | この値は、Runnerが厳密なホストキーチェックを使用するかどうかを決定します。デフォルトは`true`です。GitLab 15.0では、デフォルト値、または指定されていない場合の値は`false`です。 | 例: ```toml [runners.ssh] host = "my-production-server" port = "22" user = "root" password = "production-server-password" identity_file = "" ``` ## `[runners.machine]`セクション {#the-runnersmachine-section} 次のパラメータは、Docker Machineベースのオートスケール機能を定義します。詳細については、[Docker Machine Executorのオートスケール設定](autoscale.md)を参照してください。 | パラメータ | 説明 | |-----------------------------------|-------------| | `MaxGrowthRate` | Runnerに並行して追加できるマシンの最大数。デフォルトは`0`(制限なし)です。 | | `IdleCount` | _アイドル_状態で作成され待機する必要があるマシンの数。 | | `IdleScaleFactor` | 使用中マシンの数の係数として示される_アイドル_マシンの数。浮動小数点数形式である必要があります。詳細については、[オートスケールのドキュメント](autoscale.md#the-idlescalefactor-strategy)を参照してください。`0.0`がデフォルトです。 | | `IdleCountMin` | `IdleScaleFactor`使用時に作成され_アイドル_状態で待機する必要があるマシンの最小数。デフォルトは1です。 | | `IdleTime` | マシンが削除されるまでにそのマシンが_アイドル_状態を維持する時間(秒単位)。 | | `[[runners.machine.autoscaling]]` | オートスケール設定の上書きが含まれている複数のセクション。現在の時刻に一致する式を含む最後のセクションが選択されます。 | | `OffPeakPeriods` | 非推奨: スケジューラがOffPeakモードになっている時間帯。cron形式のパターンの配列([下記](#periods-syntax)を参照)。 | | `OffPeakTimezone` | 非推奨: OffPeakPeriodsで指定された時刻のタイムゾーン。`Europe/Berlin`のようなタイムゾーン文字列です。省略または空の場合、デフォルトはホストのロケールシステム設定です。GitLab Runnerは、`ZONEINFO`環境変数で指定されたディレクトリまたは解凍済みzipファイルでタイムゾーンデータベースを検索し、次にUnixシステム上の既知のインストール場所を検索し、最後に`$GOROOT/lib/time/zoneinfo.zip`内を検索します。 | | `OffPeakIdleCount` | 非推奨: `IdleCount`と同様ですが、_オフピーク_の時間帯を対象としています。 | | `OffPeakIdleTime` | 非推奨: `IdleTime`と同様ですが、_オフピーク_の時間帯を対象としています。 | | `MaxBuilds` | 
マシンが削除されるまでの最大ジョブ(ビルド)数。 | | `MachineName` | マシンの名前。`%s`を含める**必要があります**。これは一意のマシン識別子に置き換えられます。 | | `MachineDriver` | Docker Machineの`driver`。詳細については、[Docker Machine設定のクラウドプロバイダーセクション](autoscale.md#supported-cloud-providers)を参照してください。 | | `MachineOptions` | MachineDriverのDocker Machineオプション。詳細については、[サポートされているクラウドプロバイダー](autoscale.md#supported-cloud-providers)を参照してください。AWSのすべてのオプションの詳細については、Docker Machineリポジトリの[AWS](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md)プロジェクトと[GCP](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/gce.md)プロジェクトを参照してください。 | ### `[[runners.machine.autoscaling]]`セクション {#the-runnersmachineautoscaling-sections} 次のパラメータは、[Instance](../executors/instance.md) executorまたは[Docker Autoscaler](../executors/docker_autoscaler.md#example-aws-autoscaling-for-1-job-per-instance) executorを使用する際に利用可能な設定を定義します。 | パラメータ | 説明 | |-------------------|-------------| | `Periods` | このスケジュールがアクティブな時間帯。cron形式のパターンの配列([下記](#periods-syntax)を参照)。 | | `IdleCount` | _アイドル_状態で作成され待機する必要があるマシンの数。 | | `IdleScaleFactor` | (実験的機能)使用中のマシン数の係数として示される_アイドル_マシンの数。浮動小数点数形式である必要があります。詳細については、[オートスケールのドキュメント](autoscale.md#the-idlescalefactor-strategy)を参照してください。`0.0`がデフォルトです。 | | `IdleCountMin` | `IdleScaleFactor`使用時に作成され_アイドル_状態で待機する必要があるマシンの最小数。デフォルトは1です。 | | `IdleTime` | マシンが削除されるまでにそのマシンが_アイドル_状態である時間(秒単位)。 | | `Timezone` | `Periods`で指定された時刻のタイムゾーン。`Europe/Berlin`のようなタイムゾーン文字列です。省略または空の場合、デフォルトはホストのロケールシステム設定です。GitLab Runnerは、`ZONEINFO`環境変数で指定されたディレクトリまたは解凍済みzipファイルでタイムゾーンデータベースを検索し、次にUnixシステム上の既知のインストール場所を検索し、最後に`$GOROOT/lib/time/zoneinfo.zip`内を検索します。 | 例: ```toml [runners.machine] IdleCount = 5 IdleTime = 600 MaxBuilds = 100 MachineName = "auto-scale-%s" MachineDriver = "google" # Refer to Docker Machine docs on how to authenticate: https://docs.docker.com/machine/drivers/gce/#credentials MachineOptions = [ # Additional machine options can be added using the Google Compute Engine driver. 
# If you experience problems with an unreachable host (ex. "Waiting for SSH"), # you should remove optional parameters to help with debugging. # https://docs.docker.com/machine/drivers/gce/ "google-project=GOOGLE-PROJECT-ID", "google-zone=GOOGLE-ZONE", # e.g. 'us-central1-a', full list in https://cloud.google.com/compute/docs/regions-zones/ ] [[runners.machine.autoscaling]] Periods = ["* * 9-17 * * mon-fri *"] IdleCount = 50 IdleCountMin = 5 IdleScaleFactor = 1.5 # Means that current number of Idle machines will be 1.5*in-use machines, # no more than 50 (the value of IdleCount) and no less than 5 (the value of IdleCountMin) IdleTime = 3600 Timezone = "UTC" [[runners.machine.autoscaling]] Periods = ["* * * * * sat,sun *"] IdleCount = 5 IdleTime = 60 Timezone = "UTC" ``` ### periods構文 {#periods-syntax} `Periods`設定は、cron形式で表される時間帯の文字列パターンを集めた配列です。行は次のフィールドで構成されます。 ```plaintext [second] [minute] [hour] [day of month] [month] [day of week] [year] ``` 標準のcron設定ファイルと同様に、これらのフィールドには単一値、範囲、リスト、およびアスタリスクを含めることができます。[構文の詳細な説明](https://github.com/gorhill/cronexpr#implementation)を参照してください。 ## `[runners.instance]`セクション {#the-runnersinstance-section} | パラメータ | 型 | 説明 | |------------------|--------|-------------| | `allowed_images` | 文字列 | VM分離が有効になっている場合、`allowed_images`はジョブが指定できるイメージを制御します。 | ## `[runners.autoscaler]`セクション {#the-runnersautoscaler-section} {{< history >}} - GitLab Runner v15.10.0で導入されました。 {{< /history >}} 次のパラメータは、オートスケーラー機能を設定します。これらのパラメータは、[インスタンス](../executors/instance.md) executorと[Docker Autoscaler](../executors/docker_autoscaler.md) executorでのみ使用できます。 | パラメータ | 説明 | |----------------------------------|-------------| | `capacity_per_instance` | 1つのインスタンスで同時に実行できるジョブの数。 | | `max_use_count` | インスタンスが削除対象としてスケジュールされる前にそのインスタンスを使用できる最大回数。 | | `max_instances` | 許可されるインスタンスの最大数。これは、インスタンスの状態(保留中、実行中、削除中)に関係なく適用されます。デフォルトは`0`(無制限)です。 | | `plugin` | 
使用する[フリート](https://gitlab.com/gitlab-org/fleeting/fleeting)プラグイン。プラグインのインストール方法と参照方法について詳しくは、[フリートプラグインをインストールする](../fleet_scaling/fleeting.md#install-a-fleeting-plugin)を参照してください。 | | `delete_instances_on_shutdown` | GitLab Runnerのシャットダウン時に、プロビジョニングされたすべてのインスタンスを削除するかどうかを指定します。デフォルト: `false`。[GitLab Runner 15.11](https://gitlab.com/gitlab-org/fleeting/taskscaler/-/merge_requests/24)で導入されました。 | | `instance_ready_command` | オートスケーラーによってプロビジョニングされた各インスタンスでこのコマンドを実行して、インスタンスが使用できる状態になっていることを確認します。失敗すると、インスタンスが削除されます。[GitLab Runner 16.11](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/37473)で導入されました。 | | `instance_acquire_timeout` | Runnerがインスタンス取得を待機してタイムアウトになるまでの最大時間。デフォルト: `15m`(15分)。この値は、実際の環境に合わせて調整できます。[GitLab Runner 18.1](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5563)で導入されました。 | | `update_interval` | フリートプラグインでインスタンスの更新を確認する間隔。デフォルト: `1m`(1分)。[GitLab Runner 16.11](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4722)で導入されました。 | | `update_interval_when_expecting` | 状態が変化することが予期される場合にフリートプラグインでインスタンスの更新を確認する間隔。たとえば、インスタンスがインスタンスをプロビジョニングし、Runnerが`pending`から`running`への移行を待機している場合などです。デフォルト: `2s`(2秒)。[GitLab Runner 16.11](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4722)で導入されました。 | | `deletion_retry_interval` | 以前の削除試行が効果がなかった場合に、プラグインが削除を再試行するまで待機する間隔。デフォルト: `1m`(1分)。[GitLab Runner 18.4](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5777)で導入。 | | `shutdown_deletion_interval`| インスタンスを削除してからシャットダウン中にそれらのステータスをチェックするまでの間で使用される、フリーティングプラグインの間隔。デフォルト: `10s`(10秒)。[GitLab Runner 18.4](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5777)で導入。 | | `shutdown_deletion_retries` | シャットダウン前にインスタンスが削除を完了したことを確認するために、フリーティングプラグインが行う試行の最大数。デフォルト: `3`。[GitLab Runner 18.4](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5777)で導入。 | | `failure_threshold` | フリーティングプラグインがインスタンスを置き換えるまでに発生する、連続したヘルスの失敗の最大数。ハートビート機能も参照してください。デフォルト: `3`。[GitLab Runner 
18.4](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5777)で導入。 | | `log_internal_ip` | VMの内部IPアドレスをCI/CDの出力ログに記録するかどうかを指定します。デフォルト: `false`。[GitLab Runner 18.1](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5519)で導入されました。 | | `log_external_ip` | VMの外部IPアドレスをCI/CDの出力ログに記録するかどうかを指定します。デフォルト: `false`。[GitLab Runner 18.1](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5519)で導入されました。 | {{< alert type="note" >}} `instance_ready_command`がアイドル状態のスケールルールで頻繁に失敗する場合、Runnerがジョブを受け入れるよりも速くインスタンスが削除および作成される可能性があります。スケールスロットリングをサポートするため、[GitLab 17.0](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/37497)で指数バックオフが追加されました。 {{< /alert >}} {{< alert type="note" >}} オートスケーラーの設定オプションは、設定が変更されても再読み込みされません。ただし、GitLab 17.5.0以降では、設定が変更されると、`[[runners.autoscaler.policy]]`エントリが再読み込みされます。 {{< /alert >}} ## `[runners.autoscaler.plugin_config]`セクション {#the-runnersautoscalerplugin_config-section} このハッシュテーブルはJSONに再エンコードされ、設定済みのプラグインに直接渡されます。 [フリート](https://gitlab.com/gitlab-org/fleeting/fleeting)プラグインには通常、サポートされている設定に関するドキュメントが付いています。 ## `[runners.autoscaler.scale_throttle]`セクション {#the-runnersautoscalerscale_throttle-section} {{< history >}} - GitLab Runner v17.0.0で導入されました。 {{< /history >}} | パラメータ | 説明 | |-----------|-------------| | `limit` | 1秒あたりにプロビジョニングできる新しいインスタンスのレート制限。`-1`は無制限を意味します。デフォルト(`0`)では、制限が`100`に設定されます。 | | `burst` | 新しいインスタンスのバースト制限。デフォルトは`max_instances`に設定されるか、`max_instances`が設定されていない場合は`limit`に設定されます。`limit`が無制限の場合、`burst`は無視されます。 | ### `limit`と`burst`の関係 {#relationship-between-limit-and-burst} スケールスロットルは、トークンクォータシステムを使用してインスタンスを作成します。このシステムは、次の2つの値で定義されます。 - `burst`: クォータの最大サイズ。 - `limit`: 1秒あたりのクォータ更新レート。 一度に作成できるインスタンスの数は、残りのクォータによって決まります。十分なクォータがある場合は、その量までインスタンスを作成できます。クォータがなくなった場合は、1秒あたり`limit`の数のインスタンスを作成できます。インスタンスの作成が停止すると、クォータは1秒あたり`limit`ずつ、`burst`の値に達するまで増加します。 たとえば、`limit`が`1`で`burst`が`60`の場合は、次のようになります。 - 60個のインスタンスを即時に作成できますが、制限(スロットル)されます。 - 60秒待機すると、さらに60個のインスタンスを即時に作成できます。 - 待機しない場合は、1秒ごとに1つのインスタンスを作成できます。 ## 
`[runners.autoscaler.connector_config]`セクション {#the-runnersautoscalerconnector_config-section} [フリート](https://gitlab.com/gitlab-org/fleeting/fleeting)プラグインには通常、サポートされている接続オプションに関するドキュメントが付いています。 プラグインはコネクタ設定を自動的に更新します。`[runners.autoscaler.connector_config]`を使用して、コネクタ設定の自動更新を上書きしたり、プラグインが判断できない空の値を入力したりできます。 | パラメータ | 説明 | |--------------------------|-------------| | `os` | インスタンスのオペレーティングシステム。 | | `arch` | インスタンスのアーキテクチャ。 | | `protocol` | `ssh`、`winrm`、または`winrm+https`。Windowsが検出された場合、デフォルトで`winrm`が使用されます。 | | `protocol_port` | 指定されたプロトコルに基づいて接続を確立するために使用されるポート。デフォルトは`ssh:22`、`winrm+http:5985`、`winrm+https:5986`です。 | | `username` | 接続に使用するユーザー名。 | | `password` | 接続に使用するパスワード。 | | `key_path` | 接続に使用するTLSキー、または動的にプロビジョニングされた認証情報に使用するTLSキー。 | | `use_static_credentials` | 自動認証情報プロビジョニングが無効になっています。デフォルト: `false`。 | | `keepalive` | 接続キープアライブ時間。 | | `timeout` | 接続タイムアウト時間。 | | `use_external_addr` | プラグインが提供する外部アドレスを使用するかどうか。プラグインが内部アドレスのみを返す場合は、この設定に関係なく内部アドレスが使用されます。デフォルト: `false`。 | ## `[runners.autoscaler.state_storage]`セクション {#the-runnersautoscalerstate_storage-section} {{< details >}} - ステータス: ベータ版 {{< /details >}} {{< history >}} - GitLab Runner 17.5.0で導入されました。 {{< /history >}} ステートストレージが無効になっている場合(デフォルト)、GitLab Runnerが起動すると、安全上の理由から既存のフリートインスタンスは直ちに削除されます。たとえば、`max_use_count`が`1`に設定されている場合、使用状態がわからないと、すでに使用されているインスタンスに誤ってジョブを割り当ててしまう可能性があります。 ステートストレージ機能を有効にすると、インスタンスの状態をローカルディスクに保持できます。この場合、GitLab Runnerの起動時にインスタンスが存在していても、そのインスタンスは削除されません。キャッシュされた接続の詳細、使用回数、およびその他の設定が復元されます。 ステートストレージ機能を有効にする場合は、次の点を考慮してください。 - インスタンスの認証の詳細(ユーザー名、パスワード、キー)はディスクに残ります。 - インスタンスがジョブをアクティブに実行しているときにそのインスタンスが復元されると、GitLab Runnerはデフォルトでそのインスタンスを削除します。GitLab Runnerがジョブを再開できないため、この動作により安全性が確保されます。インスタンスを維持するには、`keep_instance_with_acquisitions`を`true`に設定します。 インスタンスで進行中のジョブについて特に懸念していない場合には、`keep_instance_with_acquisitions`を`true`に設定すると役立ちます。また、`instance_ready_command`設定オプションを使用して環境をクリーンアップし、インスタンスを維持することもできます。この場合、実行中のすべてのコマンドを停止したり、Dockerコンテナを強制的に削除したりすることがあります。 | パラメータ | 説明 | 
|-----------------------------------|-------------| | `enabled` | ステートストレージを有効にするかどうか。デフォルト: `false`。 | | `dir` | ステートストアディレクトリ。このディレクトリの中に、各Runner設定エントリに対応するサブディレクトリがあります。デフォルトは、Gitlab Runner設定ファイルディレクトリ内の`.taskscaler`です。 | | `keep_instance_with_acquisitions` | アクティブなジョブがあるインスタンスを削除するかどうか。デフォルト: `false`。 | ## `[[runners.autoscaler.policy]]`セクション {#the-runnersautoscalerpolicy-sections} **注** \- ここでの`idle_count`はジョブの数を示し、従来のオートスケール方式のようにオートスケールされたマシンの数ではありません。 | パラメータ | 説明 | |----------------------|-------------| | `periods` | このポリシーが有効になっている期間を示すunix-cron形式の文字列の配列。デフォルト: `* * * * *` | | `timezone` | unix-cron期間の評価時に使用されるタイムゾーン。デフォルト: システムのローカルタイムゾーン。 | | `idle_count` | ジョブで即時利用可能であるべき目標アイドル容量。 | | `idle_time` | インスタンスが終了するまでにアイドル状態でいられる時間。 | | `scale_factor` | `idle_count`に加えて、ジョブで即時利用可能であるべき目標アイドル容量を、現在の使用中の容量の係数として表したもの。`0.0`がデフォルトです。 | | `scale_factor_limit` | `scale_factor`の計算から得られる最大容量。 | | `preemptive_mode` | プリエンプティブモードがオンになっている場合、ジョブがリクエストされるのは、インスタンスが使用可能であることが確認された場合だけです。この動作により、プロビジョニングの遅延なしに、ほぼすぐにジョブを開始できます。プリエンプティブモードがオフになっている場合、まずジョブがリクエストされた後、次にシステムが必要なキャパシティを検出したりプロビジョニングしたりしようとします。 | アイドル状態のインスタンスを削除するかどうかを決定するために、taskscalerは`idle_time`をインスタンスのアイドル期間と比較します。各インスタンスのアイドル期間は、インスタンスが次の操作を行った時点から計算されます。 - 最後にジョブを完了した時点(インスタンスが以前に使用されていた場合)。 - プロビジョニングされた時点(未使用の場合)。 このチェックは、スケーリングイベント中に発生します。設定されている`idle_time`を超えるインスタンスは、必要な`idle_count`ジョブキャパシティを維持するために必要な場合を除き、削除されます。 `scale_factor`を設定すると、`idle_count`が最小の`idle`容量になり、`scaler_factor_limit`が最大の`idle`容量になります。 複数のポリシーを定義できます。最後に一致したポリシーが使用されます。 次の例では、アイドルカウント`1`は、月曜日から金曜日の08:00から15:59の間に使用されます。それ以外の場合、アイドルカウントは0です。 ```toml [[runners.autoscaler.policy]] idle_count = 0 idle_time = "0s" periods = ["* * * * *"] [[runners.autoscaler.policy]] idle_count = 1 idle_time = "30m0s" periods = ["* 8-15 * * mon-fri"] ``` ### periods構文 {#periods-syntax-1} `periods`設定には、ポリシーが有効になっている期間を示す、unix-cron形式の文字列の配列が含まれています。cron形式は、次の5つのフィールドで構成されています。 ```plaintext ┌────────── minute (0 - 59) │ ┌──────── hour (0 - 23) │ │ ┌────── 
day of month (1 - 31) │ │ │ ┌──── month (1 - 12) │ │ │ │ ┌── day of week (1 - 7 or MON-SUN, 0 is an alias for Sunday) * * * * * ``` - `-`は、2つの数値の間で範囲を指定するときに使用できます。 - `*`は、そのフィールドの有効な値の範囲全体を表すときに使用できます。 - `/`に続く数字は、範囲内でその数字ごとにスキップするときに範囲の後に使用できます。たとえば、hourフィールドに0-12/2と指定すると、00:00から12:00の間、2時間ごとに期間がアクティブになります。 - `,`は、フィールドの有効な数値または範囲のリストを区切るときに使用できます。たとえば、`1,2,6-9`などです。 このcronジョブは時間の範囲を表していることを覚えておいてください。例: | 期間 | 効果 | |----------------------|--------| | `1 * * * *` | 1時間ごとに1分間にわたってルールが有効になります(非常に効果的である可能性は低い) | | `* 0-12 * * *` | 毎日の開始時に12時間にわたってルールが有効になります | | `0-30 13,16 * * SUN` | 毎週日曜日の午後1時に30分間、午後4時に30分間にわたってルールが有効になります | ## `[runners.autoscaler.vm_isolation]`セクション {#the-runnersautoscalervm_isolation-section} VM分離は[`nesting`](../executors/instance.md#nested-virtualization)を使用し、これはmacOSでのみサポートされています。 | パラメータ | 説明 | |------------------|-------------| | `enabled` | VM分離を有効にするかどうかを指定します。デフォルト: `false`。 | | `nesting_host` | `nesting`デーモンホスト。 | | `nesting_config` | `nesting`設定。JSONにシリアル化され、`nesting`デーモンに送信されます。 | | `image` | ジョブイメージが指定されていない場合に、nestingデーモンで使用されるデフォルトイメージ。 | ## `[runners.autoscaler.vm_isolation.connector_config]`セクション {#the-runnersautoscalervm_isolationconnector_config-section} `[runners.autoscaler.vm_isolation.connector_config]`セクションのパラメータは、[`[runners.autoscaler.connector_config]`](#the-runnersautoscalerconnector_config-section)セクションと同じですが、オートスケールされたインスタンスではなく、`nesting`でプロビジョニングされた仮想マシンへの接続に使用されます。 ## `[runners.custom]`セクション {#the-runnerscustom-section} 次のパラメータは、[カスタムexecutor](../executors/custom.md)の設定を定義します。 | パラメータ | 型 | 説明 | |-------------------------|--------------|-------------| | `config_exec` | 文字列 | 実行可能ファイルのパス。これにより、ユーザーはジョブ開始前に一部の設定を上書きできます。これらの値は、[`[[runners]]`](#the-runners-section)セクションで設定されている値を上書きします。一覧は[Custom executorのドキュメント](../executors/custom.md#config)にあります。 | | `config_args` | 文字列配列 | `config_exec`実行可能ファイルに渡される最初の引数セット。 | | `config_exec_timeout` | 整数 | `config_exec`の実行が完了するまでのタイムアウト(秒)。デフォルトは3600秒(1時間)。 | | `prepare_exec` | 
文字列 | 環境を準備するための実行可能ファイルのパス。 | | `prepare_args` | 文字列配列 | `prepare_exec`実行可能ファイルに渡される最初の引数セット。 | | `prepare_exec_timeout` | 整数 | `prepare_exec`の実行が完了するまでのタイムアウト(秒)。デフォルトは3600秒(1時間)。 | | `run_exec` | 文字列 | **必須**。環境内でスクリプトを実行するための実行可能ファイルのパス。たとえば、クローンスクリプトやビルドスクリプトなどです。 | | `run_args` | 文字列配列 | `run_exec`実行可能ファイルに渡される最初の引数セット。 | | `cleanup_exec` | 文字列 | 環境をクリーンアップするための実行可能ファイルのパス。 | | `cleanup_args` | 文字列配列 | `cleanup_exec`実行可能ファイルに渡される最初の引数セット。 | | `cleanup_exec_timeout` | 整数 | `cleanup_exec`の実行が完了するまでのタイムアウト(秒)。デフォルトは3600秒(1時間)。 | | `graceful_kill_timeout` | 整数 | `prepare_exec`と`cleanup_exec`が(ジョブのキャンセル中などに)終了した場合に待機する時間(秒)。このタイムアウト後に、プロセスが強制終了されます。デフォルトは600秒(10分)。 | | `force_kill_timeout` | 整数 | kill(強制終了)シグナルがスクリプトに送信された後に待機する時間(秒)。デフォルトは600秒(10分)。 | ## `[runners.cache]`セクション {#the-runnerscache-section} 次のパラメータは、分散キャッシュ機能を定義します。詳細については、[Runnerオートスケールに関するドキュメント](autoscale.md#distributed-runners-caching)を参照してください。 | パラメータ | 型 | 説明 | |--------------------------|---------|-------------| | `Type` | 文字列 | `s3`、`gcs`、`azure`のいずれか。 | | `Path` | 文字列 | キャッシュURLの先頭に付加するパスの名前。 | | `Shared` | ブール値 | Runner間でのキャッシュ共有を有効にします。デフォルトは`false`です。 | | `MaxUploadedArchiveSize` | int64 | クラウドストレージにアップロードされるキャッシュアーカイブの制限(バイト単位)。悪意のあるアクターはこの制限を回避できるため、GCSアダプターは署名付きURLのX-Goog-Content-Length-Rangeヘッダーによってこの制限を適用します。クラウドストレージプロバイダーにも制限を設定する必要があります。 | 以下の環境変数を使用して、キャッシュの圧縮を設定できます: | 変数 | 説明 | デフォルト | 値 | |----------------------------|---------------------------------------|-----------|-------------------------------------------------| | `CACHE_COMPRESSION_FORMAT` | キャッシュアーカイブの圧縮形式 | `zip` | `zip`、`tarzstd` | | `CACHE_COMPRESSION_LEVEL` | キャッシュアーカイブの圧縮レベル | `default` | `fastest`、`fast`、`default`、`slow`、`slowest` | `tarzstd`形式は、`zip`よりも優れた圧縮率を提供する、Zstandard圧縮でTARを使用します。圧縮レベルの範囲は、`fastest`(最大速度を実現するための最小圧縮)から`slowest`(最小ファイルサイズを実現するための最大圧縮)です。`default`レベルは、圧縮率と速度のバランスの取れたトレードオフを提供します。 例: ```yaml job: variables: CACHE_COMPRESSION_FORMAT: tarzstd CACHE_COMPRESSION_LEVEL: fast ``` 
キャッシュメカニズムは、事前署名付きURLを使用してキャッシュをアップロードおよびダウンロードします。GitLab Runnerがそれ自体のインスタンスでURLに署名します。ジョブのスクリプト(キャッシュのアップロード/ダウンロードスクリプトを含む)がローカルマシンまたは外部マシンで実行されるかどうかは関係ありません。たとえば、`shell` executorや`docker` executorは、GitLab Runnerプロセスが実行されているマシンでスクリプトを実行します。一方で`virtualbox`や`docker+machine`は、別のVMに接続してスクリプトを実行します。このプロセスは、キャッシュアダプターの認証情報が漏洩する可能性を最小限に抑えるというセキュリティ上の理由によるものです。 [S3キャッシュアダプター](#the-runnerscaches3-section)がIAMインスタンスプロファイルを使用するように設定されている場合、このアダプターはGitLab Runnerマシンに接続されているプロファイルを使用します。[GCSキャッシュアダプター](#the-runnerscachegcs-section)が`CredentialsFile`を使用するように設定されている場合も同様です。このファイルがGitLab Runnerマシンに存在している必要があります。 次の表に、`config.toml`、`register`のCLIオプションおよび環境変数を示します。これらの環境変数を定義すると、新しいGitLab Runnerを登録した後に、値が`config.toml`に保存されます。 `config.toml`からS3の認証情報を省略し、環境変数から静的な認証情報を読み込む場合は、`AWS_ACCESS_KEY_ID`と`AWS_SECRET_ACCESS_KEY`を定義できます。詳細については、[AWS SDKデフォルト認証情報チェーンセクション](#aws-sdk-default-credential-chain)を参照してください。 | 設定 | TOMLフィールド | `register`のCLIオプション | `register`の環境変数 | |--------------------------------|---------------------------------------------------|--------------------------------------------|-------------------------------------| | `Type` | `[runners.cache] -> Type` | `--cache-type` | `$CACHE_TYPE` | | `Path` | `[runners.cache] -> Path` | `--cache-path` | `$CACHE_PATH` | | `Shared` | `[runners.cache] -> Shared` | `--cache-shared` | `$CACHE_SHARED` | | `S3.ServerAddress` | `[runners.cache.s3] -> ServerAddress` | `--cache-s3-server-address` | `$CACHE_S3_SERVER_ADDRESS` | | `S3.AccessKey` | `[runners.cache.s3] -> AccessKey` | `--cache-s3-access-key` | `$CACHE_S3_ACCESS_KEY` | | `S3.SecretKey` | `[runners.cache.s3] -> SecretKey` | `--cache-s3-secret-key` | `$CACHE_S3_SECRET_KEY` | | `S3.SessionToken` | `[runners.cache.s3] -> SessionToken` | `--cache-s3-session-token` | `$CACHE_S3_SESSION_TOKEN` | | `S3.BucketName` | `[runners.cache.s3] -> BucketName` | `--cache-s3-bucket-name` | `$CACHE_S3_BUCKET_NAME` | | `S3.BucketLocation` | `[runners.cache.s3] -> BucketLocation` | 
`--cache-s3-bucket-location` | `$CACHE_S3_BUCKET_LOCATION` | | `S3.Insecure` | `[runners.cache.s3] -> Insecure` | `--cache-s3-insecure` | `$CACHE_S3_INSECURE` | | `S3.AuthenticationType` | `[runners.cache.s3] -> AuthenticationType` | `--cache-s3-authentication_type` | `$CACHE_S3_AUTHENTICATION_TYPE` | | `S3.ServerSideEncryption` | `[runners.cache.s3] -> ServerSideEncryption` | `--cache-s3-server-side-encryption` | `$CACHE_S3_SERVER_SIDE_ENCRYPTION` | | `S3.ServerSideEncryptionKeyID` | `[runners.cache.s3] -> ServerSideEncryptionKeyID` | `--cache-s3-server-side-encryption-key-id` | `$CACHE_S3_SERVER_SIDE_ENCRYPTION_KEY_ID` | | `S3.DualStack` | `[runners.cache.s3] -> DualStack` | `--cache-s3-dual-stack` | `$CACHE_S3_DUAL_STACK` | | `S3.Accelerate` | `[runners.cache.s3] -> Accelerate` | `--cache-s3-accelerate` | `$CACHE_S3_ACCELERATE` | | `S3.PathStyle` | `[runners.cache.s3] -> PathStyle` | `--cache-s3-path-style` | `$CACHE_S3_PATH_STYLE` | | `S3.RoleARN` | `[runners.cache.s3] -> RoleARN` | `--cache-s3-role-arn` | `$CACHE_S3_ROLE_ARN` | | `S3.UploadRoleARN` | `[runners.cache.s3] -> UploadRoleARN` | `--cache-s3-upload-role-arn` | `$CACHE_S3_UPLOAD_ROLE_ARN` | | `GCS.AccessID` | `[runners.cache.gcs] -> AccessID` | `--cache-gcs-access-id` | `$CACHE_GCS_ACCESS_ID` | | `GCS.PrivateKey` | `[runners.cache.gcs] -> PrivateKey` | `--cache-gcs-private-key` | `$CACHE_GCS_PRIVATE_KEY` | | `GCS.CredentialsFile` | `[runners.cache.gcs] -> CredentialsFile` | `--cache-gcs-credentials-file` | `$GOOGLE_APPLICATION_CREDENTIALS` | | `GCS.BucketName` | `[runners.cache.gcs] -> BucketName` | `--cache-gcs-bucket-name` | `$CACHE_GCS_BUCKET_NAME` | | `Azure.AccountName` | `[runners.cache.azure] -> AccountName` | `--cache-azure-account-name` | `$CACHE_AZURE_ACCOUNT_NAME` | | `Azure.AccountKey` | `[runners.cache.azure] -> AccountKey` | `--cache-azure-account-key` | `$CACHE_AZURE_ACCOUNT_KEY` | | `Azure.ContainerName` | `[runners.cache.azure] -> ContainerName` | `--cache-azure-container-name` | 
`$CACHE_AZURE_CONTAINER_NAME` | | `Azure.StorageDomain` | `[runners.cache.azure] -> StorageDomain` | `--cache-azure-storage-domain` | `$CACHE_AZURE_STORAGE_DOMAIN` | ### キャッシュキーの処理 {#cache-key-handling} {{< history >}} - [導入](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5751):GitLab Runner v18.4.0。 {{< /history >}} GitLab Runner 18.4.0以降では、`FF_HASH_CACHE_KEYS` [機能フラグ](feature-flags.md)を使用してキャッシュキーにハッシュを付けることができます。 `FF_HASH_CACHE_KEYS`がオフになっている場合(デフォルト)、GitLab Runnerはキャッシュキーをサニタイズしてから、ローカルのキャッシュファイルとストレージバケット内のオブジェクトの両方のパスをビルドするために使用します。サニタイズによってキャッシュキーが変更された場合、GitLab Runnerはこの変更をログに記録します。GitLab Runnerがキャッシュキーをサニタイズできない場合、これもログに記録し、この特定のキャッシュは使用しません。 この機能フラグをオンにすると、GitLab Runnerはキャッシュキーにハッシュを付けてから、ローカルのキャッシュアーティファクトとリモートストレージバケット内のオブジェクトのパスをビルドするために使用します。GitLab Runnerは、キャッシュキーをサニタイズしません。どのキャッシュキーが特定のキャッシュアーティファクトを作成したかを理解できるように、GitLab Runnerはメタデータを添付します: - ローカルのキャッシュアーティファクトの場合、GitLab Runnerは、キャッシュアーティファクト`cache.zip`の横に`metadata.json`ファイルを配置し、次のコンテンツを含めます: ```json {"cachekey": "the human readable cache key"} ``` - 分散キャッシュのキャッシュアーティファクトの場合、GitLab Runnerはメタデータをストレージオブジェクトblobに直接添付し、キー`cachekey`を付与します。クラウドプロバイダーのメカニズムを使用してクエリできます。例については、AWS S3の[ユーザー定義オブジェクトメタデータ](https://docs.aws.amazon.com/AmazonS3/latest/userguide/UsingMetadata.html#UserMetadata)を参照してください。 {{< alert type="warning" >}} `FF_HASH_CACHE_KEYS`を変更すると、ハッシュでキャッシュキーによってキャッシュアーティファクトの名前と場所が変更されるため、GitLab Runnerは既存のキャッシュアーティファクトを無視します。この変更は、`FF_HASH_CACHE_KEYS=true`から`FF_HASH_CACHE_KEYS=false`、およびその逆に、両方向に適用されます。 分散キャッシュを共有する複数のRunnerを実行しているが、`FF_HASH_CACHE_KEYS`の設定が異なる場合、キャッシュアーティファクトは共有されません。 したがって、ベストプラクティスは次のとおりです: - 分散キャッシュを共有するRunner間で`FF_HASH_CACHE_KEYS`を同期した状態に保ちます。 - `FF_HASH_CACHE_KEYS`を変更した後、キャッシュミス、キャッシュアーティファクトの再ビルド、および最初のジョブの実行時間が長くなることを想定します。 {{< /alert >}} {{< alert type="warning" >}} `FF_HASH_CACHE_KEYS`をオンにしても、(ヘルパーイメージを以前のバージョンに固定したなどの理由で)以前のバージョンのヘルパーバイナリを実行すると、キャッシュキーへのハッシュの適用と、キャッシュのアップロードまたはダウンロードは引き続き機能します。ただし、GitLab Runnerはキャッシュアーティファクトのメタデータを保持しません。 {{< /alert >}} ### 
`[runners.cache.s3]`セクション {#the-runnerscaches3-section} 次のパラメータは、キャッシュ用のS3ストレージを定義します。 | パラメータ | 型 | 説明 | |-----------------------------|---------|-------------| | `ServerAddress` | 文字列 | S3互換サーバーの`host:port`。AWS以外のサーバーを使用している場合は、ストレージ製品のドキュメントを参照して、正しいアドレスを確認してください。DigitalOceanの場合、アドレスの形式は`spacename.region.digitaloceanspaces.com`である必要があります。 | | `AccessKey` | 文字列 | S3インスタンス用に指定されたアクセスキー。 | | `SecretKey` | 文字列 | S3インスタンス用に指定されたシークレットキー。 | | `SessionToken` | 文字列 | 一時的な認証情報を使用する場合に、S3インスタンス用に指定されたセッショントークン。 | | `BucketName` | 文字列 | キャッシュが保存されるストレージバケットの名前。 | | `BucketLocation` | 文字列 | S3リージョンの名前。 | | `Insecure` | ブール値 | S3サービスが`HTTP`で利用可能な場合は、`true`に設定します。デフォルトは`false`です。 | | `AuthenticationType` | 文字列 | `iam`または`access-key`に設定します。`ServerAddress`、`AccessKey`、および`SecretKey`がすべて指定されている場合、デフォルトは`access-key`です。`ServerAddress`、`AccessKey`、または`SecretKey`が指定されていない場合、デフォルトは`iam`です。 | | `ServerSideEncryption` | 文字列 | S3で使用するサーバー側の暗号化の種類。GitLab 15.3以降で使用可能な種類は、`S3`または`KMS`です。GitLab 17.5以降では、[`DSSE-KMS`](https://docs.aws.amazon.com/AmazonS3/latest/userguide/UsingDSSEncryption.html)がサポートされています。 | | `ServerSideEncryptionKeyID` | 文字列 | KMSを使用する場合に暗号化に使用されるKMSキーのエイリアス、ID、またはAmazonリソースネーム。エイリアスを使用する場合は、`alias/`をプレフィックスとして付けます。クロスアカウントシナリオでは、ARN形式を使用します。GitLab 15.3以降で利用可能です。 | | `DualStack` | ブール値 | IPv4およびIPv6エンドポイントを有効にします。デフォルトは`true`です。AWS S3 Expressを使用している場合は、この設定を無効にしてください。`ServerAddress`を設定すると、GitLabはこの設定を無視します。GitLab 17.5以降で利用可能です。 | | `Accelerate` | ブール値 | AWS S3 Transfer Acceleration(転送高速化)を有効にします。`ServerAddress`がAccelerated(高速化)エンドポイントとして設定されている場合、GitLabは自動的にこれを`true`に設定します。GitLab 17.5以降で利用可能です。 | | `PathStyle` | ブール値 | パス形式のアクセスを有効にします。デフォルトでは、GitLabは`ServerAddress`の値に基づいてこの設定を自動的に検出します。GitLab 17.5以降で利用可能です。 | | `UploadRoleARN` | 文字列 | 非推奨。代わりに`RoleARN`を使用してください。時間制限付きの`PutObject` S3リクエストを生成するために`AssumeRole`で使用できるAWSロールARNを指定します。S3マルチパートアップロードを有効にします。GitLab 17.5以降で利用可能です。 | | `RoleARN` | 文字列 | 時間制限付きの`GetObject`と`PutObject` 
S3リクエストを生成するために`AssumeRole`で使用できるAWSロールARNを指定します。S3マルチパート転送を有効にします。GitLab 17.8以降で利用可能です。 | 例: ```toml [runners.cache] Type = "s3" Path = "path/to/prefix" Shared = false [runners.cache.s3] ServerAddress = "s3.amazonaws.com" AccessKey = "AWS_S3_ACCESS_KEY" SecretKey = "AWS_S3_SECRET_KEY" BucketName = "runners-cache" BucketLocation = "eu-west-1" Insecure = false ServerSideEncryption = "KMS" ServerSideEncryptionKeyID = "alias/my-key" ``` ## 認証 {#authentication} GitLab Runnerは、設定に基づいてS3に異なる認証方法を使用します。 ### 静的な認証情報 {#static-credentials} Runnerは、次の場合に静的アクセスキー認証を使用します: - `ServerAddress`、`AccessKey`、および`SecretKey`パラメータが指定されていますが、`AuthenticationType`は提供されていません。 - `AuthenticationType = "access-key"`が明示的に設定されています。 ### AWS SDKのデフォルト認証情報チェーン {#aws-sdk-default-credential-chain} Runnerは、次の場合に[AWS SDKのデフォルト認証情報チェーン](https://docs.aws.amazon.com/sdk-for-go/v1/developer-guide/configuring-sdk.html#specifying-credentials)を使用します: - `ServerAddress`、`AccessKey`、または`SecretKey`のいずれかが省略され、`AuthenticationType`が提供されていません。 - `AuthenticationType = "iam"`が明示的に設定されています。 この認証情報チェーンは、次の順序で認証を試みます: 1. 環境変数(`AWS_ACCESS_KEY_ID`、`AWS_SECRET_ACCESS_KEY`) 1. 共有認証情報ファイル(`~/.aws/credentials`) 1. IAMインスタンスプロファイル(EC2インスタンスの場合) 1. 
SDKでサポートされている他のAWS認証情報ソース `RoleARN`が指定されていない場合、デフォルトの認証情報チェーンはRunnerマネージャーによって実行されます。これは、ビルドが実行されるマシンと同じマシン上にあるとは限りません。たとえば、[オートスケール](autoscale.md)の設定では、ジョブは別のマシンで実行されます。同様に、Kubernetesエグゼキューターを使用すると、ビルドポッドもRunnerマネージャーとは異なるノードで実行できます。この動作により、Runnerマネージャーにのみバケットレベルのアクセス権を付与できます。 `RoleARN`が指定されている場合、認証情報はヘルパーイメージの実行コンテキスト内で解決されます。詳細については、[RoleARN](#enable-multipart-transfers-with-rolearn)を参照してください。 Helmチャートを使用してGitLab Runnerをインストールし、`rbac.create`が`values.yaml`ファイルで`true`に設定されている場合、サービスアカウントが作成されます。サービスアカウントの注釈は、`rbac.serviceAccountAnnotations`セクションから取得されます。 Amazon EKSのRunnerの場合、サービスアカウントに割り当てるIAMロールを指定できます。必要な特定のアノテーションは`eks.amazonaws.com/role-arn: arn:aws:iam::<ACCOUNT_ID>:role/<IAM_ROLE_NAME>`です。 このロールのIAMポリシーには、指定されたバケットに対して次のアクションを実行する権限が必要です。 - `s3:PutObject` - `s3:GetObjectVersion` - `s3:GetObject` - `s3:DeleteObject` - `s3:ListBucket` `KMS`タイプの`ServerSideEncryption`を使用する場合、このロールには、指定されたAWS KMSキーに対して次のアクションを実行する権限も必要です。 - `kms:Encrypt` - `kms:Decrypt` - `kms:ReEncrypt*` - `kms:GenerateDataKey*` - `kms:DescribeKey` `SSE-C`タイプの`ServerSideEncryption`はサポートされていません。`SSE-C`では、事前署名付きURLに加えて、ユーザー提供のキーを含むヘッダーをダウンロードリクエストに対して指定する必要があります。これは、ジョブにキーマテリアルを渡すことになり、キーの安全を保証できません。これにより、復号化キーが漏洩する可能性があります。この問題に関するディスカッションについては、[このマージリクエスト](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/3295)を参照してください。 {{< alert type="note" >}} AWS S3キャッシュにアップロードできる単一ファイルの最大サイズは5 GBです。この動作に対する潜在的な回避策についてのディスカッションについては、[このイシュー](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26921)を参照してください。 {{< /alert >}} #### Runnerキャッシュ用のS3バケットでKMSキー暗号化を使用する {#use-kms-key-encryption-in-s3-bucket-for-runner-cache} `GenerateDataKey` APIはKMS対称キーを使用して、クライアント側の暗号化用のデータキーを作成します。KMSキーの正しい設定は次のとおりです。 | 属性 | 説明 | |-----------|-------------| | キータイプ | 対称 | | 生成元 | `AWS_KMS` | | キー仕様 | `SYMMETRIC_DEFAULT` | | キーの用途 | 暗号化と復号化 | `rbac.serviceAccountName`で定義されたServiceAccountに割り当てられたロールのIAMポリシーには、KMSキーに対して次のアクションを実行する権限が必要です。 - `kms:GetPublicKey` - `kms:Decrypt` - `kms:Encrypt` - `kms:DescribeKey` - `kms:GenerateDataKey` #### 
`RoleARN`でマルチパート転送を有効にする {#enable-multipart-transfers-with-rolearn} キャッシュへのアクセスを制限するために、Runnerマネージャーは時間制限のある[事前署名付きURL](https://docs.aws.amazon.com/AmazonS3/latest/userguide/using-presigned-url.html)を生成し、ジョブがキャッシュからのダウンロードやキャッシュへアップロードを行えるようにします。ただし、AWS S3では[1つのPUTリクエストが5 GBに制限されています](https://docs.aws.amazon.com/AmazonS3/latest/userguide/upload-objects.html)。5 GBを超えるファイルの場合は、マルチパートアップロードAPIを使用する必要があります。 マルチパート転送は、AWS S3でのみサポートされており、他のS3プロバイダーではサポートされていません。Runnerマネージャーはさまざまなプロジェクトのジョブを処理することから、バケット全体の権限を含むS3認証情報を渡すことができません。代わりに、Runnerマネージャーは時間制限のある事前署名付きURLと範囲が限定された認証情報を使用して、特定のオブジェクトへのアクセスを制限します。 AWSでS3マルチパート転送を使用するには、`RoleARN`に`arn:aws:iam::::`形式でIAMロールを指定します。このロールは、バケット内の特定のblobへの書き込みに限定された、時間制限のあるAWS認証情報を生成します。元のS3認証情報が、指定された`RoleARN`の`AssumeRole`にアクセスできることを確認してください。 `RoleARN`で指定されたIAMロールには、次の権限が必要です。 - `BucketName`で指定されたバケットへの`s3:GetObject`アクセス権。 - `BucketName`で指定されたバケットへの`s3:PutObject`アクセス権。 - `BucketName`で指定されたバケットへの`s3:ListBucket`アクセス権。 - KMSまたはDSSE-KMSを使用したサーバー側の暗号化が有効になっている場合は、`kms:Decrypt`と`kms:GenerateDataKey`権限。 たとえば、ARN `arn:aws:iam::1234567890123:role/my-instance-role`を持つEC2インスタンスに`my-instance-role`という名前のIAMロールが添付されているとします。 この場合、`BucketName`に対して`s3:PutObject`権限のみを持つ新しいロール`arn:aws:iam::1234567890123:role/my-upload-role`を作成できます。`my-instance-role`のAWS設定では、`Trust relationships`は次のようになります。 ```json { "Version": "2012-10-17", "Statement": [ { "Effect": "Allow", "Principal": { "AWS": "arn:aws:iam::1234567890123:role/my-upload-role" }, "Action": "sts:AssumeRole" } ] } ``` `my-instance-role`を`RoleARN`として再利用して、新しいロールの作成を回避することもできます。その場合は、`my-instance-role`に`AssumeRole`権限があることを確認してください。たとえば、EC2インスタンスに関連付けられているIAMプロファイルの`Trust relationships`は次のようになります。 ```json { "Version": "2012-10-17", "Statement": [ { "Effect": "Allow", "Principal": { "Service": "ec2.amazonaws.com", "AWS": "arn:aws:iam::1234567890123:role/my-instance-role" }, "Action": "sts:AssumeRole" } ] } ``` AWSコマンドラインインターフェースを使用して、インスタンスに`AssumeRole`権限があることを確認できます。例: ```shell aws sts assume-role --role-arn 
arn:aws:iam::1234567890123:role/my-upload-role --role-session-name gitlab-runner-test1 ``` ##### `RoleARN`によるアップロードの仕組み {#how-uploads-work-with-rolearn} `RoleARN`が設定されている場合、Runnerがキャッシュにアップロードするたびに次の処理が行われます。 1. Runnerマネージャーは、(`AuthenticationType`、`AccessKey`、`SecretKey`で指定された)元のS3認証情報を取得します。 1. RunnerマネージャーはこのS3認証情報を使用して、Amazon Security Token Service(STS)に`RoleARN`を使った[`AssumeRole`](https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html)のリクエストを送信します。ポリシーリクエストは次のようになります。 ```json { "Version": "2012-10-17", "Statement": [ { "Effect": "Allow", "Action": ["s3:PutObject"], "Resource": "arn:aws:s3:::/" } ] } ``` 1. リクエストが成功した場合、Runnerマネージャーは制限付きセッションで一時的なAWS認証情報を取得します。 1. Runnerマネージャーは、これらの認証情報とURLを`s3:///`形式でキャッシュアーカイバーに渡し、キャッシュアーカイバーがファイルをアップロードします。 #### Kubernetes ServiceAccountリソース用のIAMロールを有効にする {#enable-iam-roles-for-kubernetes-serviceaccount-resources} サービスアカウントにIAMロールを使用するには、IAM OIDCプロバイダーが[クラスター用に存在する必要があります](https://docs.aws.amazon.com/eks/latest/userguide/enable-iam-roles-for-service-accounts.html)。IAM OIDCプロバイダーがクラスターに関連付けられたら、IAMロールを作成してRunnerのサービスアカウントに関連付けることができます。 1. **Create Role**(ロール作成)画面の**Select type of trusted entity**(信頼されたエンティティのタイプを選択)で、**Web Identity**(Web ID)を選択します。 1. 
ロールの**Trusted Relationships**(信頼関係)タブで次のようにします。 - **Trusted entities**(信頼されたエンティティ)セクションの形式は`arn:aws:iam:::oidc-provider/oidc.eks..amazonaws.com/id/`である必要があります。**OIDC ID**は、Amazon EKSクラスターの**Configuration**(設定)タブにあります。 - **Condition**(条件)セクションには、`rbac.serviceAccountName`で定義されたGitLab Runnerサービスアカウント、または`rbac.create`が`true`に設定されている場合に作成されるデフォルトのサービスアカウントが必要です。 | 条件 | キー | 値 | |----------------|--------------------------------------------------------|-------| | `StringEquals` | `oidc.eks..amazonaws.com/id/:sub` | `system:serviceaccount::` | #### S3 Express One Zoneバケットを使用する {#use-s3-express-one-zone-buckets} {{< history >}} - GitLab Runner 17.5.0で導入されました。 {{< /history >}} {{< alert type="note" >}} Runnerマネージャーが1つの特定のオブジェクトに対するアクセスを制限できないため、[S3 Express One Zoneディレクトリバケットは`RoleARN`では機能しません](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/38484#note_2313111840)。 {{< /alert >}} 1. [Amazonのチュートリアル](https://docs.aws.amazon.com/AmazonS3/latest/userguide/s3-express-getting-started.html)に従って、S3 Express One Zoneバケットを設定します。 1. `BucketName`と`BucketLocation`を使用して`config.toml`を設定します。 1. 
S3 Expressはデュアルスタックエンドポイントをサポートしていないため、`DualStack`を`false`に設定します。 `config.toml`の例 ```toml [runners.cache] Type = "s3" [runners.cache.s3] BucketName = "example-express--usw2-az1--x-s3" BucketLocation = "us-west-2" DualStack = false ``` ### `[runners.cache.gcs]`セクション {#the-runnerscachegcs-section} 次のパラメータは、Google Cloud Storageのネイティブサポートを定義します。これらの値の詳細については、[Google Cloud Storage(GCS)の認証に関するドキュメント](https://docs.cloud.google.com/storage/docs/authentication#service_accounts)を参照してください。 | パラメータ | 型 | 説明 | |-------------------|--------|-------------| | `CredentialsFile` | 文字列 | Google JSONキーファイルのパス。`service_account`タイプのみがサポートされています。設定されている場合、この値は`config.toml`で直接設定された`AccessID`と`PrivateKey`よりも優先されます。 | | `AccessID` | 文字列 | ストレージへのアクセスに使用されるGCPサービスアカウントのID。 | | `PrivateKey` | 文字列 | GCSリクエストの署名に使用される秘密キー。 | | `BucketName` | 文字列 | キャッシュが保存されるストレージバケットの名前。 | 例: **`config.toml`ファイルで直接設定された認証情報** ```toml [runners.cache] Type = "gcs" Path = "path/to/prefix" Shared = false [runners.cache.gcs] AccessID = "cache-access-account@test-project-123456.iam.gserviceaccount.com" PrivateKey = "-----BEGIN PRIVATE KEY-----\nXXXXXX\n-----END PRIVATE KEY-----\n" BucketName = "runners-cache" ``` **GCPからダウンロードしたJSONファイル内の認証情報** ```toml [runners.cache] Type = "gcs" Path = "path/to/prefix" Shared = false [runners.cache.gcs] CredentialsFile = "/etc/gitlab-runner/service-account.json" BucketName = "runners-cache" ``` **GCPのメタデータサーバーからのアプリケーションデフォルト認証情報(ADC)** GitLab RunnerとGoogle Cloud ADCを使用する場合、通常はデフォルトのサービスアカウントを使用します。その場合、インスタンスの認証情報を提供する必要はありません。 ```toml [runners.cache] Type = "gcs" Path = "path/to/prefix" Shared = false [runners.cache.gcs] BucketName = "runners-cache" ``` ADCを使用する場合は、使用するサービスアカウントに`iam.serviceAccounts.signBlob`権限があることを確認してください。通常、これは[サービスアカウントトークン作成者のロール](https://docs.cloud.google.com/iam/docs/service-account-permissions#token-creator-role)をサービスアカウントに付与することで行われます。 #### GKEのワークロードアイデンティティフェデレーション {#workload-identity-federation-for-gke} 
GKEのワークロードアイデンティティフェデレーションは、アプリケーションデフォルト認証情報(ADC)でサポートされています。ワークロードアイデンティティが機能しないイシューが発生した場合: - `ERROR: generating signed URL`メッセージについては、Runnerポッドログ(ビルドログではなく)を確認してください。このエラーは、次のようなパーミッションのイシューを示している可能性があります: ```plaintext IAM returned 403 Forbidden: Permission 'iam.serviceAccounts.getAccessToken' denied on resource (or it may not exist). ``` - Runnerポッド内から次の`curl`コマンドを試してください: ```shell curl -H "Metadata-Flavor: Google" http://169.254.169.254/computeMetadata/v1/instance/service-accounts/default/email ``` このコマンドは、正しいKubernetesサービスアカウントを返すはずです。次に、アクセストークンを取得してみてください: ```shell curl -H "Metadata-Flavor: Google" http://169.254.169.254/computeMetadata/v1/instance/service-accounts/default/token?scopes=https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fcloud-platform ``` コマンドが成功すると、結果はアクセストークンを含むJSONペイロードを返します。失敗した場合は、サービスアカウントの権限を確認してください。 ### `[runners.cache.azure]`セクション {#the-runnerscacheazure-section} 次のパラメータは、Azure Blob Storageのネイティブサポートを定義します。詳細については、[Azure Blob Storageのドキュメント](https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction)を参照してください。S3やGCSではオブジェクトの集合に`bucket`という用語が使用されていますが、Azureではblobの集合に`container`が使用されています。 | パラメータ | 型 | 説明 | |-----------------|--------|-------------| | `AccountName` | 文字列 | ストレージへのアクセスに使用するAzure Blob Storageアカウントの名前。 | | `AccountKey` | 文字列 | コンテナへのアクセスに使用するストレージアカウントのアクセスキー。設定から`AccountKey`を省略するには、[AzureワークロードまたはマネージドID](#azure-workload-and-managed-identities)を使用します。 | | `ContainerName` | 文字列 | キャッシュデータを保存する[ストレージコンテナ](https://learn.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction#containers)の名前。 | | `StorageDomain` | 文字列 | [Azureストレージエンドポイントのサービスに使用される](https://learn.microsoft.com/en-us/azure/china/resources-developer-guide#check-endpoints-in-azure)ドメイン名(オプション)。デフォルトは`blob.core.windows.net`です。 | 例: ```toml [runners.cache] Type = "azure" Path = "path/to/prefix" Shared = false [runners.cache.azure] AccountName = "" AccountKey = "" ContainerName = "runners-cache" StorageDomain = "blob.core.windows.net" 
``` #### AzureワークロードIDとマネージドID {#azure-workload-and-managed-identities} {{< history >}} - GitLab Runner v17.5.0で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27303)されました。 {{< /history >}} AzureワークロードまたはマネージドIDを使用するには、設定から`AccountKey`を省略します。`AccountKey`が空白の場合、Runnerは次の処理を試みます。 1. [`DefaultAzureCredential`を使用](https://github.com/Azure/azure-sdk-for-go/blob/main/sdk/azidentity/README.md#defaultazurecredential)して一時的な認証情報を取得します。 1. [ユーザー委任キー](https://learn.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key)を取得します。 1. そのキーを使用して、ストレージアカウントのblobにアクセスするためのSASトークンを生成します。 インスタンスに`Storage Blob Data Contributor`ロールが割り当てられていることを確認します。上記のアクションを実行するためのアクセス権がインスタンスにない場合、GitLab Runnerは`AuthorizationPermissionMismatch`エラーを報告します。 AzureワークロードIDを使用するには、IDに関連付けられている`service_account`を追加し、ポッドラベル`azure.workload.identity/use`を`runner.kubernetes`セクションに追加します。たとえば、`service_account`が`gitlab-runner`の場合は次のようになります。 ```toml [runners.kubernetes] service_account = "gitlab-runner" [runners.kubernetes.pod_labels] "azure.workload.identity/use" = "true" ``` `service_account`に、`azure.workload.identity/client-id`アノテーションが関連付けられていることを確認します。 ```yaml serviceAccount: annotations: azure.workload.identity/client-id: ``` GitLab 17.7以降では、ワークロードIDのセットアップにはこの設定で十分です。 ただし、GitLab Runner 17.5および17.6では、Runnerマネージャーにも以下の設定が必要です。 - `azure.workload.identity/use`ポッドラベル - ワークロードIDで使用するサービスアカウント たとえば、GitLab Runner Helmチャートを使用する場合は次のようになります。 ```yaml serviceAccount: name: "gitlab-runner" podLabels: azure.workload.identity/use: "true" ``` 認証情報は異なるソースから取得されるため、このラベルが必要です。キャッシュのダウンロードの場合、認証情報はRunnerマネージャーから取得されます。キャッシュのアップロードの場合、認証情報は[ヘルパーイメージ](#helper-image)を実行するポッドから取得されます。 詳細については、[イシュー38330](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/38330)を参照してください。 ## `[runners.kubernetes]`セクション {#the-runnerskubernetes-section} 次の表に、Kubernetes executorで使用できる設定パラメータを示します。その他のパラメータについては、[Kubernetes executorのドキュメント](../executors/kubernetes/_index.md)を参照してください。 | パラメータ | 型 | 説明 | 
|------------------------------|---------|-------------| | `host` | 文字列 | オプション。KubernetesホストのURL。指定されていない場合、Runnerは自動検出を試みます。 | | `cert_file` | 文字列 | オプション。Kubernetes認証証明書。 | | `key_file` | 文字列 | オプション。Kubernetes認証秘密キー。 | | `ca_file` | 文字列 | オプション。Kubernetes認証CA証明書。 | | `image` | 文字列 | ジョブでコンテナイメージが指定されていない場合に使用するデフォルトのコンテナイメージ。 | | `allowed_images` | 配列 | `.gitlab-ci.yml`で許可されるコンテナイメージのワイルドカードリスト。この設定が存在しない場合は、すべてのイメージが許可されます(`["*/*:*"]`と同等)。[Docker](../executors/docker.md#restrict-docker-images-and-services) executorまたは[Kubernetes](../executors/kubernetes/_index.md#restrict-docker-images-and-services) executorで使用します。 | | `allowed_services` | 配列 | `.gitlab-ci.yml`で許可されるサービスのワイルドカードリスト。この設定が存在しない場合は、すべてのイメージが許可されます(`["*/*:*"]`と同等)。[Docker](../executors/docker.md#restrict-docker-images-and-services) executorまたは[Kubernetes](../executors/kubernetes/_index.md#restrict-docker-images-and-services) executorで使用します。 | | `namespace` | 文字列 | Kubernetesジョブを実行するネームスペース。 | | `privileged` | ブール値 | 特権フラグを有効にしてすべてのコンテナを実行します。 | | `allow_privilege_escalation` | ブール値 | オプション。`allowPrivilegeEscalation`フラグを有効にしてすべてのコンテナを実行します。 | | `node_selector` | テーブル | `string=string`の`key=value`ペアの`table`。ポッドの作成が、すべての`key=value`ペアに一致するKubernetesノードに制限されます。 | | `image_pull_secrets` | 配列 | プライベートレジストリからのコンテナイメージのプル認証に使用されるKubernetesの`docker-registry`シークレット名を含む項目の配列。 | | `logs_base_dir` | 文字列 | ビルドログを保存するために生成されたパスの前に付加されるベースディレクトリ。GitLab Runner 17.2で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/37760)されました。 | | `scripts_base_dir` | 文字列 | ビルドスクリプトを保存するために生成されたパスの前に付加されるベースディレクトリ。GitLab Runner 17.2で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/37760)されました。 | | `service_account` | 文字列 | ジョブ/executorポッドがKubernetes APIと通信するために使用するデフォルトのサービスアカウント。 | 例: ```toml [runners.kubernetes] host = "https://45.67.34.123:4892" cert_file = "/etc/ssl/kubernetes/api.crt" key_file = "/etc/ssl/kubernetes/api.key" ca_file = "/etc/ssl/kubernetes/ca.crt" image = "golang:1.8" privileged = true 
allow_privilege_escalation = true image_pull_secrets = ["docker-registry-credentials", "optional-additional-credentials"] allowed_images = ["ruby:*", "python:*", "php:*"] allowed_services = ["postgres:9.4", "postgres:latest"] logs_base_dir = "/tmp" scripts_base_dir = "/tmp" [runners.kubernetes.node_selector] gitlab = "true" ``` ## ヘルパーイメージ {#helper-image} `docker`、`docker+machine`、または`kubernetes` executorを使用すると、GitLab RunnerはGit、アーティファクト、およびキャッシュ操作の処理に特定のコンテナを使用します。このコンテナは、`helper image`という名前のイメージから作成されます。 ヘルパーイメージは、amd64、ARM、arm64、s390x、ppc64le、およびriscv64アーキテクチャで使用できます。これには、GitLab Runnerバイナリの特別なコンパイルである`gitlab-runner-helper`バイナリが含まれています。これには、利用可能なコマンドのサブセットと、Git、Git LFS、およびSSL証明書ストアのみが含まれています。 ヘルパーイメージには、`alpine`、`alpine3.21`、`alpine-latest`、`ubi-fips`、`ubuntu`のようないくつかの種類があります。`alpine`イメージはフットプリントが小さいため、デフォルトです。`helper_image_flavor = "ubuntu"`を使用すると、ヘルパーイメージの`ubuntu`フレーバーが選択されます。 GitLab Runner 16.1から17.1では、`alpine`フレーバーは`alpine3.18`のエイリアスです。GitLab Runner 17.2から17.6では、`alpine3.19`のエイリアスです。GitLab Runner 17.7以降では、`alpine3.21`のエイリアスとなっています。GitLab Runner 18.4以降では、`alpine-latest`のエイリアスです。 `alpine-latest`フレーバーは、`alpine:latest`をベースイメージとして使用し、新しいアップストリームのバージョンがリリースされると、自動的にバージョンが上がります。 GitLab Runnerが`DEB`パッケージまたは`RPM`パッケージからインストールされると、サポートされているアーキテクチャ用のイメージがホストにインストールされます。Docker Engineが指定されたイメージバージョンを見つけられない場合、Runnerはジョブを実行する前に自動的にダウンロードします。`docker` executorと`docker+machine` executorの両方がこのように動作します。 `alpine`フレーバーの場合、デフォルトの`alpine`フレーバーイメージのみがパッケージに含まれています。その他すべてのフレーバーは、レジストリからダウンロードされます。 GitLab Runnerの手動インストールと`kubernetes` executorは異なる動作をします。 - 手動インストールの場合は、`gitlab-runner-helper`バイナリは含まれていません。 - `kubernetes` executorの場合、Kubernetes APIは`gitlab-runner-helper`イメージをローカルアーカイブから読み込むことを許可しません。 いずれの場合も、GitLab Runnerは[ヘルパーイメージをダウンロード](#helper-image-registry)します。GitLab Runnerのリビジョンとアーキテクチャによって、ダウンロードするタグが決まります。 ### Arm上のKubernetes用ヘルパーイメージ設定 {#helper-image-configuration-for-kubernetes-on-arm} 
既定では、アーキテクチャに適した[ヘルパーイメージ](../executors/kubernetes/_index.md#operating-system-architecture-and-windows-kernel-version)が選択されます。`arm64` Kubernetesクラスターで`arm64`ヘルパーイメージを使用するためにカスタム`helper_image`パスを設定する必要がある場合は、[設定ファイル](../executors/kubernetes/_index.md#configuration-settings)で次の値を設定します: ```toml [runners.kubernetes] helper_image = "my.registry.local/gitlab/gitlab-runner-helper:arm64-v${CI_RUNNER_VERSION}" ``` ### 古いバージョンのAlpine Linuxを使用するRunnerイメージ {#runner-images-that-use-an-old-version-of-alpine-linux} {{< history >}} - GitLab Runner 14.5で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/3122)されました。 {{< /history >}} イメージは、複数のAlpine Linuxバージョンでビルドされています。新しいバージョンのAlpineを使用できますが、同時に古いバージョンも使用できます。 ヘルパーイメージの場合は、`helper_image_flavor`を変更するか、[ヘルパーイメージ](#helper-image)セクションを参照してください。 GitLab Runnerイメージの場合は、`alpine`、`alpine3.19`、`alpine3.21`、または`alpine-latest`がバージョンの前にイメージのプレフィックスとして使用されるように、同じロジックに従ってください: ```shell docker pull gitlab/gitlab-runner:alpine3.19-v16.1.0 ``` ### Alpine `pwsh`イメージ {#alpine-pwsh-images} GitLab Runner 16.1以降、すべての`alpine`ヘルパーイメージには`pwsh`バリアントがあります。唯一の例外は`alpine-latest`です。これは、GitLab Runnerヘルパーイメージのベースとなる[`powershell` Dockerイメージ](https://learn.microsoft.com/en-us/powershell/scripting/install/powershell-in-docker?view=powershell-7.4)が`alpine:latest`をサポートしていないためです。 例: ```shell docker pull registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:alpine3.21-x86_64-v17.7.0-pwsh ``` ### ヘルパーイメージレジストリ {#helper-image-registry} GitLab 15.0以前では、Docker Hubのイメージを使用するようにヘルパーイメージを設定します。 GitLab 15.1以降では、ヘルパーイメージは、GitLab.com上のGitLab Containerレジストリから`registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:x86_64-v${CI_RUNNER_VERSION}`でプルされます。GitLab Self-Managedインスタンスも、既定でGitLab.com上のGitLab Containerレジストリからヘルパーイメージをプルします。GitLab.com上のGitLab Containerレジストリのステータスを確認するには、[GitLabシステムのステータス](https://status.gitlab.com/)を参照してください。 ### ヘルパーイメージを上書きする {#override-the-helper-image} 場合によっては、次の理由でヘルパーイメージを上書きする必要があります。 1. 
**ジョブ実行の高速化**: インターネット接続の速度が遅い環境では、同じイメージを複数回ダウンロードすると、ジョブの実行に時間がかかる可能性があります。`registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:XYZ`の正確なコピーが保存されているローカルレジストリからヘルパーイメージをダウンロードすることで、処理を高速化できます。 1. **セキュリティに関する懸念**: 事前にチェックされていない外部依存関係をダウンロードしたくない場合があります。レビューが完了し、ローカルリポジトリに保存されている依存関係のみを使用するというビジネスルールが存在する可能性があります。 1. **インターネットにアクセスできないビルド環境**: [オフライン環境にKubernetesクラスターをインストールしている](../install/operator.md#install-gitlab-runner-operator-on-kubernetes-clusters-in-offline-environments)場合は、ローカルイメージレジストリまたはパッケージリポジトリを使用して、CI/CDジョブで使用されるイメージをプルできます。 1. **追加のソフトウェア**: `git+http`の代わりに`git+ssh`を使用してアクセス可能なサブモジュールをサポートするために、`openssh`のような追加のソフトウェアをヘルパーイメージにインストールしたい場合があります。 このような場合は、`docker`、`docker+machine`、および`kubernetes` executorで利用可能な`helper_image`設定フィールドを使用して、カスタムイメージを設定できます。 ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) helper_image = "my.registry.local/gitlab/gitlab-runner-helper:tag" ``` ヘルパーイメージのバージョンは、GitLab Runnerのバージョンと緊密に結合されていると考えてください。これらのイメージを提供する主な理由の1つは、GitLab Runnerが`gitlab-runner-helper`バイナリを使用していることです。このバイナリは、GitLab Runnerソースの一部からコンパイルされます。このバイナリは、両方のバイナリで同じであることが期待される内部APIを使用しています。 デフォルトでは、GitLab Runnerは`registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:XYZ`イメージを参照します。ここで、`XYZ`はGitLab RunnerのアーキテクチャとGitリビジョンに基づいています。[バージョン変数](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/common/version.go#L60-61)のいずれかを使用することによって、イメージバージョンを定義することができます。 ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) helper_image = "my.registry.local/gitlab/gitlab-runner-helper:x86_64-v${CI_RUNNER_VERSION}" ``` この設定により、GitLab Runnerはexecutorに対し、コンパイルデータに基づくバージョン`x86_64-v${CI_RUNNER_VERSION}`のイメージを使用するように指示します。GitLab Runnerが新しいバージョンに更新された後で、GitLab Runnerは適切なイメージをダウンロードしようとします。GitLab Runnerをアップグレードする前に、イメージをレジストリにアップロードする必要があります。そうしないと、ジョブが「No such image」(指定されたイメージが見つかりません)エラーで失敗し始めます。 ヘルパーイメージは、`$CI_RUNNER_REVISION`に加えて`$CI_RUNNER_VERSION`によってタグ付けされます。どちらのタグも有効であり、同じイメージを指しています。 ```toml [[runners]] (...) 
executor = "docker" [runners.docker] (...) helper_image = "my.registry.local/gitlab/gitlab-runner-helper:x86_64-v${CI_RUNNER_VERSION}" ``` #### PowerShell Coreを使用する場合 {#when-using-powershell-core} PowerShell Coreを含むLinux用のヘルパーイメージの追加バージョンは、`registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:XYZ-pwsh`タグを使用して公開されます。 ## `[runners.custom_build_dir]`セクション {#the-runnerscustom_build_dir-section} {{< history >}} - GitLab Runner 11.10で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/1267)されました。 {{< /history >}} このセクションでは、[カスタムビルドディレクトリ](https://docs.gitlab.com/ci/runners/configure_runners/#custom-build-directories)パラメータを定義します。 この機能は、明示的に設定されていない場合でも、`kubernetes`、`docker`、`docker+machine`、`docker autoscaler`、および`instance` executorで、デフォルトで有効になっています。他のすべてのexecutorでは、デフォルトで無効になっています。 この機能を使用するには、`runners.builds_dir`で定義されたパスに`GIT_CLONE_PATH`が含まれている必要があります。`builds_dir`を使用するには、`$CI_BUILDS_DIR`変数を使用します。 デフォルトでは、この機能は`docker` executorと`kubernetes` executorでのみ有効になっています。これは、これらのexecutorがリソースを分離するのに適した方法を提供するためです。この機能はどのexecutorでも明示的に有効にできますが、`builds_dir`を共有し、`concurrent > 1`が設定されたexecutorで使用する場合は注意が必要です。 | パラメータ | 型 | 説明 | |-----------|---------|-------------| | `enabled` | ブール値 | ユーザーがジョブのカスタムビルドディレクトリを定義できるようにします。 | 例: ```toml [runners.custom_build_dir] enabled = true ``` ### デフォルトのビルドディレクトリ {#default-build-directory} GitLab Runnerは、_ビルドディレクトリ_と呼ばれるベースパスの下に存在するパスにリポジトリをクローンします。このベースディレクトリのデフォルトの場所は、executorによって異なります。詳細は以下の説明を参照してください。 - [Kubernetes](../executors/kubernetes/_index.md)、[Docker](../executors/docker.md)、[Docker Machine](../executors/docker_machine.md) executorの場合は、コンテナ内の`/builds`です。 - [Instance](../executors/instance.md)の場合は、ターゲットマシンへのSSH接続またはWinRM接続を処理するように設定されているユーザーのホームディレクトリにある`~/builds`です。 - [Docker Autoscaler](../executors/docker_autoscaler.md)の場合は、コンテナ内の`/builds`です。 - [Shell](../executors/shell.md) executorの場合は、`$PWD/builds`です。 - 
[SSH](../executors/ssh.md)、[VirtualBox](../executors/virtualbox.md)、[Parallels](../executors/parallels.md) executorの場合は、ターゲットマシンへのSSH接続を処理するように設定されているユーザーのホームディレクトリにある`~/builds`です。 - [Custom](../executors/custom.md) executorの場合はデフォルトが提供されていないため、明示的に設定する必要があります。設定されていない場合、ジョブが失敗します。 使用される_ビルドディレクトリ_は、ユーザーが[`builds_dir`](#the-runners-section)設定で明示的に定義できます。 {{< alert type="note" >}} カスタムディレクトリにクローンする場合は、[`GIT_CLONE_PATH`](https://docs.gitlab.com/ci/runners/configure_runners/#custom-build-directories)を指定することもできます。その場合は以下のガイドラインは適用されません。 {{< /alert >}} GitLab Runnerは、実行するすべてのジョブに_ビルドディレクトリ_を使用しますが、特定のパターン`{builds_dir}/$RUNNER_TOKEN_KEY/$CONCURRENT_PROJECT_ID/$NAMESPACE/$PROJECT_NAME`を使用してそれらをネストします。例: `/builds/2mn-ncv-/0/user/playground`。 GitLab Runnerは、ユーザーが_ビルドディレクトリ_に保存することを妨げません。たとえば、CI実行中に使用できるツールを`/builds/tools`内に保存できます。この操作は**極力**控えてください。_ビルドディレクトリ_には何も保存しないでください。GitLab Runnerはこの動作を完全に制御する必要があり、そのような場合には安定性が保証されません。CIに必要な依存関係がある場合は、他の場所にインストールする必要があります。 ## Git設定をクリーンアップする {#cleaning-git-configuration} {{< history >}} - GitLab Runner 17.10で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5438)されました。 {{< /history >}} すべてのビルドの開始時と終了時に、GitLab Runnerはリポジトリとそのサブモジュールから次のファイルを削除します。 - Gitロックファイル(`{index,shallow,HEAD,config}.lock`) - post-checkoutフック(`hooks/post-checkout`) `clean_git_config`を有効にすると、リポジトリ、そのサブモジュール、およびGitテンプレートディレクトリから、次の追加ファイルまたはディレクトリが削除されます。 - `.git/config`ファイル - `.git/hooks`ディレクトリ このクリーンアップにより、カスタムGit設定、一時的なGit設定、または潜在的に悪意のあるGit設定がジョブ間でキャッシュされることを防ぎます。 GitLab Runner 17.10より前では、クリーンアップの動作が異なっていました。 - Gitロックファイルとpost-checkoutフックのクリーンアップは、ジョブの開始時にのみ行われ、終了時には行われませんでした。 - 他のGit設定(現在は`clean_git_config`で制御されるようになった設定)は、`FF_ENABLE_JOB_CLEANUP`が設定されていない場合には削除されませんでした。このフラグを設定すると、メインリポジトリの`.git/config`のみが削除されますが、サブモジュールの設定は削除されませんでした。 `clean_git_config`設定はデフォルトで`true`です。ただし、次の場合はデフォルトで`false`です。 - [Shell executor](../executors/shell.md)が使用されている。 - [Git戦略](https://docs.gitlab.com/ci/runners/configure_runners/#git-strategy)が`none`に設定されている。 
`config.toml`ファイルの`[[runners]]`セクションで`[runners.referees]`と`[runners.referees.metrics]`を定義し、次のフィールドを追加します。
"network_receive_bytes:rate(node_network_receive_bytes_total{{selector}}[{interval}])", "network_receive_drops:rate(node_network_receive_drop_total{{selector}}[{interval}])", "network_receive_errors:rate(node_network_receive_errs_total{{selector}}[{interval}])", "network_receive_packets:rate(node_network_receive_packets_total{{selector}}[{interval}])", "network_transmit_bytes:rate(node_network_transmit_bytes_total{{selector}}[{interval}])", "network_transmit_drops:rate(node_network_transmit_drop_total{{selector}}[{interval}])", "network_transmit_errors:rate(node_network_transmit_errs_total{{selector}}[{interval}])", "network_transmit_packets:rate(node_network_transmit_packets_total{{selector}}[{interval}])" ] ``` メトリクスクエリの形式は`canonical_name:query_string`です。クエリ文字列は、実行中に置き換えられる2つの変数をサポートしています。 | 設定 | 説明 | |--------------|-------------| | `{selector}` | 特定のGitLab RunnerインスタンスによってPrometheusで生成されたメトリクスを選択する`label_name=label_value`ペアに置き換えられます。 | | `{interval}` | このレフェリーの`[runners.referees.metrics]`設定の`query_interval`パラメータに置き換えられます。 | たとえば、`docker-machine` executorを使用する共有GitLab Runner環境では、`{selector}`が`node=shared-runner-123`のようになります。 ================================================ FILE: docs-locale/ja-jp/configuration/autoscale.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Docker Machine Executorのオートスケール設定 --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} {{< alert type="note" >}} Docker Machine ExecutorはGitLab 17.5で非推奨となりました。GitLab 20.0(2027年5月)で削除される予定です。GitLab 20.0まではDocker Machine Executorのサポートが継続されますが、新機能を追加する予定はありません。CI/CDジョブの実行を妨げる可能性のある重大なバグ、または実行コストに影響を与えるバグのみに対処します。Amazon Web Services(AWS)EC2、Microsoft Azure Compute、またはGoogle Compute Engine(GCE)でDocker Machine 
Executorを使用している場合は、[GitLab Runner Autoscaler](../runner_autoscale/_index.md)に移行してください。 {{< /alert >}} オートスケール機能を使用すると、より柔軟かつ動的な方法でリソースを使用できます。 GitLab Runnerはオートスケールできるため、インフラストラクチャには、常に必要な数のビルドインスタンスのみが含まれます。オートスケールのみを使用するようにGitLab Runnerを設定すると、GitLab Runnerをホスティングするシステムは、作成するすべてのマシンの踏み台として機能します。このマシンは「Runnerマネージャー」と呼ばれます。 {{< alert type="note" >}} DockerではDocker Machineが非推奨になりました。Docker Machineは、パブリッククラウド仮想マシンでRunnerをオートスケールするために使用される基盤技術です。詳細については、[Docker Machineの非推奨に対応するための戦略について説明するイシュー](https://gitlab.com/gitlab-org/gitlab/-/issues/341856)をお読みください。 {{< /alert >}} Docker Machine autoscalerは、`limit`と`concurrent`の設定に関係なく、VMごとに1つのコンテナを作成します。 この機能が有効であり、適切に設定されている場合、ジョブは_オンデマンド_で作成されたマシン上で実行されます。これらのマシンは、ジョブの完了後に次のジョブを実行するために待機するか、設定された`IdleTime`の経過後に削除できます。多くのクラウドプロバイダーでは、この方法は既存のインスタンスを使用することでコストを削減します。 以下に、[GitLab Community Edition](https://gitlab.com/gitlab-org/gitlab-foss)プロジェクトのGitLab.comでテストされたGitLab Runnerオートスケール機能の実例を示します: ![オートスケールの実例](img/autoscale-example.png) チャートに示されている各マシンは独立したクラウドインスタンスであり、Dockerコンテナ内でジョブを実行します。 ## システム要件 {#system-requirements} オートスケールを設定する前に、次のことを行う必要があります: - [独自の環境を準備します](../executors/docker_machine.md#preparing-the-environment)。 - (オプション)GitLabが提供するDocker Machineの[フォークバージョン](../executors/docker_machine.md#forked-version-of-docker-machine)を使用します。これにはいくつかの追加修正が含まれています。 ## サポートされているクラウドプロバイダー {#supported-cloud-providers} オートスケールメカニズムは[Docker Machine](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/tree/main/)に基づいています。サポートされているすべての仮想化およびクラウドプロバイダーのパラメータは、GitLabが管理する[Docker Machine](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/tree/main/)のフォークで利用できます。 ## Runnerの設定 {#runner-configuration} このセクションでは、重要なオートスケールパラメータについて説明します。設定の詳細については、[高度な設定](advanced-configuration.md)を参照してください。 ### Runnerのグローバルオプション {#runner-global-options} | パラメータ | 値 | 説明 | |--------------|---------|-------------| | `concurrent` | 整数 | 
`concurrent`パラメータでは、VMの数は制限されません。
[[runners]] name = "first" executor = "shell" limit = 40 (...) [[runners]] name = "second" executor = "docker+machine" limit = 30 (...) [[runners]] name = "third" executor = "ssh" limit = 10 [[runners]] name = "fourth" executor = "virtualbox" limit = 20 (...) ``` この設定では次のようになります: - 1つのRunnerプロセスで、異なる実行環境を使用する4つの異なるRunnerワーカーを作成できます。 - `concurrent`の値が100に設定されているため、この1つのRunnerは、最大100個のGitLab CI/CDジョブを同時実行します。 - `second` RunnerワーカーのみがDocker Machine Executorを使用するように設定されているため、このワーカーがVMを自動的に作成できます。 - `limit`が`30`に設定されているため、`second` Runnerワーカーは常に、オートスケールされたVMで最大30個のCI/CDジョブを実行できます。 - `concurrent`は複数の`[[runners]]`ワーカー全体のグローバルな並行処理制限を定義しますが、`limit`は1つの`[[runners]]`ワーカーの最大同時実行数を定義します。 この例では、Runnerプロセスは次のように処理します: - すべての`[[runners]]`ワーカー全体で最大100個の同時ジョブ。 - `first`ワーカーの場合、40個以下のジョブ。これらのジョブは`shell` executorを使用して実行されます。 - `second`ワーカーの場合、30個以下のジョブ。これらのジョブは`docker+machine` executorを使用して実行されます。さらに、Runnerは`[runners.machine]`のオートスケール設定に基づいてVMを維持しますが、維持するVMの数は、すべての状態(アイドル状態、使用中、作成中、削除中)で30個以下です。 - `third`ワーカーの場合、10個以下のジョブ。これらのジョブは`ssh` executorで実行されます。 - `fourth`ワーカーの場合、20個以下のジョブ。これらのジョブは`virtualbox` executorで実行されます。 次の2番目の例では、`docker+machine` executorを使用するように設定された2つの`[[runners]]`ワーカーがあります。この設定では、各Runnerワーカーは、`limit`パラメータの値によって制約される個別のVMプールを管理します。 ```toml concurrent = 100 [[runners]] name = "first" executor = "docker+machine" limit = 80 (...) [[runners]] name = "second" executor = "docker+machine" limit = 50 (...) 
この時間が`IdleTime`の値を超えている場合、マシンは自動的に削除されます。
``` 最初に、ジョブがキューに入れられていない場合、GitLab Runnerは2台のマシン(`IdleCount = 2`)を起動し、それらをアイドル状態に設定します。また、`IdleTime`は30分(`IdleTime = 1800`)に設定されています。 次に、GitLab CI/CDで5つのジョブがキューに入れられているとします。最初の2個のジョブが、2台あるアイドル状態のマシンに送信されます。GitLab Runnerは、アイドル状態のマシンの数が`IdleCount`よりも少ない(`0 < 2`)ことを認識したため、新しいマシンを起動します。これらのマシンは、`MaxGrowthRate`を超えないように順次プロビジョニングされます。 残りの3個のジョブは、準備ができた最初のマシンに割り当てられます。最適化として、これは以前にビジー状態だったがジョブを完了したマシンか、新しくプロビジョニングされたマシンにできます。この例では、プロビジョニングが高速で、以前のジョブが完了する前に新しいマシンが準備できていると仮定します。 現在、1台のアイドル状態のマシンがあるため、GitLab Runnerは`IdleCount`を満たすために新しいマシンを1台起動します。キューに新しいジョブがないため、この2台のマシンはアイドル状態になり、GitLab Runnerは満足します。 **What happened**(発生した状況): この例では、新しいジョブを待機しているアイドル状態のマシンが2台あります。5つのジョブがキューに入れられた後、新しいマシンが作成されます。したがって、合計7台のマシンがあります。5つはジョブを実行しており、2つは次のジョブを待機中のアイドル状態です。 GitLab Runnerは、`IdleCount`が満たされるまで、ジョブの実行に使用されるマシンとして新しいアイドル状態のマシンを作成します。これらのマシンは、`limit`パラメータで定義された数になるまで作成されます。GitLab Runnerは、この`limit`に達したことを検出し、オートスケールを停止します。新しいジョブは、マシンがアイドル状態に戻るまで、ジョブキューで待機する必要があります。 上記の例では、アイドル状態のマシンが常に2台利用可能です。`IdleTime`パラメータが適用されるのは、数値が`IdleCount`を超えた場合だけです。その時点でGitLab Runnerは、マシンの数を減らして`IdleCount`になるようにします。 **Scaling down**(スケールダウン): ジョブが完了すると、マシンはアイドル状態に設定され、新しいジョブが実行されるまで待機します。新しいジョブがキューに表れない場合、`IdleTime`で指定された時間が経過した後にアイドルマシンが削除されます。この例の場合、(各マシンの最後のジョブの実行が終了した時点から測定して)非アクティブ状態が30分続いた後にすべてのマシンが削除されます。GitLab Runnerは、この例の最初の部分と同じように、アイドル状態のマシンを`IdleCount`台、実行し続けます。 オートスケールアルゴリズムは次のように動作します: 1. GitLab Runnerが起動します。 1. GitLab Runnerがアイドル状態のマシンを2台作成します 1. GitLab Runnerが1つのジョブを選択します。 1. GitLab Runnerは、アイドルマシンを2台維持するためにもう1台のマシンを作成します。 1. 選択されたジョブが終了し、アイドルマシンが3台になります。 1. 3台のアイドルマシンのうちの1台は、その最後のジョブを選択してから`IdleTime`を超えた時点で削除されます。 1. 
迅速なジョブ処理のため、GitLab Runnerは、少なくとも2台のアイドルマシンを常に保持します。 次の図は、マシンとビルド(ジョブ)の時間的推移を示しています: ![オートスケール状態のチャート](img/autoscale-state-chart.png) ## `concurrent`、`limit`、`IdleCount`によって実行マシン数の上限が生成される仕組み {#how-concurrent-limit-and-idlecount-generate-the-upper-limit-of-running-machines} `limit`または`concurrent`に設定すべき値を示す魔法のような方程式は存在しません。各自のニーズに応じて設定してください。`IdleCount`の数のアイドル状態のマシンを維持することで、処理がスピードアップします。インスタンスが作成されるまで、10秒/20秒/30秒にわたって待つ必要はありません。ただしユーザーとしては、(料金を支払う必要のある)すべてのマシンにジョブを実行させ、アイドル状態にしないようにしたいと考えます。したがって`concurrent`と`limit`は、料金を支払う最大数のマシンを実行する値に設定する必要があります。`IdleCount`は、ジョブキューが空の場合に維持する_未使用_のマシンの最小数を示す値に設定する必要があります。 次の例を考えてみましょう: ```toml concurrent=20 [[runners]] limit = 40 [runners.machine] IdleCount = 10 ``` 上記のシナリオでは、作成するマシンの総数は30です。マシン(ビルド中およびアイドル状態)の総数の`limit`を40に設定できます。10台のアイドル状態のマシンを維持できますが、`concurrent`ジョブは20個です。したがって、20台の同時実行マシンがジョブを実行し、10台のマシンがアイドル状態であるため、総数は30になります。 しかし`limit`が、作成される可能性があるマシンの総数よりも少ない場合はどうなるでしょうか?以下の例で、このケースについて説明します: ```toml concurrent=20 [[runners]] limit = 25 [runners.machine] IdleCount = 10 ``` この例では、最大20個の同時実行ジョブと25台のマシンを持つことができます。`limit`が25であるため、最悪の場合はアイドル状態のマシンの数は10ではなく5になります。 ## `IdleScaleFactor`戦略 {#the-idlescalefactor-strategy} `IdleCount`パラメータは、Runnerが維持する必要があるアイドル状態のマシンの静的な数を定義します。割り当てる値はユースケースによって異なります。 まず、アイドル状態のマシンの数としてある程度少ない数を割り当てます。次に、現在の使用状況に応じて自動的にこの数を大きな数に調整します。このために実験的な`IdleScaleFactor`設定を使用します。 {{< alert type="warning" >}} `IdleScaleFactor`は内部的に`float64`値であり、浮動小数点数形式を使用する必要があります(`0.0`、`1.0`、`1.5`など)。整数形式(`IdleScaleFactor = 1`など)を使用すると、Runnerのプロセスはエラー`FATAL: Service run failed error=toml: cannot load TOML value of type int64 into a Go float`で失敗します。 {{< /alert >}} この設定を使用すると、GitLab Runnerは定義された数のアイドル状態のマシンを維持しようとします。ただしこの数はもはや静的ではありません。GitLab Runnerは`IdleCount`を使用する代わりに、使用中のマシンをカウントし、必要なアイドル状態のマシンの数をその数の係数として定義します。 
使用中のマシンが100台から20台に減った場合、必要なアイドル状態のマシン数は`20 * 1.1 = 22`になります。GitLab Runnerはマシンの停止を開始します。前述したように、GitLab Runnerは`IdleTime`の間に使用されていないマシンを削除します。したがって、過剰な数のアイドル状態のVMの削除が積極的に行われます。使用中のマシンの数が0になった場合、必要なアイドル状態のマシン数は`0 * 1.1 = 0`です。
`[runners.machine]`設定に複数の`[[runners.machine.autoscaling]]`セクションを追加できます。各セクションには、独自の`IdleCount`、`IdleTime`、`Periods`、および`Timezone`プロパティがあります。最も一般的なシナリオから最も具体的なシナリオの順に、設定ごとにセクションを定義する必要があります。 すべてのセクションが解析されます。現在の時刻に一致する最後のセクションがアクティブになります。一致するものがない場合、`[runners.machine]`のルートの値が使用されます。 次に例を示します: ```toml [runners.machine] MachineName = "auto-scale-%s" MachineDriver = "google" IdleCount = 10 IdleTime = 1800 [[runners.machine.autoscaling]] Periods = ["* * 9-17 * * mon-fri *"] IdleCount = 50 IdleTime = 3600 Timezone = "UTC" [[runners.machine.autoscaling]] Periods = ["* * * * * sat,sun *"] IdleCount = 5 IdleTime = 60 Timezone = "UTC" ``` この設定では、すべての平日の9時から16時59分(UTC)までの期間は、稼働時間中の大量のトラフィックを処理するためにマシンがオーバープロビジョニングされます。週末には、トラフィックの減少を考慮して`IdleCount`が5に減っています。それ以外の期間には、値はルートのデフォルト(`IdleCount = 10`と`IdleTime = 1800`)から取得されます。 {{< alert type="note" >}} 指定した期間の最後の分の59秒目は、その期間の一部と*みなされません*。詳細については、[イシュー#2170](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2170)を参照してください。 {{< /alert >}} 期間の`Timezone`を指定できます(`"Australia/Sydney"`など)。指定しない場合、すべてのRunnerのホストマシンのシステム設定が使用されます。このデフォルトは、`Timezone = "Local"`として明示的に記述できます。 `[[runner.machine.autoscaling]]`セクションの構文の詳細については、[GitLab Runner - 詳細設定 - `[runners.machine]`セクション](advanced-configuration.md#the-runnersmachine-section)を参照してください。 ## 分散Runnerキャッシュ {#distributed-runners-caching} {{< alert type="note" >}} [分散キャッシュの使用方法](speed_up_job_execution.md#use-a-distributed-cache)を参照してください。 {{< /alert >}} ジョブの処理をスピードアップするために、GitLab Runnerは、選択されたディレクトリやファイルを保存し、後続のジョブ間で共有する[キャッシュメカニズム](https://docs.gitlab.com/ci/yaml/#cache)を提供します。 このメカニズムは、ジョブが同じホストで実行される場合には正常に機能します。ただし、GitLab Runnerオートスケール機能を使用し始めると、ほとんどのジョブは新しい(またはほぼ新しい)ホストで実行されます。この新しいホストは、新しいDockerコンテナで各ジョブを実行します。その場合、キャッシュ機能を利用することはできません。 この問題に対処するために、オートスケール機能とともに分散Runnerキャッシュ機能が導入されました。 この機能は設定済みのオブジェクトストレージサーバーを使用して、使用中のDockerホスト間でキャッシュを共有します。GitLab Runnerはサーバーをクエリし、アーカイブをダウンロードしてキャッシュを復元するか、アップロードしてキャッシュをアーカイブします。 
上記の例では、S3 URLは`http(s)://<ServerAddress>/<BucketName>/<Path>/runner/<runner-token>/project/<id>`という構造に従っています。2つ以上のRunnerの間でキャッシュを共有するには、`Shared`フラグをtrueに設定します。このフラグにより、URLからRunnerトークン(`runner/<runner-token>`)が削除され、設定されているすべてのRunnerが同じキャッシュを共有するようになります。キャッシュ共有が有効になっている場合にRunner間でキャッシュを分離するために、`Path`を設定することもできます。
"engine-registry-mirror=http://10.11.12.13:12345" ] ``` ここで`10.11.12.13:12345`は、レジストリミラーがDockerサービスからの接続をリッスンしているIPアドレスとポートです。Docker Machineによって作成された各ホストからアクセスできる必要があります。 [コンテナのプロキシの使用方法](speed_up_job_execution.md#use-a-proxy-for-containers)の詳細を参照してください。 ## 完全な`config.toml`の例 {#a-complete-example-of-configtoml} 以下に示す`config.toml`では、[`google` Docker Machineドライバー](https://github.com/docker/docs/blob/173d3c65f8e7df2a8c0323594419c18086fc3a30/machine/drivers/gce.md)が使用されています: ```toml concurrent = 50 # All registered runners can run up to 50 concurrent jobs [[runners]] url = "https://gitlab.com" token = "RUNNER_TOKEN" # Note this is different from the registration token used by `gitlab-runner register` name = "autoscale-runner" executor = "docker+machine" # This runner is using the 'docker+machine' executor limit = 10 # This runner can execute up to 10 jobs (created machines) [runners.docker] image = "ruby:3.3" # The default image used for jobs is 'ruby:3.3' [runners.machine] IdleCount = 5 # There must be 5 machines in Idle state - when Off Peak time mode is off IdleTime = 600 # Each machine can be in Idle state up to 600 seconds (after this it will be removed) - when Off Peak time mode is off MaxBuilds = 100 # Each machine can handle up to 100 jobs in a row (after this it will be removed) MachineName = "auto-scale-%s" # Each machine will have a unique name ('%s' is required) MachineDriver = "google" # Refer to Docker Machine docs on how to authenticate: https://docs.docker.com/machine/drivers/gce/#credentials MachineOptions = [ "google-project=GOOGLE-PROJECT-ID", "google-zone=GOOGLE-ZONE", # e.g. 'us-west1' "google-machine-type=GOOGLE-MACHINE-TYPE", # e.g. 
'n1-standard-8' "google-machine-image=ubuntu-os-cloud/global/images/family/ubuntu-1804-lts", "google-username=root", "google-use-internal-ip", "engine-registry-mirror=https://mirror.gcr.io" ] [[runners.machine.autoscaling]] # Define periods with different settings Periods = ["* * 9-17 * * mon-fri *"] # Every workday between 9 and 17 UTC IdleCount = 50 IdleCountMin = 5 IdleScaleFactor = 1.5 # Means that current number of Idle machines will be 1.5*in-use machines, # no more than 50 (the value of IdleCount) and no less than 5 (the value of IdleCountMin) IdleTime = 3600 Timezone = "UTC" [[runners.machine.autoscaling]] Periods = ["* * * * * sat,sun *"] # During the weekends IdleCount = 5 IdleTime = 60 Timezone = "UTC" [runners.cache] Type = "s3" [runners.cache.s3] ServerAddress = "s3.eu-west-1.amazonaws.com" AccessKey = "AMAZON_S3_ACCESS_KEY" SecretKey = "AMAZON_S3_SECRET_KEY" BucketName = "runner" Insecure = false ``` `MachineOptions`パラメータには、Docker MachineがGoogle Compute Engineでマシンを作成するために使用する`google`ドライバーのオプションと、Docker Machine自体のオプション(`engine-registry-mirror`)の両方が含まれています。 ================================================ FILE: docs-locale/ja-jp/configuration/configuring_runner_operator.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: OpenShiftでのGitLab Runnerの設定 --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} このドキュメントでは、OpenShiftでGitLab Runnerを設定する方法について説明します。 ## GitLab Runner Operatorへのプロパティの引き渡し {#passing-properties-to-gitlab-runner-operator} `Runner`を作成する際、その`spec`にプロパティを設定することで、それを設定できます。たとえば、runnerが登録されているGitLab URLや、登録トークンを含むシークレットの名前を指定できます: ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: 
gitlab-runner-secret # Name of the secret containing the Runner token ``` 使用可能なすべてのプロパティについては、[Operatorのプロパティ](#operator-properties)をお読みください。 ## Operatorのプロパティ {#operator-properties} 次のプロパティをOperatorに渡すことができます。 一部のプロパティは、より新しいバージョンのOperatorでのみ使用できます。 | 設定 | オペレーター | 説明 | |--------------------|----------|-------------| | `gitlabUrl` | すべて | GitLabインスタンスの完全修飾ドメイン名(例:`https://gitlab.example.com`)。 | | `token` | すべて | Runnerの登録に使用される`runner-registration-token`キーを含む`Secret`の名前。 | | `tags` | すべて | Runnerに適用されるコンマ区切りのタグのリスト。 | | `concurrent` | すべて | 同時に実行できるジョブの数を制限します。最大数は、定義されているすべてのrunnerです。0は無制限を意味しません。デフォルトは`10`です。 | | `interval` | すべて | 新しいジョブのチェック間隔(秒数)を定義します。デフォルトは`30`です。 | | `locked` | 1.8 | Runnerをプロジェクトにロックするかどうかを定義します。デフォルトは`false`です。 | | `runUntagged` | 1.8 | タグなしのジョブを実行するかどうかを定義します。タグが指定されていない場合、`true`がデフォルトです。それ以外の場合は、`false`になります。 | | `protected` | 1.8 | Runnerが保護ブランチでのみジョブを実行するかどうかを定義します。デフォルトは`false`です。 | | `cloneURL` | すべて | GitLabインスタンスのURLを上書きします。RunnerがGitLab URLに接続できない場合にのみ使用されます。 | | `env` | すべて | Runnerポッドの環境変数として挿入されるキー/バリューペアを含む`ConfigMap`の名前。 | | `runnerImage` | 1.7 | デフォルトのGitLab Runner Dockerイメージを上書きします。デフォルトは、オペレーターにバンドルされていたRunnerイメージです。 | | `helperImage` | すべて | デフォルトのGitLab Runnerヘルパーイメージを上書きします。 | | `buildImage` | すべて | 指定されていない場合にビルドに使用するデフォルトのDockerイメージ。 | | `cacheType` | すべて | Runnerアーティファクトに使用されるキャッシュのタイプ。`gcs`、`s3`、`azure`のいずれか。 | | `cachePath` | すべて | ファイルシステム上のキャッシュパスを定義します。 | | `cacheShared` | すべて | Runner間でキャッシュの共有を有効にします。 | | `s3` | すべて | S3キャッシュの設定に使用されるオプション。[キャッシュプロパティ](#cache-properties)を参照してください。 | | `gcs` | すべて | `gcs`キャッシュの設定に使用されるオプション。[キャッシュプロパティ](#cache-properties)を参照してください。 | | `azure` | すべて | Azureキャッシュの設定に使用されるオプション。[キャッシュプロパティ](#cache-properties)を参照してください。 | | `ca` | すべて | カスタム認証局(CA)証明書を含むTLSシークレットの名前。 | | `serviceaccount` | すべて | Runnerポッドの実行に使用されるサービスアカウントをオーバーライドするために使用します。 | | `config` | すべて | [設定テンプレート](../register/_index.md#register-with-a-configuration-template)を使用して、カスタム`ConfigMap`を提供するために使用します。 | 
| `shutdownTimeout` | 1.34 | [強制シャットダウン操作](../commands/_index.md#signals)がタイムアウトになりプロセスが終了するまでの秒数を示します。デフォルト値は`30`です。`0`以下に設定すると、デフォルト値が使用されます。 | | `logLevel` | 1.34 | ログレベルを定義します。オプションには、`debug`、`info`、`warn`、`error`、`fatal`、`panic`があります。 | | `logFormat` | 1.34 | ログ形式を指定します。オプションには、`runner`、`text`、`json`があります。デフォルト値は`runner`で、色分けのためのANSIエスケープコードが含まれています。 | | `listenAddr` | 1.34 | Prometheusメトリクス用HTTPサーバーがリッスンするアドレス(`:`)を定義します。設定の詳細については、[GitLab Runner Operatorの監視](../monitoring/_index.md#monitor-operator-managed-gitlab-runners)を参照してください。 | | `sentryDsn` | 1.34 | Sentryへのすべてのシステムレベルのエラーの追跡を有効にします。 | | `connectionMaxAge` | 1.34 | GitLabサーバーへのTLSキープアライブ接続を再接続するまでの最大時間を指定します。デフォルト値は`15m`(15分)です。`0`以下に設定すると、接続は可能な限り持続します。 | | `podSpec` | 1.23 | GitLab Runnerポッド(テンプレート)に適用するパッチのリスト。詳細については、[Runnerポッドテンプレートのパッチ](#patching-the-runner-pod-template)を参照してください。 | | `deploymentSpec` | 1.40 | GitLab Runnerデプロイに適用するパッチのリスト。詳細については、[Runnerデプロイテンプレートのパッチ](#patching-the-runner-deployment-template)を参照してください。 | ## キャッシュプロパティ {#cache-properties} ### S3キャッシュ {#s3-cache} | 設定 | オペレーター | 説明 | |---------------|----------|-------------| | `server` | すべて | S3サーバーアドレス。 | | `credentials` | すべて | `accesskey`プロパティと`secretkey`プロパティを含む、オブジェクトストレージへのアクセスに使用される`Secret`の名前。 | | `bucket` | すべて | キャッシュが保存されているバケットの名前。 | | `location` | すべて | キャッシュが保存されているS3リージョンの名前。 | | `insecure` | すべて | インセキュアな接続または`HTTP`を使用します。 | ### `gcs` キャッシュ {#gcs-cache} | 設定 | オペレーター | 説明 | |-------------------|----------|-------------| | `credentials` | すべて | `access-id`プロパティと`private-key`プロパティを含む、オブジェクトストレージへのアクセスに使用される`Secret`の名前。 | | `bucket` | すべて | キャッシュが保存されているバケットの名前。 | | `credentialsFile` | すべて | `gcs`認証情報ファイル`keys.json`を取得します。 | ### Azureキャッシュ {#azure-cache} | 設定 | オペレーター | 説明 | |-----------------|----------|-------------| | `credentials` | すべて | `accountName`プロパティと`privateKey`プロパティを含む、オブジェクトストレージへのアクセスに使用される`Secret`の名前。 | | `container` | すべて | キャッシュが保存されているAzureコンテナの名前。 | | `storageDomain` | すべて | Azure 
blobストレージのドメイン名。 | ## プロキシ環境の設定 {#configure-a-proxy-environment} プロキシ環境を作成するには: 1. `custom-env.yaml`ファイルを編集します。次に例を示します: ```yaml apiVersion: v1 data: HTTP_PROXY: example.com kind: ConfigMap metadata: name: custom-env ``` 1. OpenShiftを更新して変更を適用します。 ```shell oc apply -f custom-env.yaml ``` 1. [`gitlab-runner.yml`](../install/operator.md#install-gitlab-runner)ファイルを更新してください。 ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret # Name of the secret containing the Runner token env: custom-env ``` プロキシがKubernetes APIにアクセスできない場合は、CI/CDジョブでエラーが発生する可能性があります: ```shell ERROR: Job failed (system failure): prepare environment: setting up credentials: Post https://172.21.0.1:443/api/v1/namespaces//secrets: net/http: TLS handshake timeout. Check https://docs.gitlab.com/runner/shells/#shell-profile-loading for more information ``` このエラーを解決するには、Kubernetes APIのIPアドレスを`custom-env.yaml`ファイルの`NO_PROXY`設定に追加します: ```yaml apiVersion: v1 data: NO_PROXY: 172.21.0.1 HTTP_PROXY: example.com kind: ConfigMap metadata: name: custom-env ``` Kubernetes APIのIPアドレスは、次を実行して確認できます: ```shell oc get services --namespace default --field-selector='metadata.name=kubernetes' | grep -v NAME | awk '{print $3}' ``` ## `config.toml`を設定テンプレートでカスタマイズする {#customize-configtoml-with-a-configuration-template} [設定テンプレート](../register/_index.md#register-with-a-configuration-template)を使用して、Runnerの`config.toml`ファイルをカスタマイズできます。 1. カスタム設定テンプレートファイルを作成します。たとえば、Runnerに`EmptyDir`ボリュームをマウントし、`cpu_limit`を設定するように指示します。`custom-config.toml`ファイルを作成します: ```toml [[runners]] [runners.kubernetes] cpu_limit = "500m" [runners.kubernetes.volumes] [[runners.kubernetes.volumes.empty_dir]] name = "empty-dir" mount_path = "/path/to/empty_dir" medium = "Memory" ``` 1. `custom-config.toml`ファイルから`custom-config-toml`という名前の`ConfigMap`を作成します: ```shell oc create configmap custom-config-toml --from-file config.toml=custom-config.toml ``` 1. 
`Runner`の`config`プロパティを設定します: ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret config: custom-config-toml ``` [既知の問題](https://gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/-/issues/229)のため、次の設定を変更するには、設定テンプレートの代わりに環境変数を使用する必要があります: | 設定 | 環境変数 | デフォルト値 | |----------------------------------|------------------------------|---------------| | `runners.request_concurrency` | `RUNNER_REQUEST_CONCURRENCY` | `1` | | `runners.output_limit` | `RUNNER_OUTPUT_LIMIT` | `4096` | | `kubernetes.runner.poll_timeout` | `KUBERNETES_POLL_TIMEOUT` | `180` | ## カスタムTLS証明書の設定 {#configure-a-custom-tls-cert} 1. カスタムTLS証明書を設定するには、キー`tls.crt`を持つシークレットを作成します。この例では、ファイルの名前は`custom-tls-ca-secret.yaml`です: ```yaml apiVersion: v1 kind: Secret metadata: name: custom-tls-ca type: Opaque stringData: tls.crt: | -----BEGIN CERTIFICATE----- MIIEczCCA1ugAwIBAgIBADANBgkqhkiG9w0BAQQFAD..AkGA1UEBhMCR0Ix ..... 7vQMfXdGsRrXNGRGnX+vWDZ3/zWI0joDtCkNnqEpVn..HoX -----END CERTIFICATE----- ``` 1. シークレットを作成します: ```shell oc apply -f custom-tls-ca-secret.yaml ``` 1. 
`runner.yaml`の`ca`キーを、シークレットの名前と同じ名前に設定します: ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret ca: custom-tls-ca ``` ## RunnerポッドのCPUおよびメモリサイズの設定 {#configure-the-cpu-and-memory-size-of-runner-pods} カスタム`config.toml`ファイルで[CPU制限](../executors/kubernetes/_index.md#cpu-requests-and-limits)と[メモリ制限](../executors/kubernetes/_index.md#memory-requests-and-limits)を設定するには、[このトピック](#customize-configtoml-with-a-configuration-template)の手順に従ってください。 ## クラスターリソースに基づいて、Runnerごとのジョブの並行処理を設定します {#configure-job-concurrency-per-runner-based-on-cluster-resources} `Runner`リソースの`concurrent`プロパティを設定します: ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret concurrent: 2 ``` ジョブの並行処理は、プロジェクトの要件によって決まります。 1. まず、CIジョブを実行するために必要なコンピューティングリソースとメモリリソースを特定します。 1. クラスター内のリソースを考慮して、そのジョブを何回実行できるかを計算します。 高い並行処理値を設定すると、Kubernetesエグゼキューターは可能な限りすぐにジョブを処理します。ただし、ジョブがスケジュールされるタイミングは、Kubernetesクラスターのスケジューラ容量によって決まります。 ## GitLab Runnerマネージャーのサービスアカウント {#service-account-for-the-gitlab-runner-manager} 新規インストールの場合は、これらのRBACロールバインディングリソースが存在しない場合、GitLab Runnerはrunnerマネージャーポッド用に`gitlab-runner-app-sa`という名前のKubernetes `ServiceAccount`を作成します: - `gitlab-runner-app-rolebinding` - `gitlab-runner-rolebinding` ロールバインディングのいずれかが存在する場合、GitLabは、ロールバインディングで定義されている`subjects`と`roleRef`からロールとサービスアカウントを解決します。 両方のロールバインディングが存在する場合、`gitlab-runner-app-rolebinding`は`gitlab-runner-rolebinding`よりも優先されます。 ## トラブルシューティング {#troubleshooting} ### ルートと非ルート {#root-vs-non-root} GitLab Runner OperatorとGitLab Runnerポッドは、非ルートユーザーとして実行されます。そのため、ジョブで使用されるビルドイメージは、正常に完了できるように、非ルートユーザーとして実行する必要があります。これにより、ジョブは最小限の権限で正常に実行されることが保証されます。 これを機能させるには、CI/CDジョブに使用されるビルドイメージが以下であることを確認してください: - 非ルートとして実行 - 制限されたファイルシステムに書き込まない OpenShiftクラスター上のほとんどのコンテナファイルシステムは読み取り専用ですが、次の例外があります: - マウントされたボリューム - `/var/tmp` - `/tmp` - `tmpfs`としてルートファイルシステムにマウントされたその他のボリューム #### 
`HOME`環境変数のオーバーライド {#overriding-the-home-environment-variable} カスタムビルドイメージを作成するか、[環境変数をオーバーライドする](#configure-a-proxy-environment)場合は、`HOME`環境変数が`/`に設定されていないことを確認してください。これは読み取り専用になります。特に、ジョブがホームディレクトリにファイルを書き込む必要がある場合。たとえば、`/home`の下にディレクトリ(`/home/ci`など)を作成し、`Dockerfile`で`ENV HOME=/home/ci`を設定できます。 Runnerポッドの場合、[`HOME`が`/home/gitlab-runner`に設定されることが予想されます](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/-/blob/e265820a00a6a1b9a271dc132de2618ced43cf92/runner/Dockerfile.OCP#L14)。この変数が変更された場合、新しい場所には[適切な権限](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/-/blob/e265820a00a6a1b9a271dc132de2618ced43cf92/runner/Dockerfile.OCP#L38)が必要です。これらのガイドラインは、[Red Hatコンテナプラットフォームのドキュメント](https://docs.redhat.com/en/documentation/openshift_container_platform/4.18/html/images/creating-images#images-create-guide-openshift_create-images)にも記載されています。 ### `locked`変数のオーバーライド {#overriding-locked-variable} Runnerトークンを登録するときに、`locked`変数を`true`に設定すると、エラー`Runner configuration other than name, description, and exector is reserved and cannot be specified`が表示されます。 ```yaml locked: true # REQUIRED tags: "" runUntagged: false protected: false maximumTimeout: 0 ``` 詳細については、[イシュー472](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/issues/472#note_1483346437)を参照してください。 #### セキュリティコンテキスト制約に注意してください {#watch-out-for-security-context-constraints} デフォルトでは、新しいOpenShiftプロジェクトにインストールすると、GitLab Runner Operatorは非ルートとして実行されます。`default`プロジェクトなどの一部のプロジェクトは、すべてのサービスアカウントが`anyuid`アクセス権を持っている例外です。その場合、イメージのユーザーは`root`です。これは、ジョブなど、コンテナShell内で`whoami`を実行することで確認できます。[Red Hatコンテナプラットフォームのドキュメント](https://docs.redhat.com/en/documentation/openshift_container_platform/4.18/html/authentication_and_authorization/managing-pod-security-policies)のセキュリティコンテキスト制約の詳細をご覧ください。 #### `anyuid`セキュリティコンテキストの制約として実行 {#run-as-anyuid-security-context-constraints} {{< alert type="warning" >}} ルートとしてジョブを実行したり、ルートファイルシステムに書き込んだりすると、システムがセキュリティリスクにさらされる可能性があります。 {{< /alert >}} 
CI/CDジョブをルートユーザーとして実行したり、ルートファイルシステムに書き込んだりするには、`gitlab-runner-app-sa`サービスアカウントに`anyuid`セキュリティコンテキスト制約を設定します。GitLab Runnerコンテナは、このサービスアカウントを使用します。 OpenShift 4.3.8以前: ```shell oc adm policy add-scc-to-user anyuid -z gitlab-runner-app-sa -n # Check that the anyiud SCC is set: oc get scc anyuid -o yaml ``` OpenShift 4.3.8以降: ```shell oc create -f - < rules: - apiGroups: - security.openshift.io resourceNames: - anyuid resources: - securitycontextconstraints verbs: - use EOF oc create -f - < subjects: - kind: ServiceAccount name: gitlab-runner-app-sa roleRef: kind: Role name: scc-anyuid apiGroup: rbac.authorization.k8s.io EOF ``` #### ヘルパーコンテナとビルドコンテナのユーザーIDとグループIDのマッチング {#matching-helper-container-and-build-container-user-id-and-group-id} GitLab Runner Operatorデプロイでは、`registry.gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/gitlab-runner-helper-ocp`がデフォルトのヘルパーイメージとして使用されます。このイメージは、セキュリティコンテキストによって明示的に変更されない限り、ユーザーIDとグループID `1001:1001`で実行されます。 ビルドコンテナのユーザーIDがヘルパーイメージのユーザーIDと異なる場合、ビルド中に権限関連のエラーが発生する可能性があります。一般的なエラーメッセージを次に示します: ```shell fatal: detected dubious ownership in repository at '/builds/gitlab-org/gitlab-runner' ``` このエラーは、リポジトリがユーザーID `1001`(ヘルパーコンテナ)によってクローンされたことを示していますが、ビルドコンテナ内の別のユーザーIDがそれにアクセスしようとしています。 **解決策**: ヘルパーコンテナのユーザーIDとグループIDに合わせて、ビルドコンテナのセキュリティコンテキストを設定します: ```toml [runners.kubernetes.build_container_security_context] run_as_user = 1001 run_as_group = 1001 ``` **Additional notes**(追加の注意)* - これらの設定により、リポジトリをクローンするコンテナと、それをビルドするコンテナの間で、一貫したファイルの所有権が保証されます。 - 異なるユーザーIDまたはグループIDでヘルパーイメージをカスタマイズした場合は、これらの値をそれに応じて調整します。 - OpenShiftデプロイの場合は、これらのセキュリティコンテキスト設定がクラスターのセキュリティコンテキスト制約(SCCS)に準拠していることを確認してください。 #### SETFCAPの設定 {#configure-setfcap} Red Hat OpenShiftコンテナプラットフォーム(RHOCP)4.11以降を使用している場合は、次のエラーメッセージが表示されることがあります: ```shell error reading allowed ID mappings:error reading subuid mappings for user ``` 一部のジョブ(`buildah`など)では、正しく実行するために`SETFCAP`機能が付与されている必要があります。このイシューを解決するには、次の手順に従います: 1. 
GitLab Runnerが使用しているセキュリティコンテキスト制約にSETFCAP機能を追加します(GitLab Runnerポッドに割り当てられているセキュリティコンテキスト制約を`gitlab-scc`に置き換えます): ```shell oc patch scc gitlab-scc --type merge -p '{"allowedCapabilities":["SETFCAP"]}' ``` 1. `config.toml`を更新し、`kubernetes`セクションの下に`SETFCAP`機能を追加します: ```toml [[runners]] [runners.kubernetes] [runners.kubernetes.pod_security_context] [runners.kubernetes.build_container_security_context] [runners.kubernetes.build_container_security_context.capabilities] add = ["SETFCAP"] ``` 1. GitLab Runnerがデプロイされているネームスペースで、この`config.toml`を使用して`ConfigMap`を作成します: ```shell oc create configmap custom-config-toml --from-file config.toml=config.toml ``` 1. 対象のRunnerにパッチを適用し、最近作成した`ConfigMap`を指すように`config:`パラメータを追加します(`my-runner`を正しいRunnerポッド名に置き換えます)。 ```shell oc patch runner my-runner --type merge -p '{"spec": {"config": "custom-config-toml"}}' ``` 詳細については、[Red Hatのドキュメント](https://access.redhat.com/solutions/7016013)を参照してください。 ### FIPS準拠のGitLab Runnerを使用する {#using-fips-compliant-gitlab-runner} {{< alert type="note" >}} Operatorの場合、変更できるのはヘルパーイメージのみです。GitLab Runnerイメージはまだ変更できません。[イシュー28814](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28814)は、この機能を追跡します。 {{< /alert >}} [FIPS準拠のGitLab Runnerヘルパー](../install/_index.md#fips-compliant-gitlab-runner)を使用するには、次のようにヘルパーイメージを変更します: ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret helperImage: gitlab/gitlab-runner-helper:ubi-fips concurrent: 2 ``` #### 自己署名証明書を使用したGitLab Runnerの登録 {#register-gitlab-runner-by-using-a-self-signed-certificate} 自己署名証明書をGitLab Self-Managedで使用するには、秘密証明書の署名に使用したCA証明書を含むシークレットを作成します。 シークレットの名前は、Runner仕様セクションでCAとして指定されます: ```yaml KIND: Runner VERSION: apps.gitlab.com/v1beta2 FIELD: ca DESCRIPTION: Name of tls secret containing the custom certificate authority (CA) certificates ``` シークレットは、次のコマンドを使用して作成できます: ```shell oc create secret generic mySecret --from-file=tls.crt=myCert.pem -o yaml ``` #### 
IPアドレスを指す外部URLでGitLab Runnerを登録します {#register-gitlab-runner-with-an-external-url-that-points-to-an-ip-address} Runnerが自己署名証明書とホスト名を一致させることができない場合、エラーメッセージが表示される場合があります。この問題は、ホスト名の代わりにIPアドレス(###.##.##.##など)を使用するようにGitLab Self-Managedを設定した場合に発生します: ```shell [31;1mERROR: Registering runner... failed [0;m [31;1mrunner[0;m=A5abcdEF [31;1mstatus[0;m=couldn't execute POST against https://###.##.##.##/api/v4/runners: Post https://###.##.##.##/api/v4/runners: x509: cannot validate certificate for ###.##.##.## because it doesn't contain any IP SANs [31;1mPANIC: Failed to register the runner. You may be having network problems.[0;m ``` このイシューを解決するには、次の手順に従います: 1. GitLab Self-Managedサーバーで、`subjectAltName`パラメータにIPアドレスを追加するように`openssl`を変更します: ```shell # vim /etc/pki/tls/openssl.cnf [ v3_ca ] subjectAltName=IP:169.57.64.36 <---- Add this line. 169.57.64.36 is your GitLab server IP. ``` 1. 次に、次のコマンドを使用して自己署名CAを再生成します: ```shell # cd /etc/gitlab/ssl # openssl req -x509 -nodes -days 3650 -newkey rsa:4096 -keyout /etc/gitlab/ssl/169.57.64.36.key -out /etc/gitlab/ssl/169.57.64.36.crt # openssl dhparam -out /etc/gitlab/ssl/dhparam.pem 4096 # gitlab-ctl restart ``` 1. 
この新しい証明書を使用して、新しいシークレットを生成します。 ## パッチの構造 {#patch-structure} 各仕様パッチは、次のプロパティで構成されています: | 設定 | 説明 | |-------------|-------------------------------------------------------------------------------------------------------------------------------------------------| | `name` | カスタム仕様パッチの名前。 | | `patchFile` | 最終的な仕様の生成前に、このオブジェクトに適用する変更を定義するファイルのパス。このファイルはJSONまたはYAMLファイルである必要があります。 | | `patch` | 最終的な仕様に適用する変更を記述したJSONまたはYAML形式の文字列(生成前)。 | | `patchType` | 指定された変更を仕様に適用するために使用される戦略。使用できる値は、`merge`、`json`、`strategic`(デフォルト)です。 | 同じ仕様の設定で、`patchFile`と`patch`の両方を設定することはできません。 ## Runnerポッドテンプレートのパッチ {#patching-the-runner-pod-template} [ポッド仕様](https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-template-v1/#PodTemplateSpec)のパッチを使用すると、オペレーターが生成したKubernetesデプロイにパッチを適用することで、GitLab Runnerのデプロイ方法をカスタマイズできます。パッチは、ポッドテンプレートの仕様(`deployment.spec.template.spec`)に適用されます。 次のようなポッドレベルの設定を制御できます: - リソースのリクエストと制限 - セキュリティコンテキスト - ボリュームのマウントとボリューム - 環境変数 - ノードセレクターとアフィニティルール - Tolerations(トレランス) - ホスト名とDNS設定 ## Runnerデプロイテンプレートのパッチ {#patching-the-runner-deployment-template} [デプロイメント仕様](https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/deployment-v1/#Deployment)のパッチを使用すると、オペレーターが生成したKubernetesデプロイにパッチを適用することで、GitLab Runnerのデプロイ方法をカスタマイズできます。パッチは、デプロイ仕様(`deployment.spec`)に適用されます。 次のようなデプロイレベルの設定を制御できます: - レプリカ数 - デプロイメント戦略(RollingUpdate、Recreate) - リビジョン履歴制限 - 進捗期限秒数 - ラベルと注釈 ## パッチの順序 {#patch-order} デプロイメント仕様のパッチは、ポッド仕様のパッチの前に適用されます。つまり、デプロイメントとポッドの仕様が同じフィールドを変更した場合、ポッドの仕様が優先されます。 ## 例 {#examples} ### ポッド仕様のパッチの例 {#pod-specification-patching-example} ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret podSpec: - name: "set-hostname" patch: | hostname: "custom-hostname" patchType: "merge" - name: "add-resource-requests" patch: | containers: - name: build resources: requests: cpu: "500m" memory: "256Mi" patchType: "strategic" ``` ### デプロイメント仕様のパッチの例 
{#deployment-specification-patching-example} ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: dev spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret deploymentSpec: - name: "set-replicas" patch: | replicas: 3 patchType: "strategic" - name: "configure-strategy" patch: | strategy: type: RollingUpdate rollingUpdate: maxUnavailable: 25% maxSurge: 50% patchType: "strategic" - name: "set-revision-history" patch: | [{"op": "add", "path": "/revisionHistoryLimit", "value": 10}] patchType: "json" ``` ## ベストプラクティス {#best-practices} - 本番環境へのデプロイに適用する前に、非本番環境でパッチをテストします。 - 個々のポッド設定ではなく、デプロイの動作に影響する設定には、デプロイレベルのパッチを使用します。 - ポッド仕様のパッチは、競合するフィールドのデプロイメント仕様のパッチをオーバーライドすることに注意してください。 ================================================ FILE: docs-locale/ja-jp/configuration/feature-flags.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: GitLab Runnerの機能フラグ --- > [!warning] > デフォルトで無効になっている機能を有効にすると、データ破損、安定性の低下、パフォーマンスの低下、およびセキュリティの問題が発生する可能性があります。機能フラグを有効にする前に、有効化に伴うリスクを認識しておく必要があります。詳細については、[開発中の機能を有効にする際のリスク](https://docs.gitlab.com/administration/feature_flags/#risks-when-enabling-features-still-in-development)を参照してください。 機能フラグは、特定の機能を有効または無効を切り替えることができる仕組みです。機能フラグは通常、次の機能に対して使用されます: - ボランティアがテストできるベータ機能のうち、すべてのユーザーに対して有効にできる状態ではない機能。 ベータ機能は、不完全であるか、さらにテストが必要な場合があります。ベータ機能の使用を希望するユーザーは、リスクを受け入れて、機能フラグで機能を明示的に有効にすることを選択できます。機能はデフォルトで無効になっているため、機能を必要としないユーザー、またはシステムのリスクを受け入れたくないユーザーはバグやリグレッションの影響を受けません。 - 近い将来に機能の非推奨化または機能の削除につながる破壊的な変更。 製品の進化に伴い、機能が変更または完全に削除されることがあります。多くの場合既知のバグは修正されますが、ユーザーに対して影響しているバグに対する回避策がすでに判明していることがあります。ユーザーに標準化されたバグ修正を採用することを強制すると、カスタマイズされた設定で他の問題が発生する可能性があります。 そのような場合、機能フラグを使用して、オンデマンドで古い動作から新しい動作に切り替えることができます。これにより、ユーザーは製品の新しいバージョンを採用し、古い動作から新しい動作へのスムーズで永続的な移行を計画するための時間を確保できます。 機能フラグは、環境変数を使用して切り替えます。次のように設定します: - 機能フラグを有効にするには、対応する環境変数を`"true"`または`1`に設定します。 - 
機能フラグを無効にするには、対応する環境変数を`"false"`または`0`に設定します。 ## 利用可能な機能フラグ {#available-feature-flags} | 機能フラグ | デフォルト値 | 非推奨 | 削除予定 | 説明 | |--------------|---------------|------------|--------------------|-------------| | `FF_NETWORK_PER_BUILD` | `false` | {{< icon name="dotted-circle" >}} いいえ | | `docker` executorを使用したDockerの[ビルドごとのネットワーク](../executors/docker.md#network-configurations)の作成を有効にします。 | | `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY` | `false` | {{< icon name="dotted-circle" >}} いいえ | | `false`に設定すると、[#4119](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4119)などのイシューを解決するために、`exec`によるリモートKubernetesコマンドの実行を無効にし、代わりに`attach`を使用します。 | | `FF_USE_DIRECT_DOWNLOAD` | `true` | {{< icon name="dotted-circle" >}} いいえ | | `true`に設定すると、Runnerは最初にGitLabを介してプロキシする代わりに、すべてアーティファクトを直接ダウンロードしようとします。有効にすると、GitLabでオブジェクトストレージが有効になっている場合に、オブジェクトストレージのTLS証明書の検証で発生する問題が原因で、ダウンロードが失敗する可能性があります。[自己署名証明書またはカスタム認証局](tls-self-signed.md)を参照してください。 | | `FF_SKIP_NOOP_BUILD_STAGES` | `true` | {{< icon name="dotted-circle" >}} いいえ | | `false`に設定すると、実行しても効果がない場合でも、すべてのビルドステージが実行されます。 | | `FF_USE_FASTZIP` | `false` | {{< icon name="dotted-circle" >}} いいえ | | Fastzipは、キャッシュ/アーティファクトのアーカイブと解凍を行うための高性能アーカイバーです。 | | `FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、`docker` executorで実行されるジョブに対する`umask 0000`呼び出しの使用が削除されます。代わりに、Runnerはビルドコンテナで使用されるイメージに対して設定されたユーザーのUIDとGIDの検出を試み、(ソースの更新、キャッシュの復元、およびアーティファクトのダウンロード後に)定義済みのコンテナで`chmod`コマンドを実行して、作業ディレクトリとファイルの所有権を変更します。この機能フラグを使用するには、POSIXユーティリティ`id`がビルドイメージにインストールされ、動作可能である必要があります。RunnerはUIDとGIDを取得するために、オプション`-u`と`-g`を指定して`id`を実行します。 | | `FF_ENABLE_BASH_EXIT_CODE_CHECK` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、bashスクリプトは`set -e`のみに依存しませんが、各スクリプトコマンドの実行後にゼロ以外の終了コードを確認します。 | | `FF_USE_WINDOWS_LEGACY_PROCESS_STRATEGY` | `false` | {{< icon name="dotted-circle" >}} いいえ | | GitLab Runner 16.10以降では、デフォルトは`false`です。GitLab Runner 16.9以前では、デフォルトは`true`です。無効にすると、WindowsでRunnerが作成するプロセス(Shell 
executorとカスタムexecutor)が、追加のセットアップを使用して作成され、これによりプロセスの終了が改善されます。`true`に設定すると、従来のプロセスセットアップが使用されます。Windows Runnerを正常にドレインするには、この機能フラグを`false`に設定する必要があります。 | | `FF_USE_NEW_BASH_EVAL_STRATEGY` | `false` | {{< icon name="dotted-circle" >}} いいえ | | `true`に設定すると、実行されたスクリプトの終了コードを適切に検出できるように、Bash `eval`呼び出しがサブShellで実行されます。 | | `FF_USE_POWERSHELL_PATH_RESOLVER` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、RunnerではなくPowerShellが、Runnerがホストされている場所に固有のOS特有のファイルパス関数を使用して、パス名を解決します。 | | `FF_USE_DYNAMIC_TRACE_FORCE_SEND_INTERVAL` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、ログのトレース強制送信間隔は、トレース更新間隔に基づいて動的に調整されます。 | | `FF_SCRIPT_SECTIONS` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、複数行のスクリプトコマンドはジョブログで折りたたみ可能なセクションとして表示され、1行のコマンドは`$`プレフィックスを付けて直接出力されます。これは既知のイシューです。詳細については、[イシュー39294](https://gitlab.com/gitlab-org/gitlab-runner/-/work_items/39294)を参照してください。 | | `FF_ENABLE_JOB_CLEANUP` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、プロジェクトディレクトリがビルドの最後にクリーンアップされます。`GIT_CLONE`を使用すると、プロジェクトディレクトリ全体が削除されます。`GIT_FETCH`を使用すると、一連のGit `clean`コマンドが発行されます。 | | `FF_KUBERNETES_HONOR_ENTRYPOINT` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY`がtrueに設定されていない場合、イメージのDockerエントリポイントが実行されます。 | | `FF_POSIXLY_CORRECT_ESCAPES` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、[`bash`スタイルのANSI-Cの引用符の使い方](https://www.gnu.org/software/bash/manual/html_node/Quoting.html)ではなく[POSIX Shellエスケープ](https://pubs.opengroup.org/onlinepubs/9699919799/utilities/V3_chap02.html#tag_18_02)が使用されます。ジョブ環境がPOSIX準拠のShellを使用している場合は、これを有効にする必要があります。 | | `FF_RESOLVE_FULL_TLS_CHAIN` | `false` | {{< icon name="dotted-circle" >}} いいえ | | GitLab Runner 16.4以降では、デフォルトは`false`です。GitLab Runner 16.3以前では、デフォルトは`true`です。有効にすると、Runnerは`CI_SERVER_TLS_CA_FILE`の自己署名ルート証明書までのTLSチェーン全体を解決します。これは以前、v7.68.0以前のlibcurlとOpenSSLを使用してビルドされたGitクライアントで[Git 
HTTPSクローンを機能させる](tls-self-signed.md#git-cloning)ために必要でした。ただし、古い署名アルゴリズムで署名されたルート証明書を拒否するmacOSなどの一部のオペレーティングシステムでは、証明書解決のプロセスが失敗する可能性があります。証明書の解決が失敗する場合は、この機能を無効にする必要があることがあります。この機能フラグは、[`[runners.feature_flags]`設定](#enable-feature-flag-in-runner-configuration)でのみ無効にできます。 | | `FF_DISABLE_POWERSHELL_STDIN` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、Shell executorとカスタムxecutorのPowerShellスクリプトは、stdinを介して渡されて実行されるのではなく、ファイルによって渡されます。これは、ジョブの`allow_failure:exit_codes`キーワードが正しく機能するために必要です。 | | `FF_USE_POD_ACTIVE_DEADLINE_SECONDS` | `true` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、[ポッドの`activeDeadlineSeconds`](https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#lifecycle)がCI/CDジョブタイムアウトに設定されます。このフラグは、[ポッドのライフサイクル](../executors/kubernetes/_index.md#pod-lifecycle)に影響します。 | | `FF_USE_ADVANCED_POD_SPEC_CONFIGURATION` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、ユーザーは`config.toml`ファイルでポッド仕様全体を設定できます。詳細については、[生成されたポッド仕様を上書きする(実験的機能)](../executors/kubernetes/_index.md#overwrite-generated-pod-specifications)を参照してください。 | | `FF_SET_PERMISSIONS_BEFORE_CLEANUP` | `true` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、クリーンアップ中の削除が確実に成功するように、最初にプロジェクトディレクトリ内のディレクトリとファイルに対する権限が設定されます。 | | `FF_SECRET_RESOLVING_FAILS_IF_MISSING` | `true` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、値が見つからない場合にシークレットの解決が失敗します。 | | `FF_PRINT_POD_EVENTS` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、ビルドポッドが開始するまで、ビルドポッドに関連付けられているすべてのイベントが出力されます。 | | `FF_USE_GIT_BUNDLE_URIS` | `true` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、Git `transfer.bundleURI`設定オプションが`true`に設定されます。このFFはデフォルトで有効になっています。Gitバンドルのサポートを無効にするには、`false`に設定します。 | | `FF_USE_GIT_NATIVE_CLONE` | `false` | {{< icon name="dotted-circle" >}} いいえ | | これが有効になっていて、かつ`GIT_STRATEGY=clone`の場合、プロジェクトのクローンを作成するには、`git-init(1)` + `git-fetch(1)`ではなく`git-clone(1)`コマンドを使用します。これにはGitバージョン2.49以降が必要であり、それが利用できない場合は`init` + `fetch`にフォールバックします。 | | 
`FF_USE_DUMB_INIT_WITH_KUBERNETES_EXECUTOR` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、すべてのスクリプトの実行に`dumb-init`が使用されます。これにより、`dumb-init`をヘルパーコンテナとビルドコンテナの最初のプロセスとして実行できるようになります。 | | `FF_USE_INIT_WITH_DOCKER_EXECUTOR` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、Docker executorは`--init`オプション(`tini-init`をPID 1として実行)を使用して、サービスコンテナとビルドコンテナを起動します。 | | `FF_LOG_IMAGES_CONFIGURED_FOR_JOB` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、Runnerは受信した各ジョブに定義されているイメージとサービスイメージの名前をログに記録します。 | | `FF_USE_DOCKER_AUTOSCALER_DIAL_STDIO` | `true` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると(デフォルト)、リモートDockerデーモンへのトンネル接続に`docker system stdio`が使用されます。無効にすると、SSH接続ではネイティブSSHトンネルが使用され、WinRM接続では最初に「fleeting-proxy」ヘルパーバイナリがデプロイされます。 | | `FF_CLEAN_UP_FAILED_CACHE_EXTRACT` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、キャッシュ抽出の失敗を検出し、残された部分的なキャッシュコンテンツをクリーンアップするためのコマンドがビルドスクリプトに挿入されます。 | | `FF_USE_WINDOWS_JOB_OBJECT` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、RunnerがShell executorとカスタムexecutorを使用してWindows上に作成するプロセスごとに、ジョブオブジェクトが作成されます。プロセスを強制終了するために、Runnerはジョブオブジェクトを閉じます。これにより、強制終了が困難なプロセスの終了が改善されます。 | | `FF_TIMESTAMPS` | `true` | {{< icon name="dotted-circle" >}} いいえ | | 無効にすると、各ログトレース行の先頭にタイムスタンプは追加されません。 | | `FF_DISABLE_AUTOMATIC_TOKEN_ROTATION` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、自動トークンローテーションが制限され、トークンの有効期限が近づくと警告がログに記録されます。 | | `FF_USE_LEGACY_GCS_CACHE_ADAPTER` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、従来のGCSキャッシュアダプターが使用されます。無効にすると(デフォルト)、認証にGoogle Cloud StorageのSDKを使用する新しいGCSキャッシュアダプターが使用されます。これにより、GKEのワークロードID設定など、従来のアダプターでは解決が困難だった環境での認証の問題が解決されます。 | | `FF_DISABLE_UMASK_FOR_KUBERNETES_EXECUTOR` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、Kubernetes executorで実行されるジョブに対する`umask 
0000`呼び出しが削除されます。代わりに、Runnerはビルドコンテナの実行ユーザーのユーザーID(UID)とグループID(GID)を検出します。またRunnerは、(ソースの更新、キャッシュの復元、およびアーティファクトのダウンロード後に)定義済みのコンテナで`chown`コマンドを実行することにより、作業ディレクトリとファイルの所有権を変更します。 | | `FF_USE_LEGACY_S3_CACHE_ADAPTER` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、従来のS3キャッシュアダプターが使用されます。無効にすると(デフォルト)、認証にAmazonのS3 SDKを使用する新しいS3キャッシュアダプターが使用されます。これにより、カスタムSTSエンドポイントなど、従来のアダプターでは解決が困難だった環境での認証の問題が解決されます。 | | `FF_GIT_URLS_WITHOUT_TOKENS` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、Git設定またはコマンドの実行中にGitLab Runnerはジョブトークンをどこにも埋め込みません。代わりに、環境変数を使用してジョブトークンを取得するGit認証情報ヘルパーをセットアップします。このアプローチではトークンの保存が制限され、トークンリークのリスクが軽減されます。 | | `FF_WAIT_FOR_POD_TO_BE_REACHABLE` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、Runnerはポッド状態が「Running」になるまで、およびポッドに証明書がアタッチされた状態で準備が整うまで待機します。 | | `FF_MASK_ALL_DEFAULT_TOKENS` | `true` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、GitLab Runnerはすべてのデフォルトトークンパターンを自動的にマスクします。 | | `FF_EXPORT_HIGH_CARDINALITY_METRICS` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、Runnerはカーディナリティが高いメトリクスをエクスポートします。大量のデータをインジェストすることを避けるために、この機能フラグを有効にする場合は特に注意する必要があります。詳細については、[フリートスケーリング](../fleet_scaling/_index.md)を参照してください。 | | `FF_USE_FLEETING_ACQUIRE_HEARTBEATS` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、ジョブがインスタンスに割り当てられる前に、フリートインスタンスの接続が確認されます。 | | `FF_USE_EXPONENTIAL_BACKOFF_STAGE_RETRY` | `true` | {{< icon name="dotted-circle" >}} いいえ | | これが有効の場合、`GET_SOURCES_ATTEMPTS`、`ARTIFACT_DOWNLOAD_ATTEMPTS`、`RESTORE_CACHE_ATTEMPTS`、`EXECUTOR_JOB_SECTION_ATTEMPTS`の再試行では、指数バックオフ(5秒~5分)が使用されます。 | | `FF_USE_ADAPTIVE_REQUEST_CONCURRENCY` | `true` | {{< icon name="dotted-circle" >}} いいえ | | これが有効の場合、`request_concurrency`の設定が最大並行処理値になり、同時リクエスト数はジョブリクエストの成功率に基づいて調整されます。 | | `FF_USE_GITALY_CORRELATION_ID` | `true` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、すべてのGit HTTPリクエストに`X-Gitaly-Correlation-ID`ヘッダーが追加されます。無効にすると、Git操作はGitaly Correlation IDヘッダーなしで実行されます。 | | `FF_USE_GIT_PROACTIVE_AUTH` | 
`false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、Runnerは`http.proactiveAuth=basic` Gitの設定オプションを`git clone`および`git fetch`コマンドに渡します。その結果、Gitは`401`応答を待つ代わりに、認証情報を積極的に送信します。この動作により、パブリックプロジェクトに対してユーザー名がGitalyに伝播されることが保証されます。 | | `FF_HASH_CACHE_KEYS` | `false` | {{< icon name="dotted-circle" >}} いいえ | | GitLab Runnerがキャッシュを作成または抽出する際に、ローカルと分散キャッシュ(S3など)の両方に対して、使用前にキャッシュキーをハッシュします(SHA256)。詳細については、[キャッシュキーの処理](advanced-configuration.md#cache-key-handling)を参照してください。 | | `FF_ENABLE_JOB_INPUTS_INTERPOLATION` | `true` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、ジョブの入力が補間されます。詳細については、[&17833](https://gitlab.com/groups/gitlab-org/-/epics/17833)を参照してください。 | | `FF_USE_JOB_ROUTER` | `false` | {{< icon name="dotted-circle" >}} いいえ | | GitLab RunnerがGitLabに直接接続するのではなく、ジョブルーターに接続してジョブをフェッチするようにします。 | | `FF_SCRIPT_TO_STEP_MIGRATION` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、ユーザースクリプトはステップに移行され、ステップRunnerで実行されます。 | | `FF_CONCRETE` | `false` | {{< icon name="dotted-circle" >}} いいえ | | 有効にすると、従来のスクリプト実行はstep-runnerに移行され、step-runnerで実行されます。 | ## パイプライン設定で機能フラグを有効にする {#enable-feature-flag-in-pipeline-configuration} [CI/CD変数](https://docs.gitlab.com/ci/variables/)を使用して、機能フラグを有効にできます: - パイプライン内のすべてのジョブ(グローバル): ```yaml variables: FEATURE_FLAG_NAME: 1 ``` - 単一ジョブ: ```yaml job: stage: test variables: FEATURE_FLAG_NAME: 1 script: - echo "Hello" ``` ## Runner環境変数で機能フラグを有効にする {#enable-feature-flag-in-runner-environment-variables} Runnerが実行するすべてのジョブで機能を有効にするには、[Runner設定](advanced-configuration.md)で機能フラグを[`environment`](advanced-configuration.md#the-runners-section)変数として指定します: ```toml [[runners]] name = "example-runner" url = "https://gitlab.com/" token = "TOKEN" limit = 0 executor = "docker" builds_dir = "" shell = "" environment = ["FEATURE_FLAG_NAME=1"] ``` ## Runner設定で機能フラグを有効にする {#enable-feature-flag-in-runner-configuration} 機能フラグを有効にするには、`[runners.feature_flags]`に機能フラグを指定します。この設定では、ジョブが機能フラグの値を上書きすることを防止できます。 一部の機能フラグは、ジョブの実行方法に対処しないため、この設定を行うときにのみ使用できます。 
```toml [[runners]] name = "example-runner" url = "https://gitlab.com/" token = "TOKEN" executor = "docker" [runners.feature_flags] FF_USE_DIRECT_DOWNLOAD = true ``` ================================================ FILE: docs-locale/ja-jp/configuration/gpus.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: グラフィカルプロセッシングユニット(GPU)の使用 --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} {{< history >}} - GitLab Runner 13.9で導入。 {{< /history >}} GitLab Runnerは、グラフィカルプロセッシングユニット(GPU)の使用をサポートしています。次のセクションでは、さまざまなexecutorに対してGPUを有効にするために必要な設定について説明します。 ## Shell executor {#shell-executor} 必要なRunnerの設定はありません。 ## Docker executor {#docker-executor} {{< alert type="warning" >}} Podmanをコンテナのランタイムエンジンとして使用している場合、GPUは検出されません。詳細については、[issue 39095](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/39095)を参照してください。 {{< /alert >}} 前提条件: - [NVIDIAドライバー](https://docs.nvidia.com/datacenter/tesla/driver-installation-guide/index.html)をインストールします。 - [NVIDIAコンテナツールキット](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html)をインストールします。 [`runners.docker`セクション](advanced-configuration.md#the-runnersdocker-section)で、`gpus`または`service_gpus`の設定オプションを使用します: ```toml [runners.docker] gpus = "all" service_gpus = "all" ``` ## Docker Machine executor {#docker-machine-executor} [Docker MachineのGitLabフォークのドキュメント](../executors/docker_machine.md#using-gpus-on-google-compute-engine)を参照してください。 ## Kubernetes executor {#kubernetes-executor} 前提条件: - [ノードセレクターがGPUをサポートするノードを選択](https://kubernetes.io/docs/tasks/manage-gpus/scheduling-gpus/)していることを確認してください。 - `FF_USE_ADVANCED_POD_SPEC_CONFIGURATION`機能フラグを有効にします。 GPUサポートを有効にするには、ポッドの仕様でGPUリソースをリクエストするようにRunnerを設定します。例: ```toml 
[[runners.kubernetes.pod_spec]] name = "gpu" patch = ''' containers: - name: build resources: requests: nvidia.com/gpu: 1 limits: nvidia.com/gpu: 1 ''' patch_type = "strategic" # <--- `strategic` patch_type ``` ジョブの要件に基づいて、`requests`および`limits`のGPU数を調整します。 GitLab Runnerは、[Amazon Elastic Kubernetes Serviceでテスト](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4355)されており、[GPU対応のインスタンス](https://docs.aws.amazon.com/dlami/latest/devguide/gpu.html)を備えています。 ## GPUが有効になっていることを検証する {#validate-that-gpus-are-enabled} NVIDIA GPUでRunnerを使用できます。NVIDIA GPUの場合、CIジョブに対してGPUが有効になっていることを確認する方法の1つは、スクリプトの先頭で`nvidia-smi`を実行することです。例: ```yaml train: script: - nvidia-smi ``` GPUが有効になっている場合、`nvidia-smi`の出力には、使用可能なデバイスが表示されます。次の例では、単一のNVIDIA Tesla P4が有効になっています: ```shell +-----------------------------------------------------------------------------+ | NVIDIA-SMI 450.51.06 Driver Version: 450.51.06 CUDA Version: 11.0 | |-------------------------------+----------------------+----------------------+ | GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC | | Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. | | | | MIG M. | |===============================+======================+======================| | 0 Tesla P4 Off | 00000000:00:04.0 Off | 0 | | N/A 43C P0 22W / 75W | 0MiB / 7611MiB | 3% Default | | | | N/A | +-------------------------------+----------------------+----------------------+ +-----------------------------------------------------------------------------+ | Processes: | | GPU GI CI PID Type Process name GPU Memory | | ID ID Usage | |=============================================================================| | No running processes found | +-----------------------------------------------------------------------------+ ``` ハードウェアがGPUをサポートしていない場合、`nvidia-smi`が見つからないか、ドライバーと通信できないため、失敗するはずです: ```shell modprobe: ERROR: could not insert 'nvidia': No such device NVIDIA-SMI has failed because it couldn't communicate with the NVIDIA driver. 
Make sure that the latest NVIDIA driver is installed and running. ``` ================================================ FILE: docs-locale/ja-jp/configuration/init.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runnerのシステムサービス --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerは、基盤となるOSを検出し、最終的に初期化システムに基づいてサービスファイルをインストールするために、[Go言語の`service`ライブラリ](https://github.com/kardianos/service)を使用します。 {{< alert type="note" >}} パッケージ`service`は、プログラムをサービス(デーモン)としてインストール、アンインストール、起動、停止、および実行します。Windows XP +、Linux(systemd、Upstart、およびSystem V)、およびmacOS(`launchd`)がサポートされています。 {{< /alert >}} GitLab Runnerが[インストールされる](../install/_index.md)と、サービスファイルが自動的に作成されます: - **systemd**:`/etc/systemd/system/gitlab-runner.service` - **Upstart**:`/etc/init/gitlab-runner` ## カスタム環境変数 {#setting-custom-environment-variables} カスタム環境変数を使用してGitLab Runnerを実行できます。たとえば、Runnerの環境変数に`GOOGLE_APPLICATION_CREDENTIALS`を定義するとします。このアクションは、[`environment`設定](advanced-configuration.md#the-runners-section)とは異なります。これは、Runnerによって実行されるすべてのジョブに自動的に追加される変数を定義します。 ### systemdのカスタマイズ {#customizing-systemd} systemdを使用するRunnerの場合は、エクスポートする変数ごとに1つの`Environment=key=value`行を使用して、`/etc/systemd/system/gitlab-runner.service.d/env.conf`を作成します。 次に例を示します: ```toml [Service] Environment=GOOGLE_APPLICATION_CREDENTIALS=/etc/gitlab-runner/gce-credentials.json ``` 次に、設定をリロードします: ```shell systemctl daemon-reload systemctl restart gitlab-runner.service ``` ### Upstartのカスタマイズ {#customizing-upstart} Upstartを使用するRunnerの場合は、`/etc/init/gitlab-runner.override`を作成し、目的の変数をエクスポートします。 次に例を示します: ```shell export GOOGLE_APPLICATION_CREDENTIALS="/etc/gitlab-runner/gce-credentials.json" ``` これを有効にするには、Runnerを再起動します。 ## デフォルトの停止動作のオーバーライド 
{#overriding-default-stopping-behavior} 場合によっては、サービスのデフォルトの動作をオーバーライドすることが必要な場合があります。 たとえば、GitLab Runnerをアップグレードするときは、実行中のすべてのジョブが完了するまで、正常に停止する必要があります。ただし、systemd、Upstart、またはその他のサービスは、気付かなくてもすぐにプロセスを再起動する可能性があります。 そのため、GitLab Runnerをアップグレードすると、インストールスクリプトは、当時新しいジョブを処理していた可能性のあるRunnerプロセスを強制終了して再起動します。 ### systemdのオーバーライド {#overriding-systemd} systemdを使用するRunnerの場合は、次のコンテンツを含む`/etc/systemd/system/gitlab-runner.service.d/kill.conf`を作成します: ```toml [Service] TimeoutStopSec=7200 KillSignal=SIGQUIT ``` これらの2つの設定をsystemdユニット設定に追加すると、Runnerを停止できます。Runnerが停止した後、systemdは`SIGQUIT`を強制終了シグナルとして使用して、プロセスを停止します。さらに、停止コマンドに2時間のタイムアウトが設定されています。このタイムアウトの前にジョブが正常に終了しない場合、systemdは`SIGKILL`を使用してプロセスを強制終了します。 ### Upstartのオーバーライド {#overriding-upstart} Upstartを使用するRunnerの場合は、次のコンテンツを含む`/etc/init/gitlab-runner.override`を作成します: ```shell kill signal SIGQUIT kill timeout 7200 ``` これらの2つの設定をUpstartユニット設定に追加すると、Runnerを停止できます。Upstartは上記のsystemdと同じことを行います。 ================================================ FILE: docs-locale/ja-jp/configuration/macos_setup.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: macOS Runnerをセットアップする --- macOS RunnerでCI/CDジョブを実行するには、次の手順を順番に実行します。 完了すると、GitLab RunnerがmacOSマシン上で実行され、個々のRunnerがジョブを処理できるようになります。 - システムShellをBashに変更します。 - Homebrew、rbenv、およびGitLab Runnerをインストールします。 - rbenvを設定し、Rubyをインストールします。 - Xcodeをインストールします。 - Runnerを登録します。 - CI/CDを設定します。 ## 前提条件 {#prerequisites} はじめる前: - macOSの最新バージョンをインストールします。このガイドは11.4で開発されました。 - ターミナルまたはSSHでマシンにアクセスできることを確認します。 ## システムShellをBashに変更する {#change-the-system-shell-to-bash} 新しいバージョンのmacOSでは、デフォルトのShellとしてZshが使用されます。ただし、RunnerのShell executorでは、Bash固有の構文と機能を使用するものが多いため、CI/CDスクリプトが正しく実行されるようにBashが必要です。 1. マシンに接続し、デフォルトのShellを確認します: ```shell echo $SHELL ``` 1. 
結果が`/bin/bash`でない場合は、次を実行してShellを変更します: ```shell chsh -s /bin/bash ``` 1. パスワードを入力します。 1. ターミナルを再起動するか、SSHを使用して再接続します。 1. `echo $SHELL`をもう一度実行します。結果は`/bin/bash`になるはずです。 ## Homebrew、rbenv、GitLab Runnerをインストールする {#install-homebrew-rbenv-and-gitlab-runner} Runnerがマシンに接続してジョブを実行するには、特定の環境オプションが必要です。 1. [Homebrew](https://brew.sh/)パッケージマネージャーをインストールします: ```shell /bin/bash -c "$(curl "https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh")" ``` 1. Rubyバージョンマネージャーである[`rbenv`](https://github.com/rbenv/rbenv)とGitLab Runnerをセットアップします: ```shell brew install rbenv gitlab-runner brew services start gitlab-runner ``` ## rbenvを設定してRubyをインストールする {#configure-rbenv-and-install-ruby} rbenvを設定し、Rubyをインストールします。 1. rbenvをBash環境に追加します: ```shell echo 'if which rbenv > /dev/null; then eval "$(rbenv init -)"; fi' >> ~/.bash_profile source ~/.bash_profile ``` 1. Ruby 3.3.xをインストールし、マシン全体のデフォルトとして設定します: ```shell rbenv install 3.3.4 rbenv global 3.3.4 ``` ## Xcodeをインストールします {#install-xcode} Xcodeをインストールして設定します。 1. 次のいずれかの場所に移動して、Xcodeをインストールします: - Apple App Store。 - [Apple Developer Portal](https://developer.apple.com/)。 - [`xcode-install`](https://github.com/xcpretty/xcode-install)。このプロジェクトは、コマンドラインからさまざまなAppleの依存関係を簡単にダウンロードできるようにすることを目的としています。 1. ライセンスに同意し、推奨される追加コンポーネントをインストールします。これを行うには、Xcodeを開いてプロンプトに従うか、ターミナルで次のコマンドを実行します: ```shell sudo xcodebuild -runFirstLaunch ``` 1. Xcodeがビルド中に適切なコマンドラインツールを読み込むように、アクティブなデベロッパーディレクトリを更新します: ```shell sudo xcode-select -s /Applications/Xcode.app/Contents/Developer ``` ### プロジェクトRunnerを作成して登録する {#create-and-register-a-project-runner} [プロジェクトRunnerを作成して登録](https://docs.gitlab.com/ci/runners/runners_scope/#create-a-project-runner-with-a-runner-authentication-token)します。 Runnerを作成して登録するとき: - GitLabで、タグ`macos`を追加して、macOSジョブがこのmacOSマシンで実行されるようにします。 - コマンドラインで、`shell`を[executor](../executors/_index.md)として選択します。 Runnerを登録すると、コマンドラインに成功メッセージが表示されます: ```shell Runner registered successfully. 
Feel free to start it, but if it's running already the config should be automatically reloaded! ``` Runnerを表示するには: 1. 上部のバーで、**検索または移動先**を選択して、プロジェクトまたはグループを見つけます。 1. **設定 > CI/CD**を選択します。 1. **Runner**を展開します。 ### CI/CDを設定する {#configure-cicd} GitLabプロジェクトで、CI/CDを設定してビルドを開始します。このサンプルの`.gitlab-ci.yml`ファイルを使用できます。タグが、Runnerの登録に使用したタグと一致することを確認してください。 ```yaml stages: - build - test variables: LANG: "en_US.UTF-8" before_script: - gem install bundler - bundle install - gem install cocoapods - pod install build: stage: build script: - bundle exec fastlane build tags: - macos test: stage: test script: - bundle exec fastlane test tags: - macos ``` macOS Runnerは、プロジェクトをビルドする必要があります。 ================================================ FILE: docs-locale/ja-jp/configuration/oracle_cloud_performance.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Oracle Cloud Infrastructure用のGitLab Runnerの設定 --- Container Runtime Interface (CRI) を使用するOracle Cloud Infrastructure (OCI) 環境で実行されるGitLabコード品質ジョブでは、パフォーマンスの低下が発生する可能性があります。 OCIでのGitLab Runnerのパフォーマンスを最適化するには、次の手順に従います: 1. 空のディレクトリボリュームをGitLab Runnerの設定に追加します。 1. 
`.gitlab-ci.yml`ファイルで特定のDockerドライバー設定を設定します。 この設定は、以下の環境に適用されます: - クラウドプロバイダー: Oracle Cloud Infrastructure (OCI) - ランタイム: Container Runtime Interface (CRI) - プロセス: GitLabコード品質ジョブ - Runnerタイプ: GitLab Self-Managed Runners ## 空のディレクトリボリュームを追加 {#add-an-empty-directory-volume} GitLab Runnerの設定用に空のディレクトリを定義するには、次のブロックを`values.yaml`ファイルのrunnersセクションに追加します: ```yaml [[runners.kubernetes.volumes.empty_dir]] mount_path = "/var/lib" name = "docker-data" ``` ### Runnerの設定例 {#example-runner-configuration} 次の例は、修正を含むGitLab Runnerの完全なHelmチャート`values.yaml`を示しています: ```yaml image: registry: registry.gitlab.com image: gitlab-org/gitlab-runner tag: alpine-v16.11.0 useTini: false imagePullPolicy: IfNotPresent gitlabUrl: https://gitlab.com/ runnerToken: "" terminationGracePeriodSeconds: 3600 concurrent: 100 shutdown_timeout: 0 checkInterval: 5 logLevel: debug sessionServer: enabled: false ## For RBAC support: rbac: create: true rules: [] clusterWideAccess: false podSecurityPolicy: enabled: false resourceNames: - gitlab-runner metrics: enabled: false portName: metrics port: 9252 serviceMonitor: enabled: false service: enabled: false type: ClusterIP runners: config: | [[runners]] output_limit = 200960 [runners.kubernetes] privileged = true allow_privilege_escalation = true namespace = "{{.Release.Namespace}}" image = "ubuntu:22.04" helper_image_flavor = "ubuntu" pull_policy = "if-not-present" executor = "kubernetes" [[runners.kubernetes.volumes.host_path]] name = "buildah" mount_path = "/var/lib/containers/storage" read_only = false [runners.kubernetes.volumes] [[runners.kubernetes.volumes.empty_dir]] mount_path = "/var/lib" name = "docker-data" [[runners.kubernetes.services]] alias = "dind" command = [ "--host=tcp://0.0.0.0:2375", "--host=unix://var/run/docker.sock", ] [runners.cache] Type = "s3" Path = "gitlab_runner" Shared = true [runners.cache.s3] BucketName = "gitlab-shared-caching" BucketLocation = "ap-singapore-1" ServerAddress = 
".compat.objectstorage.ap-singapore-1.oraclecloud.com" AccessKey = "" SecretKey = "" configPath: "" tags: "" cache: {} securityContext: allowPrivilegeEscalation: false readOnlyRootFilesystem: false runAsNonRoot: true privileged: false capabilities: drop: ["ALL"] strategy: {} podSecurityContext: runAsUser: 100 fsGroup: 65533 resources: {} affinity: {} topologySpreadConstraints: {} nodeSelector: {} tolerations: [] hostAliases: [] deploymentAnnotations: {} deploymentLabels: {} podAnnotations: {} podLabels: {} priorityClassName: "" secrets: [] configMaps: {} volumeMounts: [] volumes: [] ``` ## `.gitlab-ci.yml`ファイルを更新します {#update-your-gitlab-ciyml-file} デフォルトの`overlay2`ドライバーの選択を解除するには、次のキーを空の変数として既存のコード品質ジョブに追加します: ```shell DOCKER_DRIVER: "" ``` ### コード品質ジョブ設定の例 {#example-code-quality-job-configuration} 次の例は、`.gitlab-ci.yml`ファイルのコード品質ジョブ設定を示しています: ```yaml code_quality: services: - name: $CODE_QUALITY_DIND_IMAGE command: ['--tls=false', '--host=tcp://0.0.0.0:2375'] variables: CODECLIMATE_PREFIX: $CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX/ CODECLIMATE_REGISTRY_USERNAME: $CI_DEPENDENCY_PROXY_USER CODECLIMATE_REGISTRY_PASSWORD: $CI_DEPENDENCY_PROXY_PASSWORD DOCKER_DRIVER: "" ``` ================================================ FILE: docs-locale/ja-jp/configuration/proxy.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: プロキシの背後でGitLab Runnerを実行する --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} このガイドは、Docker executorでGitLab Runnerをプロキシの背後で動作させることに特化しています。 続行する前に、[Dockerがインストール](https://docs.docker.com/get-started/get-docker/)され、同じマシンに[GitLab Runner](../install/_index.md)がインストールされていることを確認してください。 ## `cntlm`の設定 {#configuring-cntlm} {{< alert type="note" >}} 
すでに認証なしでプロキシを使用している場合は、このセクションはオプションであり、[Dockerの設定](#configuring-docker-for-downloading-images)に直接スキップできます。`cntlm`の設定は、認証付きのプロキシの背後にいる場合にのみ必要ですが、いずれにしても使用することをお勧めします。 {{< /alert >}} [`cntlm`](https://github.com/versat/cntlm)はローカルプロキシとして使用できるLinuxプロキシであり、プロキシの詳細を手動で追加するのに比べて、次の2つの大きな利点があります: - 変更する必要がある認証情報は1つのソースのみ - 認証情報はDocker Runnerからアクセスできません [`cntlm`をインストール](https://www.howtoforge.com/linux-ntlm-authentication-proxy-isa-server-with-cntlm)したと仮定して、最初に設定する必要があります。 ### `cntlm`が`docker0`インターフェースをリッスンするようにする {#make-cntlm-listen-to-the-docker0-interface} セキュリティを強化し、インターネットから保護するために、`cntlm`をバインドして、コンテナが到達できるIPアドレスを持つ`docker0`インターフェースでリッスンします。Dockerホスト上の`cntlm`にこのアドレスのみにバインドするように指示すると、Dockerコンテナはそれに到達できますが、外部には到達できません。 1. Dockerが使用しているIPを見つけます: ```shell ip -4 -oneline addr show dev docker0 ``` IPアドレスは通常`172.17.0.1`です。これを`docker0_interface_ip`と呼びましょう。 1. `cntlm` (`/etc/cntlm.conf`) の設定ファイルを開きます。ユーザー名、パスワード、ドメイン、プロキシホストを入力し、前の手順で見つけた`Listen` IPアドレスを設定します。次のようになります: ```plaintext Username testuser Domain corp-uk Password password Proxy 10.0.0.41:8080 Proxy 10.0.0.42:8080 Listen 172.17.0.1:3128 # Change to your docker0 interface IP ``` 1. 変更を保存して、サービスを再起動します: ```shell sudo systemctl restart cntlm ``` ## イメージをダウンロードするためのDockerの設定 {#configuring-docker-for-downloading-images} {{< alert type="note" >}} 以下は、systemdをサポートするOSに適用されます。 {{< /alert >}} プロキシの使用方法については、[Dockerドキュメント](https://docs.docker.com/engine/daemon/proxy/)を参照してください。 サービスファイルは次のようになります: ```ini [Service] Environment="HTTP_PROXY=http://docker0_interface_ip:3128/" Environment="HTTPS_PROXY=http://docker0_interface_ip:3128/" ``` ## GitLab Runner設定へのプロキシ変数の追加 {#adding-proxy-variables-to-the-gitlab-runner-configuration} プロキシ変数は、プロキシの背後からGitLab.comに接続できるように、GitLab Runner設定にも追加する必要があります。 このアクションは、上記のプロキシをDockerサービスに追加するのと同じです: 1. `gitlab-runner`サービスのsystemdドロップインディレクトリを作成します: ```shell mkdir /etc/systemd/system/gitlab-runner.service.d ``` 1. 
`/etc/systemd/system/gitlab-runner.service.d/http-proxy.conf`というファイルを作成して、`HTTP_PROXY`環境変数を追加します: ```ini [Service] Environment="HTTP_PROXY=http://docker0_interface_ip:3128/" Environment="HTTPS_PROXY=http://docker0_interface_ip:3128/" ``` GitLab RunnerをGitLab Self-Managedインスタンスのような内部URLに接続するには、`NO_PROXY`環境変数の値を設定します。 ```ini [Service] Environment="HTTP_PROXY=http://docker0_interface_ip:3128/" Environment="HTTPS_PROXY=http://docker0_interface_ip:3128/" Environment="NO_PROXY=gitlab.example.com" ``` 1. ファイルを保存して、変更をフラッシュします: ```shell systemctl daemon-reload ``` 1. GitLab Runnerを再起動します: ```shell sudo systemctl restart gitlab-runner ``` 1. 設定が読み込まれたことを確認します: ```shell systemctl show --property=Environment gitlab-runner ``` 以下が表示されるはずです: ```ini Environment=HTTP_PROXY=http://docker0_interface_ip:3128/ HTTPS_PROXY=http://docker0_interface_ip:3128/ ``` ## Dockerコンテナへのプロキシの追加 {#adding-the-proxy-to-the-docker-containers} [Runnerを登録](../register/_index.md)した後、プロキシ設定をDockerコンテナに伝播させることができます(たとえば、`git clone`など)。 これを行うには、`/etc/gitlab-runner/config.toml`を編集し、次の内容を`[[runners]]`セクションに追加する必要があります: ```toml pre_get_sources_script = "git config --global http.proxy $HTTP_PROXY; git config --global https.proxy $HTTPS_PROXY" environment = ["https_proxy=http://docker0_interface_ip:3128", "http_proxy=http://docker0_interface_ip:3128", "HTTPS_PROXY=docker0_interface_ip:3128", "HTTP_PROXY=docker0_interface_ip:3128"] ``` ここで、`docker0_interface_ip`は`docker0`インターフェースのIPアドレスです。 {{< alert type="note" >}} この例では、特定のプログラムが`HTTP_PROXY`を予期し、他のプログラムが`http_proxy`を予期するため、小文字と大文字の両方の変数を設定しています。残念ながら、この種の環境変数には[標準](https://unix.stackexchange.com/questions/212894/whats-the-right-format-for-the-http-proxy-environment-variable-caps-or-no-ca#212972)がありません。 {{< /alert >}} ## `dind`サービス使用時のプロキシ設定 {#proxy-settings-when-using-dind-service} [Docker-in-Docker 
executor](https://docs.gitlab.com/ci/docker/using_docker_build/#use-docker-in-docker)(`dind`)を使用する場合、`docker:2375,docker:2376`を`NO_PROXY`環境変数で指定する必要がある場合があります。ポートは必須です。そうしないと、`docker push`がブロックされます。 `dind`の`dockerd`とローカル`docker`クライアント間の通信(こちらで説明:)は、ルートのDocker設定に保持されているプロキシ変数を使用します。 これを設定するには、`/root/.docker/config.json`を編集して、完全なプロキシ設定を含める必要があります(例:): ```json { "proxies": { "default": { "httpProxy": "http://proxy:8080", "httpsProxy": "http://proxy:8080", "noProxy": "docker:2375,docker:2376" } } } ``` Docker executorのコンテナに設定を渡すには、`$HOME/.docker/config.json`もコンテナ内に作成する必要があります。これは、たとえば、`.gitlab-ci.yml`の`before_script`としてスクリプト化できます: ```yaml before_script: - mkdir -p $HOME/.docker/ - 'echo "{ \"proxies\": { \"default\": { \"httpProxy\": \"$HTTP_PROXY\", \"httpsProxy\": \"$HTTPS_PROXY\", \"noProxy\": \"$NO_PROXY\" } } }" > $HOME/.docker/config.json' ``` または、影響を受ける`gitlab-runner`(`/etc/gitlab-runner/config.toml`)の設定で、: ```toml [[runners]] pre_build_script = "mkdir -p $HOME/.docker/ && echo \"{ \\\"proxies\\\": { \\\"default\\\": { \\\"httpProxy\\\": \\\"$HTTP_PROXY\\\", \\\"httpsProxy\\\": \\\"$HTTPS_PROXY\\\", \\\"noProxy\\\": \\\"$NO_PROXY\\\" } } }\" > $HOME/.docker/config.json" ``` {{< alert type="note" >}} TOMLファイル内で単一の文字列として指定されたシェルを使用してJSONファイルが作成されるため、追加レベルのエスケープ`"`が必要です。これはYAMLではないため、`:`をエスケープしないでください。 {{< /alert >}} `NO_PROXY`リストを拡張する必要がある場合、ワイルドカード`*`はサフィックスに対してのみ機能し、プレフィックスまたはCIDR表記では機能しません。詳細については、およびを参照してください。 ## レート制限されたリクエストの処理 {#handling-rate-limited-requests} GitLabインスタンスは、悪用を防ぐためにAPIリクエストに対するレート制限があるリバースプロキシの背後にある可能性があります。GitLab RunnerはAPIに複数のリクエストを送信し、これらのレート制限を超える可能性があります。 その結果、GitLab Runnerは、次の[再試行ロジック](#retry-logic)を使用して、レート制限されたシナリオを処理します: ### 再試行ロジック {#retry-logic} GitLab Runnerが`429 Too Many Requests`応答を受信すると、この再試行シーケンスに従います: 1. Runnerは、応答ヘッダーで`RateLimit-ResetTime`ヘッダーを確認します。 - `RateLimit-ResetTime`ヘッダーには、`Wed, 21 Oct 2015 07:28:00 GMT`のような有効なHTTP日付(RFC1123)である値が必要です。 - ヘッダーが存在し、有効な値がある場合、Runnerは指定された時間まで待機し、別のリクエストを発行します。 1. 
`RateLimit-ResetTime`ヘッダーが無効または欠落している場合、Runnerは応答ヘッダーで`Retry-After`ヘッダーを確認します。 - `Retry-After`ヘッダーには、`Retry-After: 30`のような秒形式の値が必要です。 - ヘッダー形式が存在し、有効な値がある場合、Runnerは指定された時間まで待機し、別のリクエストを発行します。 1. 両方のヘッダーがないか無効な場合、Runnerはデフォルトの間隔を待機し、別のリクエストを発行します。 Runnerは、失敗したリクエストを最大5回再試行します。すべての再試行が失敗した場合、Runnerは最終応答からのエラーをログに記録します。 ### サポートされているヘッダー形式 {#supported-header-formats} | ヘッダー | 形式 | 例 | |-----------------------|---------------------|---------------------------------| | `RateLimit-ResetTime` | HTTP日付(RFC1123) | `Wed, 21 Oct 2015 07:28:00 GMT` | | `Retry-After` | 秒 | `30` | {{< alert type="note" >}} ヘッダー`RateLimit-ResetTime`は、すべてのヘッダーキーが[`http.CanonicalHeaderKey`](https://pkg.go.dev/net/http#CanonicalHeaderKey)関数を介して実行されるため、大文字と小文字が区別されません。 {{< /alert >}} ================================================ FILE: docs-locale/ja-jp/configuration/runner_autoscale_aws/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: AWS EC2でRunnerのDocker Machineオートスケールを設定する --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerの最大の利点の1つは、ビルドがすぐに処理されるようにするために、VMを自動的に起動および停止できることです。これは優れた機能であり、適切に使用すれば、Runnerを常時使用していない場合に、費用対効果が高くスケーラブルなソリューションが必要な状況で非常に役立ちます。 ## はじめに {#introduction} このチュートリアルでは、AWSでGitLab Runnerを適切に設定する方法について説明します。AWSのインスタンスは、新しいDockerインスタンスをオンデマンドで起動するRunnerマネージャーとして機能します。これらのインスタンスのRunnerは自動的に作成されます。Runnerはこのガイドで説明されているパラメータを使用します。作成後の手動設定は必要ありません。 さらに[AmazonのEC2スポットインスタンス](https://aws.amazon.com/ec2/spot/)を利用することで、非常に強力なオートスケールマシンを使用しながら、GitLab Runnerインスタンスのコストを大幅に削減できます。 ## 前提条件 {#prerequisites} 設定のほとんどがAWSで行われるため、Amazon Web Services(AWS)に関する知識が必要です。 Docker 
Machineの[`amazonec2`ドライバーのドキュメント](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md)をざっと読んで、この記事で後述するパラメータを理解しておくことをお勧めします。 GitLab Runnerはネットワーク経由でGitLabインスタンスと通信する必要があります。このことは、AWSセキュリティグループを設定する場合やDNS設定を行う場合に考慮する必要があります。 たとえば、ネットワークセキュリティを強化するために、EC2リソースを別のVPCでパブリックトラフィックから分離できます。ご使用の環境は異なる可能性があるため、状況に対して最適なものを検討してください。 ### AWSセキュリティグループ {#aws-security-groups} Docker Machineは、Dockerデーモンとの通信に必要なポート`2376`およびSSH `22`のルールと[デフォルトのセキュリティグループ](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md/#security-group)を使用しようとします。Dockerに依存する代わりに、必要なルールを使用してセキュリティグループを作成し、[下記](#the-runnersmachine-section)で説明するように、GitLab Runnerオプションでそのグループを指定できます。これにより、ネットワーク環境に基づいて、好みに合わせて事前にカスタマイズできます。[Runnerマネージャーインスタンス](#prepare-the-runner-manager-instance)からポート`2376`と`22`にアクセスできることを確認する必要があります。 ### AWS認証情報 {#aws-credentials} キャッシュのスケール(EC2)とキャッシュの更新(S3経由)の権限を持つユーザーに関連付けられている[AWSアクセスキー](https://docs.aws.amazon.com/IAM/latest/UserGuide/security-creds.html)が必要です。EC2(AmazonEC2FullAccess)およびS3の[ポリシー](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-policies-for-amazon-ec2.html)を使用して新しいユーザーを作成します。S3に必要な最小限の権限の詳細については、[`runners.cache.s3`](../advanced-configuration.md#the-runnerscaches3-section)を参照してください。セキュリティを強化するために、そのユーザーのコンソールログインを無効にできます。タブを開いたままにするか、後で[GitLab Runnerの設定](#the-runnersmachine-section)で使用するためにセキュリティ認証情報をエディタにコピーして貼り付けます。 必要な`AmazonEC2FullAccess`ポリシーと`AmazonS3FullAccess`ポリシーを使用して[EC2インスタンスプロファイル](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2_instance-profiles.html)を作成することもできます。 ジョブの実行のために新しいEC2インスタンスをプロビジョニングするには、このインスタンスプロファイルをRunnerマネージャーEC2インスタンスにアタッチします。Runnerマシンがインスタンスプロファイルを使用している場合は、Runnerマネージャーのインスタンスプロファイルに`iam:PassRole`アクションを含めます。 例: ```json { "Statement": [ { "Action": "iam:PassRole", "Effect": "Allow", "Resource": "arn:aws:iam:::role/instance-profile-of-runner-machine" } ], "Version": "2012-10-17" } ``` ## Runnerマネージャーインスタンスを準備する {#prepare-the-runner-manager-instance} 
最初に、新しいマシンを起動するRunnerマネージャーとして機能するEC2インスタンスにGitLab Runnerをインストールします。DockerとGitLab Runnerの両方がサポートするディストリビューション(Ubuntu、Debian、CentOS、RHELなど)を選択します。 Runnerマネージャーインスタンス自体はジョブを実行しないため、これは強力なマシンである必要はありません。最初の設定では、小さなインスタンスから開始できます。このマシンは常に稼働している必要があるため、専任ホストです。したがって、継続的なベースラインコストがかかるのはこのホストだけです。 前提条件をインストールします。 1. サーバーにログインします 1. [GitLabの公式リポジトリからGitLab Runnerをインストールします](../../install/linux-repository.md) 1. [Dockerをインストールします](https://docs.docker.com/engine/install/#server) 1. [GitLabフォークからDocker Machineをインストールします](https://gitlab.com/gitlab-org/ci-cd/docker-machine)(DockerではDocker Machineが非推奨になりました) Runnerがインストールされたので、次にRunnerを登録します。 ## GitLab Runnerを登録する {#registering-the-gitlab-runner} GitLab Runnerを設定する前に、最初にGitLab Runnerを登録して、GitLabインスタンスに接続する必要があります。 1. [Runnerトークンを取得します](https://docs.gitlab.com/ci/runners/) 1. [Runnerを登録します](../../register/_index.md) 1. executorの種類を尋ねられたら、`docker+machine`と入力します これで、最も重要な部分であるGitLab Runnerの設定に進むことができます。 {{< alert type="note" >}} インスタンス内のすべてのユーザーが、オートスケールされたRunnerを使用できるようにする場合は、Runnerを共有Runnerとして登録します。 {{< /alert >}} ## Runnerを設定する {#configuring-the-runner} Runnerが登録されたので、その設定ファイルを編集してAWS Machineドライバーに必要なオプションを追加する必要があります。 次に設定ファイルの各セクションについて詳しく説明します。 ### グローバルセクション {#the-global-section} グローバルセクションでは、すべてのRunnerで同時に実行できるジョブの制限(`concurrent`)を定義できます。これは、GitLab Runnerが対応するユーザーの数やビルドにかかる時間などのニーズに応じて大きく異なります。最初に`10`のような小さい値を使用し、その後、値を増減できます。 `check_interval`オプションは、RunnerがGitLabで新しいジョブを確認する頻度を秒単位で定義します。 例: ```toml concurrent = 10 check_interval = 0 ``` [その他のオプション](../advanced-configuration.md#the-global-section)も利用できます。 ### `runners`セクション {#the-runners-section} `[[runners]]`セクションで最も重要な設定は`executor`です。これは`docker+machine`に設定する必要があります。これらの設定のほとんどは、Runnerを初めて登録するときに処理されます。 `limit`は、このRunnerが起動するマシン(実行中のマシンおよびアイドル状態のマシン)の最大数を設定します。詳細については、[`limit`、`concurrent`、`IdleCount`の間の関係](../autoscale.md#how-concurrent-limit-and-idlecount-generate-the-upper-limit-of-running-machines)をご確認ください。 例: ```toml [[runners]] name = "gitlab-aws-autoscaler" url = "" 
token = "" executor = "docker+machine" limit = 20 ``` `[[runners]]`の[その他のオプション](../advanced-configuration.md#the-runners-section)も利用できます。 ### `runners.docker`セクション {#the-runnersdocker-section} `[runners.docker]`セクションでは、[`.gitlab-ci.yml`](https://docs.gitlab.com/ci/yaml/)でDockerイメージが定義されていない場合に子Runnerが使用するデフォルトのDockerイメージを定義できます。`privileged = true`を使用すると、すべてのRunnerが[Docker in Docker](https://docs.gitlab.com/ci/docker/using_docker_build/#use-docker-in-docker)を実行できるようになります。これは、GitLab CI/CDで独自のDockerイメージをビルドする予定がある場合に役立ちます。 次に`disable_cache = true`を使用して、Docker executorの内部キャッシュメカニズムを無効にします。これは、以下のセクションで説明するように分散キャッシュモードを使用するためです。 例: ```toml [runners.docker] image = "alpine" privileged = true disable_cache = true ``` `[runners.docker]`の[その他のオプション](../advanced-configuration.md#the-runnersdocker-section)も利用できます。 ### `runners.cache`セクション {#the-runnerscache-section} ジョブの処理をスピードアップするために、GitLab Runnerは、選択されたディレクトリやファイルを保存し、後続のジョブ間で共有するキャッシュメカニズムを提供します。このセットアップでは必須ではありませんが、GitLab Runnerが提供する分散キャッシュメカニズムを使用することをお勧めします。新しいインスタンスがオンデマンドで作成されるため、キャッシュを保存する共通の場所を確保することが重要です。 次の例ではAmazon S3を使用します。 ```toml [runners.cache] Type = "s3" Shared = true [runners.cache.s3] ServerAddress = "s3.amazonaws.com" AccessKey = "" SecretKey = "" BucketName = "" BucketLocation = "us-west-2" ``` キャッシュメカニズムを詳しく調べるための詳細情報を以下に示します。 - [`runners.cache`のリファレンス](../advanced-configuration.md#the-runnerscache-section) - [`runners.cache.s3`のリファレンス](../advanced-configuration.md#the-runnerscaches3-section) - [GitLab Runnerでのキャッシュサーバーのデプロイと使用](../autoscale.md#distributed-runners-caching) - [キャッシュの仕組み](https://docs.gitlab.com/ci/yaml/#cache) ### `runners.machine`セクション {#the-runnersmachine-section} これは設定で最も重要な部分であり、GitLab Runnerに対して新しいDocker Machineインスタンスを起動または削除する方法とタイミングを指示します。 AWS Machineオプションを中心に説明します。その他の設定については、以下の資料を参照してください。 - [基盤となるオートスケールアルゴリズムとパラメータ](../autoscale.md#autoscaling-algorithm-and-parameters) \- 組織のニーズに応じて異なります。 - [オートスケール期間](../autoscale.md#configure-autoscaling-periods) \- 
組織で作業が行われない一定の期間がある場合(週末など)に役立ちます。 以下に`runners.machine`セクションの例を示します。 ```toml [runners.machine] IdleCount = 1 IdleTime = 1800 MaxBuilds = 10 MachineDriver = "amazonec2" MachineName = "gitlab-docker-machine-%s" MachineOptions = [ "amazonec2-access-key=XXXX", "amazonec2-secret-key=XXXX", "amazonec2-region=eu-central-1", "amazonec2-vpc-id=vpc-xxxxx", "amazonec2-subnet-id=subnet-xxxxx", "amazonec2-zone=x", "amazonec2-use-private-address=true", "amazonec2-tags=runner-manager-name,gitlab-aws-autoscaler,gitlab,true,gitlab-runner-autoscale,true", "amazonec2-security-group=xxxxx", "amazonec2-instance-type=m4.2xlarge", ] [[runners.machine.autoscaling]] Periods = ["* * 9-17 * * mon-fri *"] IdleCount = 50 IdleTime = 3600 Timezone = "UTC" [[runners.machine.autoscaling]] Periods = ["* * * * * sat,sun *"] IdleCount = 5 IdleTime = 60 Timezone = "UTC" ``` Docker Machineドライバーは`amazonec2`に設定され、マシン名には標準のプレフィックスが付加され、その後に`%s`(必須)が続きます。これは子RunnerのIDに置き換えられます(`gitlab-docker-machine-%s`)。 ご使用のAWSインフラストラクチャに応じて、`MachineOptions`で設定できる多くのオプションがあります。最も一般的なオプションを以下に示します。 | マシンオプション | 説明 | |------------------------------------------------------------------------|-------------| | `amazonec2-access-key=XXXX` | EC2インスタンスを作成する権限を持つユーザーのAWSアクセスキー。[AWS認証情報](#aws-credentials)を参照してください。 | | `amazonec2-secret-key=XXXX` | EC2インスタンスを作成する権限を持つユーザーのAWSシークレットキーについては、[AWS認証情報](#aws-credentials)を参照してください。 | | `amazonec2-region=eu-central-2` | インスタンスを起動するときに使用するリージョン。これを完全に省略すると、デフォルトの`us-east-1`が使用されます。 | | `amazonec2-vpc-id=vpc-xxxxx` | インスタンスを起動する[VPC ID](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md#vpc-id)。 | | `amazonec2-subnet-id=subnet-xxxx` | AWS VPCサブネットID。 | | `amazonec2-zone=x` | 指定しない場合、[アベイラビリティゾーンは`a`になります](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md#environment-variables-and-default-values)。これは、指定されたサブネットと同じアベイラビリティゾーンに設定する必要があります。たとえば、ゾーンが`eu-west-1b`の場合は`amazonec2-zone=b`にする必要があります。 | | 
`amazonec2-use-private-address=true` | Docker MachineのプライベートIPアドレスを使用しますが、パブリックIPアドレスを引き続き作成します。トラフィックを内部で維持し、余分なコストを回避するのに役立ちます。 | | `amazonec2-tags=runner-manager-name,gitlab-aws-autoscaler,gitlab,true,gitlab-runner-autoscale,true` | AWSの追加タグキー値ペア。AWSコンソールでインスタンスを識別する際に役立ちます。「Name」[タグ](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html)は、デフォルトでマシン名に設定されます。`[[runners]]`で設定されているRunnerの名前に一致するように、「runner-manager-name」に設定しました。これにより、セットアップされている特定のマネージャーにより作成されるすべてのEC2インスタンスをフィルタリングできます。 | | `amazonec2-security-group=xxxx` | AWS VPCセキュリティグループ名。セキュリティグループIDではありません。[AWSセキュリティグループ](#aws-security-groups)を参照してください。 | | `amazonec2-instance-type=m4.2xlarge` | 子Runnerが実行されるインスタンスのタイプ。 | | `amazonec2-ssh-user=xxxx` | インスタンスへのSSHアクセス権を持つユーザー。 | | `amazonec2-iam-instance-profile=xxxx_runner_machine_inst_profile_name` | Runnerマシンに使用するIAMインスタンスプロファイル。 | | `amazonec2-ami=xxxx_runner_machine_ami_id` | 特定のイメージのGitLab Runner AMI ID。 | | `amazonec2-request-spot-instance=true` | オンデマンドの価格よりも安価で利用できる予備のEC2キャパシティを使用します。 | | `amazonec2-spot-price=xxxx_runner_machine_spot_price=x.xx` | スポットインスタンスの入札価格(米ドル)。`--amazonec2-request-spot-instance flag`を`true`に設定する必要があります。`amazonec2-spot-price`を省略すると、Docker Machineは最高価格をデフォルト値(1時間あたり`$0.50`)に設定します。 | | `amazonec2-security-group-readonly=true` | セキュリティグループを読み取り専用に設定します。 | | `amazonec2-userdata=xxxx_runner_machine_userdata_path` | Runnerマシンの`userdata`パスを指定します。 | | `amazonec2-root-size=XX` | インスタンスのルートディスクサイズ(GB単位)。 | ノート: - `MachineOptions`の下には、[AWS Docker Machineドライバーでサポートされている](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md#options)すべてのオプションを追加できます。インフラストラクチャのセットアップでさまざまなオプションを適用することが必要となる場合があるため、Dockerのドキュメントを読んでおくことを強くお勧めします。 - `amazonec2-ami`を設定して別のAMI IDを選択しない限り、子インスタンスはデフォルトでUbuntu 16.04を使用します。[Docker Machineでサポートされているベースオペレーティングシステム](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/os-base)のみを設定します。 - 
マシンオプションの1つとして`amazonec2-private-address-only=true`を指定すると、EC2インスタンスにパブリックIPは割り当てられません。これは、VPCがインターネットゲートウェイ(IGW)で正しく設定されており、ルーティングが正常に機能している場合は問題ありませんが、より複雑な設定では検討が必要となります。詳しくは、[VPC接続に関するDockerドキュメント](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md#vpc-connectivity)を参照してください。 `[runners.machine]`の[その他のオプション](../advanced-configuration.md#the-runnersmachine-section)も利用できます。 ### 完全な例 {#getting-it-all-together} 完全な`/etc/gitlab-runner/config.toml`の例を次に示します。 ```toml concurrent = 10 check_interval = 0 [[runners]] name = "gitlab-aws-autoscaler" url = "" token = "" executor = "docker+machine" limit = 20 [runners.docker] image = "alpine" privileged = true disable_cache = true [runners.cache] Type = "s3" Shared = true [runners.cache.s3] ServerAddress = "s3.amazonaws.com" AccessKey = "" SecretKey = "" BucketName = "" BucketLocation = "us-west-2" [runners.machine] IdleCount = 1 IdleTime = 1800 MaxBuilds = 100 MachineDriver = "amazonec2" MachineName = "gitlab-docker-machine-%s" MachineOptions = [ "amazonec2-access-key=XXXX", "amazonec2-secret-key=XXXX", "amazonec2-region=eu-central-1", "amazonec2-vpc-id=vpc-xxxxx", "amazonec2-subnet-id=subnet-xxxxx", "amazonec2-use-private-address=true", "amazonec2-tags=runner-manager-name,gitlab-aws-autoscaler,gitlab,true,gitlab-runner-autoscale,true", "amazonec2-security-group=XXXX", "amazonec2-instance-type=m4.2xlarge", ] [[runners.machine.autoscaling]] Periods = ["* * 9-17 * * mon-fri *"] IdleCount = 50 IdleTime = 3600 Timezone = "UTC" [[runners.machine.autoscaling]] Periods = ["* * * * * sat,sun *"] IdleCount = 5 IdleTime = 60 Timezone = "UTC" ``` ## Amazon EC2スポットインスタンスによってコストを削減する {#cutting-down-costs-with-amazon-ec2-spot-instances} Amazonでは次のように[説明](https://aws.amazon.com/ec2/spot/)されています。 > Amazon EC2スポットインスタンスを使用すると、予備のAmazon EC2コンピューティングキャパシティに入札できます。スポットインスタンスは、オンデマンド料金と比較して割引された料金で利用できることが多いため、アプリケーションの実行コストを大幅に削減し、同じ予算でアプリケーションのコンピューティングキャパシティとスループットを向上させ、新しいタイプのクラウドコンピューティングアプリケーションを有効にすることができます。 
上記で選択した[`runners.machine`](#the-runnersmachine-section)オプションに加えて、`/etc/gitlab-runner/config.toml`の`MachineOptions`セクションの下に次の内容を追加します。 ```toml MachineOptions = [ "amazonec2-request-spot-instance=true", "amazonec2-spot-price=", ] ``` この設定では、`amazonec2-spot-price`が空の場合、AWSはスポットインスタンスの入札価格を、そのインスタンスクラスのデフォルトのオンデマンド価格に設定します。`amazonec2-spot-price`を完全に省略すると、Docker Machineは最高価格を[デフォルト値(1時間あたり$0.50)](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/aws.md#environment-variables-and-default-values)に設定します。 スポットインスタンスのリクエストをさらにカスタマイズできます。 ```toml MachineOptions = [ "amazonec2-request-spot-instance=true", "amazonec2-spot-price=0.03", "amazonec2-block-duration-minutes=60" ] ``` この設定では、Docker Machineは1時間あたり最大スポットリクエスト価格が$0.03のスポットインスタンスを使用して作成され、スポットインスタンスの期間は60分に制限されます。前述の数値`0.03`は単なる例です。選択したリージョンに基づいて現在の価格を確認してください。 Amazon EC2スポットインスタンスの詳細については、次のリンクをご覧ください。 - - - ### スポットインスタンスの注意事項 {#caveats-of-spot-instances} スポットインスタンスは、未使用のリソースを利用してインフラストラクチャのコストを最小限に抑える優れた方法ですが、その影響に注意する必要があります。 スポットインスタンスの価格モデルが原因で、スポットインスタンスでCIジョブを実行すると、失敗率が高まる可能性があります。指定したスポット最高価格が現在のスポット価格を超えている場合、リクエストしたキャパシティは取得されません。スポット料金は1時間ごとに改定されます。既存のスポットインスタンスで設定されている最高価格が、改定されたスポットインスタンス価格よりも低い場合、そのスポットインスタンスは2分以内に終了し、スポットホスト上のすべてのジョブは失敗します。 その結果、オートスケールRunnerは新しいインスタンスをリクエストし続けても、新しいマシンを作成できません。これにより、最終的に60件のリクエストが行われ、AWSはそれ以上のリクエストを受け入れなくなります。その後、許容できるスポット価格になっても、呼び出し回数の制限を超えているため、しばらくの間ロックアウトされます。 この状況が発生した場合は、Runnerマネージャーマシンで次のコマンドを使用して、Docker Machineの状態を確認できます。 ```shell docker-machine ls -q --filter state=Error --format "{{.NAME}}" ``` {{< alert type="note" >}} GitLab Runnerがスポット価格の変更を正常に処理することに関していくつかの問題があり、`docker-machine`がDocker Machine継続的に削除しようとするという報告があります。GitLabは、アップストリームプロジェクトで両方のケースに対するパッチを提供しました。詳細については、[イシュー#2771](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2771)と[\#2772](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2772)を参照してください。 {{< /alert >}} GitLabフォークは、AWS EC2フリートとスポットインスタンスでのこれらのフリートの使用をサポートしていません。代替策として、[Continuous Kernel Integration 
Projectのダウンストリームフォーク](https://gitlab.com/cki-project/mirror/docker-machine)を使用できます。 ## まとめ {#conclusion} このガイドでは、AWSでオートスケールモードでGitLab Runnerをインストールおよび設定する方法を説明しました。 GitLab Runnerのオートスケール機能を使用すると、時間と費用の両方を節約できます。AWSが提供するスポットインスタンスを使用するとさらに節約できますが、その影響に注意する必要があります。入札価格が十分に高ければ、問題はありません。 このチュートリアルに(大きな)影響を与えた次のユースケースを読むことができます。 - [HumanGeo、JenkinsからGitLabへ乗り換え](https://about.gitlab.com/blog/humangeo-switches-jenkins-gitlab-ci/) - [Substrakt Health - GitLab CI/CD Runnerをオートスケールし、EC2コストを90%削減](https://about.gitlab.com/blog/autoscale-ci-runners/) ================================================ FILE: docs-locale/ja-jp/configuration/runner_autoscale_aws_fargate/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: AWS FargateでGitLab CIをオートスケールする --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} {{< alert type="warning" >}} Fargateドライバーは、コミュニティでサポートされています。GitLabサポートは問題のデバッグを支援しますが、保証は提供しません。 {{< /alert >}} GitLabの[custom executor](../../executors/custom.md)ドライバー([AWS Fargate](https://gitlab.com/gitlab-org/ci-cd/custom-executor-drivers/fargate)用)は、Amazon Elastic Container Service (ECS) 上のコンテナを自動的に起動して、各GitLab CIジョブを実行します。 このドキュメントのタスクを完了すると、executorはGitLabから開始されたジョブを実行できます。GitLabでコミットが行われるたびに、GitLabインスタンスは新しいジョブが利用可能になったことをRunnerに通知します。次に、Runnerは、AWS ECSで設定したタスク定義に基づいて、ターゲットECSクラスターで新しいタスクを開始します。任意のDockerイメージを使用するようにAWS ECSタスク定義を設定できます。このアプローチを使用すると、AWS Fargateで実行できるビルドのタイプを完全に柔軟に設定できます。 ![GitLab Runner Fargateドライバーのアーキテクチャ](../img/runner_fargate_driver_ssh.png) このドキュメントでは、実装の最初の理解を深めるための例を示します。本番環境での使用を目的としたものではありません。AWSでは追加のセキュリティが必要です。 たとえば、2つのAWSセキュリティグループが必要になる場合があります: - GitLab RunnerをホストするEC2インスタンスで使用され、制限された外部IP範囲(管理アクセス用)からのSSH接続のみを受け入れるもの。 - 
Fargateタスクに適用され、EC2インスタンスからのSSHトラフィックのみを許可するもの。 非公開のコンテナレジストリの場合、ECSタスクには、[IAM権限(AWS ECRのみ)](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_execution_IAM_role.html)または非ECRプライベートレジストリの[タスクのプライベートレジストリ認証](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/private-auth.html)が必要です。 CloudFormationまたはTerraformを使用して、AWSインフラストラクチャのプロビジョニングとセットアップを自動化できます。 {{< alert type="warning" >}} CI/CDジョブは、`image:`ファイルの`.gitlab-ci.yml`キーワードの値ではなく、ECSタスクで定義されたイメージを使用します。ECSでは、ECSタスクに使用されるイメージをオーバーライドすることはできません。 この制限を回避するには、次の操作を実行できます: - Runnerが使用するすべてのプロジェクトのすべてのビルド依存関係を含むイメージをECSタスク定義に作成して使用します。 - 異なるイメージを持つ複数のECSタスク定義を作成し、`FARGATE_TASK_DEFINITION` CI/CD変数でARNを指定します。 - 公式の[AWS EKSブループリント](https://aws-ia.github.io/terraform-aws-eks-blueprints/)に従って、EKSクラスターの作成を検討してください。 詳細については、[GitLab EKS Fargate Runnerを1時間で開始し、コードをゼロにする](https://about.gitlab.com/blog/eks-fargate-runner/)を参照してください。 {{< /alert >}} {{< alert type="warning" >}} Fargateはコンテナホストを抽象化するため、コンテナホストのプロパティの設定可能性が制限されます。これは、ディスクまたはネットワークへの高いIOを必要とするRunnerワークロードに影響します。これらのプロパティは、Fargateでは設定可能性が限られているか、設定できないためです。FargateでGitLab Runnerを使用する前に、CPU、メモリ、ディスクI/O、またはネットワークI/Oに関するコンピューティング特性の高いRunnerワークロードがFargateに適していることを確認してください。 {{< /alert >}} ## 前提条件 {#prerequisites} 始める前に、以下が必要です: - EC2、ECS、ECRリソースを作成および構成する権限を持つAWS IAMユーザー。 - AWS VPCとサブネット。 - 1つ以上のAWSセキュリティグループ。 ## ステップ1: AWS Fargateタスクのコンテナイメージを準備する {#step-1-prepare-a-container-image-for-the-aws-fargate-task} コンテナイメージを準備します。このイメージをレジストリにアップロードできます。このレジストリは、GitLabジョブの実行時にコンテナを作成するために使用できます。 1. イメージにCIジョブのビルドに必要なツールがあることを確認します。たとえば、Javaプロジェクトには、`Java JDK`やMavenやGradleなどのビルドツールが必要です。Node.jsプロジェクトには、`node`と`npm`が必要です。 1. イメージにアーティファクトとキャッシュを処理するGitLab Runnerがあることを確認します。詳細については、カスタムexecutorドキュメントの[実行](../../executors/custom.md#run)ステージセクションを参照してください。 1. 
コンテナイメージが公開キー認証を介してSSH接続を受け入れることができることを確認します。Runnerは、この接続を使用して、`.gitlab-ci.yml`ファイルで定義されたビルドコマンドをAWS Fargate上のコンテナに送信します。SSHキーは、Fargateドライバーによって自動的に管理されます。コンテナは、`SSH_PUBLIC_KEY`環境変数からのキーを受け入れることができる必要があります。 GitLab RunnerとSSH構成を含む[Debianの例](https://gitlab.com/tmaczukin-test-projects/fargate-driver-debian)をご覧ください。[Node.jsの例](https://gitlab.com/aws-fargate-driver-demo/docker-nodejs-gitlab-ci-fargate)をご覧ください。 ## ステップ2: コンテナイメージをレジストリにプッシュする {#step-2-push-the-container-image-to-a-registry} イメージを作成したら、ECSタスク定義で使用するために、イメージをコンテナレジストリに公開します。 - リポジトリを作成してイメージをECRにプッシュするには、[Amazon ECRリポジトリ](https://docs.aws.amazon.com/AmazonECR/latest/userguide/Repositories.html)のドキュメントに従ってください。 - AWS CLIを使用してイメージをECRにプッシュするには、[AWS CLIを使用したAmazon ECRの概要](https://docs.aws.amazon.com/AmazonECR/latest/userguide/getting-started-cli.html)ドキュメントに従ってください。 - [GitLabコンテナレジストリ](https://docs.gitlab.com/user/packages/container_registry/)を使用するには、[Debian](https://gitlab.com/tmaczukin-test-projects/fargate-driver-debian)または[NodeJS](https://gitlab.com/aws-fargate-driver-demo/docker-nodejs-gitlab-ci-fargate)の例を使用できます。Debianイメージは`registry.gitlab.com/tmaczukin-test-projects/fargate-driver-debian:latest`に公開されています。NodeJSのサンプルイメージは`registry.gitlab.com/aws-fargate-driver-demo/docker-nodejs-gitlab-ci-fargate:latest`に公開されています。 ## ステップ3: GitLab RunnerのEC2インスタンスを作成する {#step-3-create-an-ec2-instance-for-gitlab-runner} 次に、AWS EC2インスタンスを作成します。次の手順では、GitLab Runnerをインストールします。 1. [https://console.aws.amazon.com/ec2/v2/home#LaunchInstanceWizard](https://console.aws.amazon.com/ec2/v2/home#LaunchInstanceWizard)にアクセスします。 1. インスタンスの場合は、Ubuntu Server 18.04 LTS AMIを選択します。名前は、選択したAWSリージョンによって異なる場合があります。 1. インスタンスタイプの場合は、t2.microを選択します。**次へ: インスタンスの詳細を設定**。 1. **Number of instances**はデフォルトのままにします。 1. **ネットワーク**はネットワーク、VPCを選択します。 1. **Auto-assign Public IP**を**有効**に設定します。 1. **IAM role**で、**Create new IAM role**を選択します。このロールはテストのみを目的としており、安全ではありません。 1. **Create role**を選択します。 1. 
**AWS service**を選択し、**Common use cases**で、**EC2**を選択します。次に、**次へ:を選択します: 権限**。 1. **AmazonECS_FullAccess**ポリシーのチェックボックスをオンにします。**次へ: タグ**。 1. **次へ: レビュー**。 1. IAMロールの名前(`fargate-test-instance`など)を入力し、**ロールを作成する**を選択します。 1. インスタンスを作成しているブラウザータブに戻ります。 1. **Create new IAM role**の左側にある更新ボタンを選択します。`fargate-test-instance`ロールを選択します。**次へ: ストレージを追加**。 1. **次へ: タグの追加**。 1. **次へ: セキュリティグループを設定**。 1. **Create a new security group**を選択し、`fargate-test`という名前を付けて、SSHのルールが定義されていることを確認します(`Type: SSH, Protocol: TCP, Port Range: 22`)。インバウンドルールとアウトバウンドルールのIP範囲を指定する必要があります。 1. **Review and Launch**を選択します。 1. **Launch**を選択します。 1. オプション。オプション。**Create a new key pair**を選択し、`fargate-runner-manager`という名前を付けて、**Download Key Pair**を選択します。SSHのプライベートキーがコンピューターにダウンロードされます(ブラウザーで構成されたディレクトリを確認してください)。 1. **Launch Instances**を選択します。 1. **View Instances**を選択します。 1. インスタンスが起動するまで待ちます。`IPv4 Public IP`アドレスを書き留めます。 ## ステップ4: EC2インスタンスにGitLab Runnerをインストールして構成する {#step-4-install-and-configure-gitlab-runner-on-the-ec2-instance} 次に、UbuntuインスタンスにGitLab Runnerをインストールします。 1. GitLabプロジェクトの**設定 > CI/CD**に移動し、Runnerセクションを展開します。**Set up a specific Runner manually**で、登録トークンを書き留めます。 1. キーファイルに適切な権限があることを確認するために、`chmod 400 path/to/downloaded/key/file`を実行します。 1. 次のコマンドを使用して、作成したEC2インスタンスにSSHで接続します: ```shell ssh ubuntu@[ip_address] -i path/to/downloaded/key/file ``` 1. 正常に接続されたら、次のコマンドを実行します: ```shell sudo mkdir -p /opt/gitlab-runner/{metadata,builds,cache} curl -s "https://packages.gitlab.com/install/repositories/runner/gitlab-runner/script.deb.sh" | sudo bash sudo apt install gitlab-runner ``` 1. 手順1でメモしたGitLab URLと登録トークンを使用して、このコマンドを実行します。 ```shell sudo gitlab-runner register --url "https://gitlab.com/" --registration-token TOKEN_HERE --name fargate-test-runner --run-untagged --executor custom -n ``` 1. 
`sudo vim /etc/gitlab-runner/config.toml`を実行し、次のコンテンツを追加します: ```toml concurrent = 1 check_interval = 0 [session_server] session_timeout = 1800 [[runners]] name = "fargate-test" url = "https://gitlab.com/" token = "__REDACTED__" executor = "custom" builds_dir = "/opt/gitlab-runner/builds" cache_dir = "/opt/gitlab-runner/cache" [runners.custom] volumes = ["/cache", "/path/to-ca-cert-dir/ca.crt:/etc/gitlab-runner/certs/ca.crt:ro"] config_exec = "/opt/gitlab-runner/fargate" config_args = ["--config", "/etc/gitlab-runner/fargate.toml", "custom", "config"] prepare_exec = "/opt/gitlab-runner/fargate" prepare_args = ["--config", "/etc/gitlab-runner/fargate.toml", "custom", "prepare"] run_exec = "/opt/gitlab-runner/fargate" run_args = ["--config", "/etc/gitlab-runner/fargate.toml", "custom", "run"] cleanup_exec = "/opt/gitlab-runner/fargate" cleanup_args = ["--config", "/etc/gitlab-runner/fargate.toml", "custom", "cleanup"] ``` 1. プライベートCAを持つGitLab Self-Managedインスタンスがある場合は、次の行を追加します: ```toml volumes = ["/cache", "/path/to-ca-cert-dir/ca.crt:/etc/gitlab-runner/certs/ca.crt:ro"] ``` [証明書を信頼する方法の詳細](../tls-self-signed.md#trusting-the-certificate-for-the-other-cicd-stages)。 以下に示す`config.toml`のセクションは、登録コマンドによって作成されます。変更しないでください。 ```toml concurrent = 1 check_interval = 0 [session_server] session_timeout = 1800 name = "fargate-test" url = "https://gitlab.com/" token = "__REDACTED__" executor = "custom" ``` 1. `sudo vim /etc/gitlab-runner/fargate.toml`を実行し、次のコンテンツを追加します: ```toml LogLevel = "info" LogFormat = "text" [Fargate] Cluster = "test-cluster" Region = "us-east-2" Subnet = "subnet-xxxxxx" SecurityGroup = "sg-xxxxxxxxxxxxx" TaskDefinition = "test-task:1" EnablePublicIP = true [TaskMetadata] Directory = "/opt/gitlab-runner/metadata" [SSH] Username = "root" Port = 22 ``` - `Cluster`の値と`TaskDefinition`の名前を書き留めます。この例では、`test-task`がリビジョン番号として`:1`と表示されています。リビジョン番号が指定されていない場合は、最新の**active**なリビジョンが使用されます。 - リージョンを選択します。Runnerマネージャーインスタンスから`Subnet`の値を取得します。 - セキュリティグループIDを見つける方法: 1. 
AWSのインスタンスのリストで、作成したEC2インスタンスを選択します。詳細が表示されます。 1. **Security groups**で、作成したグループの名前を選択します。 1. **Security group ID**をコピーします。 本番環境では、セキュリティグループの設定と使用に関する[AWSガイドライン](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-security-groups.html)に従ってください。 - `EnablePublicIP`がtrueに設定されている場合、タスクコンテナのパブリックIPが収集され、SSH接続が実行されます。 - `EnablePublicIP`がfalseに設定されている場合: - Fargateドライバーは、タスクコンテナのプライベートIPを使用します。`false`に設定されている場合に接続をセットアップするには、VPCセキュリティグループにポート22(SSH)のインバウンドルールが必要です。ソースはVPC CIDRです。 - 外部依存関係をフェッチするには、プロビジョニングされたAWS Fargateコンテナがパブリックインターネットにアクセスできる必要があります。AWS Fargateコンテナにパブリックインターネットアクセスを提供するには、VPCでNATゲートウェイを使用できます。 - SSHサーバーのポート番号はオプションです。省略した場合、デフォルトのSSHポート(22)が使用されます。 - セクション設定の詳細については、[Fargateドライバードキュメント](https://gitlab.com/gitlab-org/ci-cd/custom-executor-drivers/fargate/-/tree/master/docs#configuration)を参照してください。 1. Fargateドライバーをインストールします: ```shell sudo curl -Lo /opt/gitlab-runner/fargate "https://gitlab-runner-custom-fargate-downloads.s3.amazonaws.com/latest/fargate-linux-amd64" sudo chmod +x /opt/gitlab-runner/fargate ``` ## ステップ5: ECS Fargateクラスターを作成する {#step-5-create-an-ecs-fargate-cluster} Amazon ECSクラスターは、ECSコンテナインスタンスのグループです。 1. [`https://console.aws.amazon.com/ecs/home#/clusters`](https://console.aws.amazon.com/ecs/home#/clusters)にアクセスします。 1. **Create Cluster**を選択します。 1. **Networking only**タイプを選択します。**次のステップ**を選択します。 1. 名前を`test-cluster`(`fargate.toml`と同じ)にします。 1. **Create**を選択します。 1. **View cluster**を選択します。`Cluster ARN`の値からリージョンとアカウントIDの部分を書き留めます。 1. **Update Cluster**を選択します。 1. `Default capacity provider strategy`の横にある**Add another provider**を選択し、`FARGATE`を選択します。**更新**を選択します。 ECS Fargateでのクラスターの設定と操作の詳細な手順については、[AWSドキュメント](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/Welcome.html)を参照してください。 ## ステップ6: ECSタスク定義を作成する {#step-6-create-an-ecs-task-definition} この手順では、タイプ`Fargate`のタスク定義を作成し、CIビルドに使用するコンテナイメージを参照します。 1. [`https://console.aws.amazon.com/ecs/home#/taskDefinitions`](https://console.aws.amazon.com/ecs/home#/taskDefinitions)にアクセスします。 1. 
**Create new Task Definition**を選択します。 1. **FARGATE**を選択し、**次のステップ**を選択します。 1. 名前を`test-task`にします。(注: 名前は`fargate.toml`ファイルで定義されているのと同じ値ですが、`:1`はありません)。 1. **Task memory (GB)**と**Task CPU (vCPU)**の値を選択します。 1. **Add container**を選択します。次に: 1. `ci-coordinator`という名前を付けて、Fargateドライバーが`SSH_PUBLIC_KEY`環境変数を挿入できるようにします。 1. イメージを定義します(例:`registry.gitlab.com/tmaczukin-test-projects/fargate-driver-debian:latest`)。 1. 22/TCPのポートマッピングを定義します。 1. **追加**を選択します。 1. **Create**を選択します。 1. **View task definition**を選択します。 {{< alert type="warning" >}} 単一のFargateタスクで、1つまたは複数のコンテナを起動できます。Fargateドライバーは、`ci-coordinator`という名前のコンテナにのみ、`SSH_PUBLIC_KEY`環境変数を挿入します。Fargateドライバーで使用されるすべてのタスク定義に、この名前のコンテナが必要です。この名前の付いたコンテナは、上記のように、SSHサーバーとすべてのGitLab Runnerの要件がインストールされているものである必要があります。 {{< /alert >}} タスク定義の設定と操作の詳細な手順については、AWSの[ドキュメント](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/create-task-definition.html)を参照してください。 AWS ECRからイメージを起動するために必要なECSサービス許可については、[Amazon ECSタスク実行IAMロール](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_execution_IAM_role.html)を参照してください。 GitLabインスタンスでホストされているものを含む、プライベートレジストリへのECS認証については、[タスクのプライベートレジストリ認証](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/private-auth.html)を参照してください。 この時点で、RunnerマネージャーとFargateドライバーが構成され、AWS AWS Fargateでジョブの実行を開始する準備が完了します。 ## ステップ7: 設定のテスト {#step-7-test-the-configuration} これで設定を使用する準備ができました。 1. GitLabプロジェクトで、`.gitlab-ci.yml`ファイルを作成します: ```yaml test: script: - echo "It works!" - for i in $(seq 1 30); do echo "."; sleep 1; done ``` 1. プロジェクトの**CI/CD > パイプライン**に移動します。 1. **Run Pipeline**を選択します。 1. 
ブランチとすべての変数を更新し、**Run Pipeline**を選択します。 {{< alert type="note" >}} `.gitlab-ci.yml`ファイル内の`image`および`service`キーワードは無視されます。Runnerは、タスク定義で指定された値のみを使用します。 {{< /alert >}} ## クリーンアップ {#clean-up} AWS AWS Fargateでカスタムexecutorをテストした後でクリーンアップを実行する場合は、次のオブジェクトを削除します: - [手順3](#step-3-create-an-ec2-instance-for-gitlab-runner)で作成されたEC2インスタンス、キーペア、IAMロール、およびセキュリティグループ。 - [手順5](#step-5-create-an-ecs-fargate-cluster)で作成されたECS AWS Fargateクラスター。 - [手順6](#step-6-create-an-ecs-task-definition)で作成されたECSタスク定義。 ## プライベートAWS AWS Fargateタスクの設定 {#configure-a-private-aws-fargate-task} 高度なセキュリティを確保するには、[プライベートAWS AWS Fargateタスク](https://repost.aws/knowledge-center/ecs-fargate-tasks-private-subnet)を設定します。この設定では、executorは内部AWS IPアドレスのみを使用します。CI/CDジョブがプライベートAWS AWS Fargateインスタンスで実行されるように、AWSからの送信トラフィックのみを許可します。 プライベートAWS AWS Fargateタスクを設定するには、次の手順を完了して、AWSを設定し、プライベートサブネットでAWS AWS Fargateタスクを実行します: 1. 既存のパブリックサブネットが、VPCアドレス範囲内のすべてのIPアドレスを予約していないことを確認します。VPCとサブネットの`cird`アドレス範囲を調べます。サブネット`cird`アドレス範囲がVPC `cird`アドレス範囲のサブセットである場合は、手順2と4をスキップします。それ以外の場合、VPCに使用可能なアドレス範囲がないため、VPCとパブリックサブネットを削除して再作成する必要があります: 1. 既存のサブネットとVPCを削除します。 1. 削除したVPCと同じ設定で[VPCを作成する](https://docs.aws.amazon.com/vpc/latest/privatelink/create-interface-endpoint.html#create-interface-endpoint)し、`cird`アドレス(例:`10.0.0.0/23`)を更新します。 1. 削除したサブネットと同じ設定で[パブリックサブネットを作成する](https://docs.aws.amazon.com/vpc/latest/privatelink/interface-endpoints.html)。`cird`アドレス範囲(例:`10.0.0.0/24`)であるVPCアドレス範囲のサブセットであるアドレスを使用します。 1. パブリックサブネットと同じ設定で[プライベートサブネットを作成する](https://docs.aws.amazon.com/vpc/latest/userguide/create-subnet.html#create-subnets)。`cird`アドレス範囲(例:`10.0.1.0/24`)であるパブリックサブネット範囲と重複しないアドレス範囲を使用します。 1. [NATゲートウェイを作成する](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-nat-gateway.html)し、パブリックサブネット内に配置します。 1. 宛先`0.0.0.0/0`がNATゲートウェイを指すように、プライベートサブネットルーティングテーブルを変更します。 1. `farget.toml`設定を更新します: ```toml Subnet = "private-subnet-id" EnablePublicIP = false UsePublicIP = false ``` 1. 
Fargateタスクに関連付けられているIAMロールに次のインラインポリシーを追加します(Fargateタスクに関連付けられているIAMロールは通常、`ecsTaskExecutionRole`という名前で、既に存在しているはずです)。 ```json { "Statement": [ { "Sid": "VisualEditor0", "Effect": "Allow", "Action": [ "secretsmanager:GetSecretValue", "kms:Decrypt", "ssm:GetParameters" ], "Resource": [ "arn:aws:secretsmanager:*::secret:*", "arn:aws:kms:*::key/*" ] } ] } ``` 1. セキュリティグループ自体の参照するように、セキュリティグループの「受信ルール」を変更します。AWS設定ダイアログで、以下を実行します: - `Type`を`ssh`に設定します。 - `Source`を`Custom`に設定します。 - セキュリティグループを選択します。 - 任意のホストからのSSHアクセスを許可する既存の受信ルールを削除します。 {{< alert type="warning" >}} 既存の受信ルールを削除すると、SSHを使用してAmazon Elastic Compute Cloudインスタンスに接続できなくなります。 {{< /alert >}} 詳細については、次のAWSドキュメントを参照してください: - [Amazon ECSタスク実行IAMロール](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_execution_IAM_role.html) - [Amazon ECRインターフェースVPCエンドポイント(AWS PrivateLink)](https://docs.aws.amazon.com/AmazonECR/latest/userguide/vpc-endpoints.html) - [Amazon ECSインターフェースVPCエンドポイント](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/vpc-endpoints.html) - [パブリックサブネットとプライベートサブネットを持つVPC](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-example-private-subnets-nat.html) ## トラブルシューティング {#troubleshooting} ### 設定をテストする際のエラー`No Container Instances were found in your cluster` {#no-container-instances-were-found-in-your-cluster-error-when-testing-the-configuration} `error="starting new Fargate task: running new task on Fargate: error starting AWS Fargate Task: InvalidParameterException: No Container Instances were found in your cluster."` AWS AWS Fargateドライバーでは、[デフォルトのキャパシティプロバイダー戦略](#step-5-create-an-ecs-fargate-cluster)でECSクラスターが設定されている必要があります。 詳細情報: - デフォルトの[キャパシティプロバイダー戦略](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/cluster-capacity-providers.html)は、各Amazon ECSクラスターに関連付けられています。他のキャパシティプロバイダー戦略または起動タイプが指定されていない場合、タスクの実行またはサービスの作成時に、クラスターはこの戦略を使用します。 - 
[`capacityProviderStrategy`](https://docs.aws.amazon.com/AmazonECS/latest/APIReference/API_RunTask.html#ECS-RunTask-request-capacityProviderStrategy)が指定されている場合、`launchType`パラメータは省略する必要があります。`capacityProviderStrategy`または`launchType`が指定されていない場合、クラスターの`defaultCapacityProviderStrategy`が使用されます。 ### ジョブの実行時のメタデータ`file does not exist`エラー {#metadata-file-does-not-exist-error-when-running-jobs} `Application execution failed PID=xxxxx error="obtaining information about the running task: trying to access file \"/opt/gitlab-runner/metadata/-xxxxx.json\": file does not exist" cleanup_std=err job=xxxxx project=xx runner=` IAMロールポリシーが正しく設定され、`/opt/gitlab-runner/metadata/`にメタデータJSONファイルを作成するための書き込み操作を実行できることを確認してください。非本番環境でテストするには、AmazonECS_FullAccessポリシーを使用します。組織のセキュリティ要件に従ってIAMロールポリシーを確認します。 ### ジョブの実行時の`connection timed out` {#connection-timed-out-when-running-jobs} `Application execution failed PID=xxxx error="executing the script on the remote host: executing script on container with IP \"172.x.x.x\": connecting to server: connecting to server \"172.x.x.x:22\" as user \"root\": dial tcp 172.x.x.x:22: connect: connection timed out"` `EnablePublicIP`がfalseに設定されている場合は、VPCセキュリティグループに、SSH接続を許可する受信ルールがあることを確認してください。AWS AWS Fargateタスクコンテナは、GitLab Runner EC2インスタンスからのSSHトラフィックを受け入れる必要があります。 ### ジョブの実行時の`connection refused` {#connection-refused-when-running-jobs} `Application execution failed PID=xxxx error="executing the script on the remote host: executing script on container with IP \"10.x.x.x\": connecting to server: connecting to server \"10.x.x.x:22\" as user \"root\": dial tcp 10.x.x.x:22: connect: connection refused"` タスクコンテナのポート22が公開されており、[手順6の指示に基づいてポートマッピングが設定されていることを確認します: ECSタスク定義を作成します](#step-6-create-an-ecs-task-definition)。ポートが公開されていて、コンテナが設定されている場合: 1. **Amazon ECS > Clusters > Choose your task definition > Tasks**で、コンテナのエラーがないか確認します。 1. 
`Stopped`ステータスのタスクを表示し、失敗した最新のタスクを確認します。コンテナに失敗がある場合、**logs**タブには詳細が表示されます。 または、Dockerコンテナをローカルで実行できることを確認します。 ### エラー: `ssh: unable to authenticate, attempted methods [none publickey], no supported methods remain` {#error-ssh-unable-to-authenticate-attempted-methods-none-publickey-no-supported-methods-remain} AWS AWS Fargateドライバーの古いバージョンが原因で、サポートされていないキータイプが使用されている場合、次のエラーが発生します。 `Application execution failed PID=xxxx error="executing the script on the remote host: executing script on container with IP \"172.x.x.x\": connecting to server: connecting to server \"172.x.x.x:22\" as user \"root\": ssh: handshake failed: ssh: unable to authenticate, attempted methods [none publickey], no supported methods remain"` この問題を解決するには、最新のAWS AWS FargateドライバーをGitLab Runner EC2インスタンスにインストールします: ```shell sudo curl -Lo /opt/gitlab-runner/fargate "https://gitlab-runner-custom-fargate-downloads.s3.amazonaws.com/latest/fargate-linux-amd64" sudo chmod +x /opt/gitlab-runner/fargate ``` ================================================ FILE: docs-locale/ja-jp/configuration/slot_based_cgroups.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: スロットベースのcgroupのサポート --- スロットベースのcgroupのサポートにより、GitLab Runnerをオートスケールで使用する際の、リソースの分離と管理が向上します。スロットベースのcgroupは、オートスケーラーによって割り当てられたスロット番号に基づいて、特定のコントロールグループ(cgroup)にジョブを自動的に割り当てます。 ## メリット {#benefits} - リソース分離の改善: 同じインスタンス上の同時ジョブ間のリソースの干渉を防ぎます。 - モニタリングの簡素化: スロットごとのリソース使用量を個別に追跡できます。 - デバッグの改善: Cgroupベースのメトリクスは、リソースを大量に消費するジョブを特定するのに役立ちます。 - きめ細かい制御: 予測可能なパフォーマンスのために、スロットごとにリソース制限を設定します。 ## サポートされているexecutor {#supported-executors} スロットベースのcgroupは、スロット管理に[taskscaler](https://gitlab.com/gitlab-org/fleeting/taskscaler)を使用するオートスケールexecutorで動作します: - [Docker Autoscaler executor](../executors/docker_autoscaler.md#slot-based-cgroup-support) - [インスタンスexecutor](../executors/instance.md#slot-based-cgroup-support) ## 前提条件 
{#prerequisites} - cgroup v2をサポートするLinuxホスト - 初期cgroup階層設定のためのルートアクセス - オートスケーラー機能を備えたGitLab Runner - スロットの割り当てのためのtaskscaler(オートスケーラーによって自動的に提供されます) ## 設定 {#configuration} スロットベースのcgroupサポートを有効にするには、以下を`config.toml`に追加します。 ### `systemd` cgroupドライバーを使用するDockerの場合 {#for-docker-with-systemd-cgroup-driver} Dockerが`systemd` cgroupドライバー(最も一般的)を使用している場合は、`systemd`スライスの形式を使用します: ```toml [[runners]] name = "my-autoscaler-runner" executor = "docker-autoscaler" use_slot_cgroups = true slot_cgroup_template = "runner-slot-${slot}.slice" [runners.autoscaler] capacity_per_instance = 4 ``` ### `cgroupfs`ドライバーを使用するDockerの場合 {#for-docker-with-cgroupfs-driver} Dockerが`cgroupfs`ドライバーを使用している場合は、raw `cgroup`パス形式を使用します: ```toml [[runners]] name = "my-autoscaler-runner" executor = "docker-autoscaler" use_slot_cgroups = true slot_cgroup_template = "gitlab-runner/slot-${slot}" [runners.autoscaler] capacity_per_instance = 4 ``` ### 設定オプション {#configuration-options} | 設定 | 説明 | デフォルト | |---------|-------------|---------| | `use_slot_cgroups` | スロットベースのcgroupの割り当てを有効にする | `false` | | `slot_cgroup_template` | cgroupパスのテンプレート。プレースホルダーとして`${slot}`を使用します。形式は、Dockerのcgroupドライバーによって異なります(systemd: `runner-slot-${slot}.slice`、cgroupfs: `gitlab-runner/slot-${slot}`)。 | `"gitlab-runner/slot-${slot}"` | テンプレートは、スロット番号のプレースホルダーとして`${slot}`を使用するbashスタイルの変数展開を使用します。例: - `systemd`ドライバーの場合: スロット5の場合、`runner-slot-${slot}.slice`は`runner-slot-5.slice`になります - `cgroupfs`ドライバーの場合: スロット5の場合、`gitlab-runner/slot-${slot}`は`gitlab-runner/slot-5`になります 次のコマンドを実行して、Docker cgroupドライバーを確認します: `docker info | grep "Cgroup Driver"` ### Docker固有の設定 {#docker-specific-configuration} Docker Autoscaler executorを使用している場合は、サービスコンテナ用に別のテンプレートを指定できます: ```toml [[runners]] executor = "docker-autoscaler" use_slot_cgroups = true slot_cgroup_template = "runner-slot-${slot}.slice" [runners.docker] service_slot_cgroup_template = "runner-slot-${slot}.slice" ``` | 設定 | 説明 | デフォルト | |---------|-------------|---------| | 
`service_slot_cgroup_template` | サービスコンテナcgroupパスのテンプレート。Dockerのcgroupドライバー形式に一致する必要があります | `slot_cgroup_template`と同じ | ## 環境設定 {#environment-setup} スロットベースのcgroupを有効にする前に、Runnerホストでcgroup階層を準備します。 ### systemd cgroupドライバーのセットアップスクリプト {#setup-script-for-systemd-cgroup-driver} Dockerが`systemd` cgroupドライバー(`docker info | grep "Cgroup Driver"`で確認)を使用している場合は、raw cgroupディレクトリの代わりに`systemd`スライスを作成する必要があります。 セットアップスクリプトを作成します(`gitlab-runner-systemd-slice-setup.sh`): ```shell #!/bin/bash # gitlab-runner-systemd-slice-setup.sh # Script to set up systemd slices for GitLab Runner slot-based cgroups # This example configures 4 slots on an 8-core machine, with each slot pinned to 2 CPUs set -e MAX_SLOTS=4 # Adjust based on your capacity_per_instance configuration # CPU pinning configuration (2 CPUs per slot on an 8-core machine) # Format: comma-separated CPU list for systemd AllowedCPUs declare -a CPU_ASSIGNMENTS=( "0,1" # Slot 0: CPUs 0 and 1 "2,3" # Slot 1: CPUs 2 and 3 "4,5" # Slot 2: CPUs 4 and 5 "6,7" # Slot 3: CPUs 6 and 7 ) # Check if running as root if [[ $EUID -ne 0 ]]; then echo "This script must be run as root for systemd slice setup" exit 1 fi # Verify systemd is available if ! command -v systemctl &> /dev/null; then echo "Error: systemctl not found. This script requires systemd." 
exit 1 fi echo "Setting up systemd slices for GitLab Runner" echo "Configuration: $MAX_SLOTS slots on an 8-core machine (2 CPUs per slot)" for ((slot=0; slot "/etc/systemd/system/$slice_name" </dev/null || echo "inactive") echo " $slice_name: $status" done echo "" echo "To verify CPU assignments, check:" echo " systemctl show runner-slot-0.slice | grep AllowedCPUs" ``` セットアップスクリプトを実行します: ```shell chmod +x gitlab-runner-systemd-slice-setup.sh sudo ./gitlab-runner-systemd-slice-setup.sh ``` ### `cgroupfs`ドライバーのセットアップスクリプト(代替) {#setup-script-for-cgroupfs-driver-alternative} Dockerが`systemd`の代わりに`cgroupfs`ドライバーを使用している場合は、raw cgroupディレクトリを作成するこの代替スクリプトを使用します: ```shell #!/bin/bash # gitlab-runner-cgroup-setup.sh # Script to set up cgroup v2 hierarchy for GitLab Runner slot-based cgroups # This example configures 4 slots on an 8-core machine, with each slot pinned to 2 CPUs # Use this script only if Docker is using the cgroupfs driver (not systemd) set -e CGROUP_ROOT="/sys/fs/cgroup" RUNNER_CGROUP="gitlab-runner" MAX_SLOTS=4 # Adjust based on your capacity_per_instance configuration # CPU pinning configuration (2 CPUs per slot on an 8-core machine) # Format: "cpu_list" - adjust based on your CPU topology declare -a CPU_ASSIGNMENTS=( "0-1" # Slot 0: CPUs 0 and 1 "2-3" # Slot 1: CPUs 2 and 3 "4-5" # Slot 2: CPUs 4 and 5 "6-7" # Slot 3: CPUs 6 and 7 ) # Check if running as root if [[ $EUID -ne 0 ]]; then echo "This script must be run as root for cgroup setup" exit 1 fi # Verify cgroup v2 is available if [[ ! -f "$CGROUP_ROOT/cgroup.controllers" ]]; then echo "Error: cgroup v2 not detected. This script requires cgroup v2." 
exit 1 fi echo "Setting up cgroup v2 hierarchy for GitLab Runner" echo "Configuration: $MAX_SLOTS slots on an 8-core machine (2 CPUs per slot)" # Create base runner cgroup mkdir -p "$CGROUP_ROOT/$RUNNER_CGROUP" # Enable controllers if available if [[ -f "$CGROUP_ROOT/cgroup.controllers" ]]; then echo "+memory +cpu +cpuset" > "$CGROUP_ROOT/cgroup.subtree_control" 2>/dev/null || true fi # Create slot-specific cgroups for ((slot=0; slot "$CGROUP_ROOT/$RUNNER_CGROUP/cgroup.subtree_control" 2>/dev/null || true fi # Pin slot to specific CPUs echo "${CPU_ASSIGNMENTS[$slot]}" > "$slot_path/cpuset.cpus" # Set memory nodes (usually 0 for single NUMA node systems) echo "0" > "$slot_path/cpuset.mems" # Set permissions for GitLab Runner user chown -R gitlab-runner:gitlab-runner "$slot_path" 2>/dev/null || true done echo "Cgroup setup complete!" # Verify setup echo "" echo "Verifying cgroup setup:" for ((slot=0; slot /sys/fs/cgroup/$GITLAB_RUNNER_SLOT_CGROUP/memory.max - echo "50000" > /sys/fs/cgroup/$GITLAB_RUNNER_SLOT_CGROUP/cpu.max - ./my-process ``` ## トラブルシューティング {#troubleshooting} ### コンテナがcgroupエラーで起動に失敗する {#containers-fail-to-start-with-cgroup-errors} 1. cgroupパスが`/sys/fs/cgroup/`の下に存在することを確認します: ```shell ls -la /sys/fs/cgroup/gitlab-runner/ ``` 1. GitLab Runnerユーザーにcgroupディレクトリへの書き込みアクセス権があることを確認します: ```shell ls -la /sys/fs/cgroup/gitlab-runner/slot-0/ ``` 1. `slot_cgroup_template`が`${slot}`プレースホルダーで正しい形式を使用していることを確認します: 1. 特定のcgroup作成エラーについて、GitLab Runnerログを確認します: 1. 手動でテストします: Docker Autoscaler executorの場合: ```shell docker run --rm --cgroup-parent=gitlab-runner/slot-0 alpine echo "test" ``` インスタンスexecutorの場合: ```yaml job: script: - echo "Slot cgroup: $GITLAB_RUNNER_SLOT_CGROUP" ``` ### ジョブが同じcgroupを使用する {#jobs-use-the-same-cgroup} テンプレートに`${slot}`プレースホルダーが含まれていないことに関する警告がログに表示される場合: ```plaintext level=warning msg="Slot cgroup template does not contain ${slot} placeholder. All jobs will use the same cgroup, defeating the purpose of slot-based isolation." 
``` これは、`slot_cgroup_template`に`${slot}`変数がないことを意味します。プレースホルダーを含めるように設定を更新します: ```toml [[runners]] slot_cgroup_template = "gitlab-runner/slot-${slot}" ``` ### Cgroup v2は利用できません {#cgroup-v2-not-available} セットアップスクリプトがcgroup v2が検出されないと報告した場合は、システムで有効にする必要があるかもしれません。cgroup v2を有効にする方法については、Linuxディストリビューションのドキュメントを確認してください。最新のディストリビューションでは、通常、デフォルトで有効になっています。 ================================================ FILE: docs-locale/ja-jp/configuration/speed_up_job_execution.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: ジョブの実行を高速化する --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} イメージと依存関係をキャッシュすることで、ジョブのパフォーマンスを向上させることができます。 ## コンテナのプロキシの使用 {#use-a-proxy-for-containers} 以下を使用すると、Dockerイメージをダウンロードする時間を短縮できます: - GitLab依存プロキシ、または - DockerHubレジストリのミラー - その他のオープンソースソリューション ### GitLab Dependency Proxy {#gitlab-dependency-proxy} コンテナイメージへのアクセスをより迅速に行うために、[依存プロキシを使用](https://docs.gitlab.com/user/packages/dependency_proxy/)して、コンテナイメージをプロキシできます。 ### Docker Hubレジストリミラー {#docker-hub-registry-mirror} Docker Hubをミラーリングすることで、ジョブがコンテナイメージにアクセスする時間を短縮することもできます。これにより、[Registry as a pull through cache](https://docs.docker.com/docker-hub/image-library/mirror/)になります。ジョブの実行速度が向上するだけでなく、ミラーを使用すると、Docker Hub停止やDocker Hubレート制限に対するインフラストラクチャの耐性を高めることができます。 Dockerデーモンが[mirrorを使用するように設定されている](https://docs.docker.com/docker-hub/image-library/mirror/#configure-the-docker-daemon)場合、ミラーの実行中のインスタンスでイメージが自動的に確認されます。利用できない場合、パブリックDockerレジストリからイメージをプルし、ローカルに保存してから、ユーザーに返します。 同じイメージに対する次のリクエストは、ローカルレジストリからプルされます。 その仕組みの詳細については、[Dockerデーモンの設定ドキュメント](https://docs.docker.com/docker-hub/image-library/mirror/#configure-the-docker-daemon)を参照してください。 #### Docker Hubレジストリミラーを使用 {#use-a-docker-hub-registry-mirror} Docker 
Hubレジストリミラーを作成するには、次の手順に従います: 1. プロキシコンテナレジストリが実行される専用マシンにログインします。 1. [Docker Engine](https://docs.docker.com/get-started/get-docker/)がそのマシンにインストールされていることを確認してください。 1. 新しいコンテナレジストリを作成します: ```shell docker run -d -p 6000:5000 \ -e REGISTRY_PROXY_REMOTEURL=https://registry-1.docker.io \ --restart always \ --name registry registry:2 ``` レジストリを別のポートで公開するには、ポート番号(`6000`)を変更できます。これにより、`http`でサーバーが起動します。TLS(`https`)を有効にする場合は、[公式ドキュメント](https://distribution.github.io/distribution/about/configuration/#tls)に従ってください。 1. サーバーのIPアドレスを確認します: ```shell hostname --ip-address ``` プライベートネットワークのIPアドレスを選択する必要があります。通常、プライベートネットワークは、DigitalOcean、AWS、またはAzureのような単一プロバイダーのマシン間の内部通信に最適なソリューションです。通常、プライベートネットワークで転送されるデータは、月間帯域幅の制限には適用されません。 Docker Hubレジストリは、`MY_REGISTRY_IP:6000`でアクセスできます。 新しいレジストリサーバーを使用するように[`config.toml`設定](autoscale.md#distributed-container-registry-mirroring)できるようになりました。 ### その他のオープンソースソリューション {#other-open-source-solutions} - [`rpardini/docker-registry-proxy`](https://github.com/rpardini/docker-registry-proxy)は、GitLabコンテナレジストリを含む、ほとんどのコンテナレジストリをローカルでプロキシできます。 ## 分散キャッシュを使用する {#use-a-distributed-cache} 分散[キャッシュ](https://docs.gitlab.com/ci/yaml/#cache)を使用すると、言語の依存関係をダウンロードする時間を短縮できます。 分散キャッシュを指定するには、キャッシュサーバーをセットアップしてから、[Runnerがそのキャッシュサーバーを使用するように設定します](advanced-configuration.md#the-runnerscache-section)。 オートスケールを使用している場合は、分散Runnerの[キャッシュ機能](autoscale.md#distributed-runners-caching)の詳細をご覧ください。 以下のキャッシュサーバーがサポートされています: - [AWS S3](#use-aws-s3) - [MinIO](#use-minio)またはその他のS3互換キャッシュサーバー - [Google Cloud Storage](#use-google-cloud-storage) - [Azure Blob Storage](#use-azure-blob-storage) GitLab CI/CDの[キャッシュの依存関係とベストプラクティス](https://docs.gitlab.com/ci/caching/)をご覧ください。 ### AWS S3を使用 {#use-aws-s3} 分散キャッシュとしてAWS S3を使用するには、[Runnerの`config.toml`設定ファイルを編集](advanced-configuration.md#the-runnerscaches3-section)してS3の場所を指定し、接続用の認証情報を提供します。RunnerにS3エンドポイントへのネットワークパスがあることを確認してください。 S3 VPCエンドポイントを有効にすると、NATゲートウェイを備えたプライベートサブネットを使用している場合、データ転送のコストを節約できます。 ### MinIOを使用 {#use-minio} AWS 
S3を使用する代わりに、独自のキャッシュストレージを作成できます。 1. キャッシュサーバーが実行される専用マシンにログインします。 1. [Docker Engine](https://docs.docker.com/get-started/get-docker/)がそのマシンにインストールされていることを確認してください。 1. Goで記述されたシンプルなS3互換サーバーである[MinIO](https://www.min.io)を起動します: ```shell docker run -d --restart always -p 9005:9000 \ -v /.minio:/root/.minio -v /export:/export \ -e "MINIO_ROOT_USER=<minio_root_username>" \ -e "MINIO_ROOT_PASSWORD=<minio_root_password>" \ --name minio \ minio/minio:latest server /export ``` 別のポートでキャッシュサーバーを公開するには、ポート`9005`を変更できます。 1. サーバーのIPアドレスを確認します: ```shell hostname --ip-address ``` 1. キャッシュサーバーは`MY_CACHE_IP:9005`で利用可能になります。 1. Runnerで使用されるバケットを作成します: ```shell sudo mkdir /export/runner ``` `runner`はその場合のバケットの名前です。別のバケットを選択した場合、それは異なります。すべてのキャッシュは`/export`ディレクトリに保存されます。 1. Runnerを設定するときに、(上記から)`MINIO_ROOT_USER`値と`MINIO_ROOT_PASSWORD`値をアクセスキーとシークレットキーとして使用します。 新しいキャッシュサーバーを使用するように[`config.toml`設定](autoscale.md#distributed-runners-caching)できるようになりました。 ### Google Cloud Storage {#use-google-cloud-storage} 分散キャッシュとしてGoogle Cloud Platformを使用するには、[Runnerの`config.toml`設定ファイルを編集](advanced-configuration.md#the-runnerscachegcs-section)してGCPの場所を指定し、接続用の認証情報を提供します。RunnerにGCSエンドポイントへのネットワークパスがあることを確認してください。 ### Azure Blob Storageを使用する {#use-azure-blob-storage} 分散キャッシュとしてAzure Blobストレージを使用するには、[Runnerの`config.toml`設定ファイルを編集](advanced-configuration.md#the-runnerscacheazure-section)してAzureの場所を指定し、接続用の認証情報を提供します。RunnerにAzureエンドポイントへのネットワークパスがあることを確認してください。 ================================================ FILE: docs-locale/ja-jp/configuration/tls-self-signed.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: 自己署名証明書またはカスタム認証局 --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerには、TLSピアの検証に使用される証明書を設定するための2つのオプションがあります。 - **For connections to
the GitLab server**: 証明書ファイルは、[GitLabサーバーを対象とした自己署名証明書のサポートされているオプション](#supported-options-for-self-signed-certificates-targeting-the-gitlab-server)セクションで詳しく説明されているように指定できます。 これにより、`x509: certificate signed by unknown authority` Runner登録時の問題が解決されます。 既存のRunnerの場合、ジョブを確認しようとするとRunnerログに同じエラーが示されることがあります。 ```plaintext Couldn't execute POST against https://hostname.tld/api/v4/jobs/request: Post https://hostname.tld/api/v4/jobs/request: x509: certificate signed by unknown authority ``` - **Connecting to a cache server or an external Git LFS store**: より一般的なアプローチで、ユーザースクリプトなどの他のシナリオも対象としており、コンテナに証明書を指定してインストールすることができます。[DockerおよびKubernetes executorのTLS証明書の信頼](#trusting-tls-certificates-for-docker-and-kubernetes-executors)セクションで詳しく説明されています。 証明書が欠落しているGit LFSオペレーションに関するジョブログのエラーの例 ```plaintext LFS: Get https://object.hostname.tld/lfs-dev/c8/95/a34909dce385b85cee1a943788044859d685e66c002dbf7b28e10abeef20?X-Amz-Expires=600&X-Amz-Date=20201006T043010Z&X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=svcgitlabstoragedev%2F20201006%2Fus-east-1%2Fs3%2Faws4_request&X-Amz-SignedHeaders=host&X-Amz-Signature=012211eb0ff0e374086e8c2d37556f2d8ca4cc948763e90896f8f5774a100b55: x509: certificate signed by unknown authority ``` ## GitLabサーバーを対象とする自己署名証明書のサポートされているオプション {#supported-options-for-self-signed-certificates-targeting-the-gitlab-server} このセクションでは、GitLabサーバーのみがカスタム証明書を必要とする状況について説明します。他のホスト([プロキシダウンロードが有効](https://docs.gitlab.com/administration/object_storage/#proxy-download)になっていないオブジェクトストレージサービスなど)もカスタム認証局(CA)を必要とする場合は、[次のセクション](#trusting-tls-certificates-for-docker-and-kubernetes-executors)を参照してください。 GitLab Runnerは次のオプションをサポートしています。 - **デフォルト - システム証明書を読み取る**: GitLab Runnerはシステム証明書ストアを読み取り、システムに保存されている公開認証局(CA)に照らしてGitLabサーバーを検証します。 - **カスタム証明書ファイルを指定する**: GitLab Runnerは、[登録時](../commands/_index.md#gitlab-runner-register)(`gitlab-runner register 
--tls-ca-file=/path`)および[`config.toml`](advanced-configuration.md)の`[[runners]]`セクションで`tls-ca-file`オプションを公開します。これにより、カスタム証明書ファイルを指定できるようになります。このファイルは、RunnerがGitLabサーバーへのアクセスを試行するたびに読み取られます。GitLab Runner Helmチャートを使用している場合は、[カスタム証明書を使用してGitLabにアクセスする](../install/kubernetes_helm_chart_configuration.md#access-gitlab-with-a-custom-certificate)の説明に従って証明書を設定する必要があります。 - **PEM証明書を読み取る**: GitLab Runnerは、定義済みのファイルからPEM証明書(**DER形式はサポートされていない**)を読み取ります。 - GitLab Runnerが`root`として実行されている場合は、*nixシステムの`/etc/gitlab-runner/certs/gitlab.example.com.crt`。 サーバーアドレスが`https://gitlab.example.com:8443/`の場合は、`/etc/gitlab-runner/certs/gitlab.example.com.crt`に証明書ファイルを作成します。 `openssl`クライアントを使用して、GitLabインスタンスの証明書を`/etc/gitlab-runner/certs`にダウンロードできます。 ```shell openssl s_client -showcerts -connect gitlab.example.com:443 -servername gitlab.example.com < /dev/null 2>/dev/null | openssl x509 -outform PEM > /etc/gitlab-runner/certs/gitlab.example.com.crt ``` ファイルが正しくインストールされていることを検証するには、`openssl`などのツールを使用できます。下記は例です: ```shell echo | openssl s_client -CAfile /etc/gitlab-runner/certs/gitlab.example.com.crt -connect gitlab.example.com:443 -servername gitlab.example.com ``` - GitLab Runnerが非`root`として実行されている場合は、*nixシステムの`~/.gitlab-runner/certs/gitlab.example.com.crt`。 - その他のシステムの`./certs/gitlab.example.com.crt`。GitLab RunnerをWindowsサービスとして実行している場合、これは機能しません。代わりに、カスタム証明書ファイルを指定してください。 ノート: - GitLabサーバー証明書がCAによって署名されている場合は、GitLabサーバー署名証明書ではなくCA証明書を使用してください。場合によっては、中間証明書もチェーンに追加する必要があります。たとえば、プライマリ証明書、中間証明書、ルート証明書がある場合は、それらすべてを1つのファイルにまとめることができます。 ```plaintext -----BEGIN CERTIFICATE----- (Your primary SSL certificate: your_domain_name.crt) -----END CERTIFICATE----- -----BEGIN CERTIFICATE----- (Your intermediate certificate) -----END CERTIFICATE----- -----BEGIN CERTIFICATE----- (Your root certificate) -----END CERTIFICATE----- ``` - 既存のRunnerの証明書を更新する場合は、[再起動](../commands/_index.md#gitlab-runner-restart)します。 - HTTPを介してすでにRunnerを設定している場合は、`config.toml`でインスタンスパスをGitLabインスタンスの新しいHTTPS URLに更新します。 - 
一時的な安全性の低い回避策として、証明書の検証をスキップする方法があります。このためには、`.gitlab-ci.yml`ファイルの`variables:`セクションでCI変数`GIT_SSL_NO_VERIFY`を`true`に設定します。 ### Gitのクローン {#git-cloning} Runnerは、`CI_SERVER_TLS_CA_FILE`を使用してCAチェーンを構築するために不足している証明書を挿入します。これにより、公的に信頼されている証明書を使用しないサーバーで`git clone`とアーティファクトが機能するようになります。 このアプローチは安全ですが、Runnerが単一信頼点になります。 ## Docker executorとKubernetes executorのTLS証明書を信頼する {#trusting-tls-certificates-for-docker-and-kubernetes-executors} コンテナに証明書を登録する際には、次の情報を考慮してください。 - ユーザースクリプトの実行に使用される[**ユーザーイメージ**](https://docs.gitlab.com/ci/yaml/#image)。ユーザースクリプトの証明書を信頼するシナリオでは、証明書のインストール方法についてユーザーが責任を担う必要があります。証明書のインストール手順は、イメージによって異なることがあります。Runnerは、発生し得るすべてのシナリオにおいて証明書をインストールする方法を把握することはできません。 - Git、アーティファクト、およびキャッシュオペレーションの処理に使用される[**Runnerヘルパーイメージ**](advanced-configuration.md#helper-image)。他のCI/CDステージの証明書を信頼するシナリオでは、ユーザーが行う必要がある操作は、特定の場所(`/etc/gitlab-runner/certs/ca.crt`など)で証明書ファイルを使用できるようにすることだけです。Dockerコンテナがユーザーのために証明書ファイルを自動的にインストールします。 ### ユーザースクリプトの証明書を信頼する {#trusting-the-certificate-for-user-scripts} ビルドがTLSと自己署名証明書またはカスタム証明書を使用する場合は、ピア通信のためにビルドジョブに証明書をインストールします。デフォルトでは、ユーザースクリプトを実行しているDockerコンテナには証明書ファイルがインストールされていません。これは、カスタムキャッシュホストを使用するか、セカンダリ`git clone`を実行するか、`wget`のようなツールでファイルをフェッチするために必要になる場合があります。 証明書をインストールするには、次の手順に従います。 1. 必要なファイルをDockerボリュームとしてマップして、スクリプトを実行するDockerコンテナがこれらのファイルを認識できるようにします。このためには、たとえば`config.toml`ファイルの`[runners.docker]`内でそれぞれのキーの中にボリュームを追加します。 - **Linux**: ```toml [[runners]] name = "docker" url = "https://example.com/" token = "TOKEN" executor = "docker" [runners.docker] image = "ubuntu:latest" # Add path to your ca.crt file in the volumes list volumes = ["/cache", "/path/to-ca-cert-dir/ca.crt:/etc/gitlab-runner/certs/ca.crt:ro"] ``` 1. **Linuxのみ**: [`pre_build_script`](advanced-configuration.md#the-runners-section)で、次の操作を行うマップされたファイル(`ca.crt`など)を使用します。 1. Dockerコンテナ内の`/usr/local/share/ca-certificates/ca.crt`にこのファイルをコピーします。 1. 
`update-ca-certificates --fresh`を実行してインストールします。次に例を示します(コマンドは使用しているディストリビューションによって異なります)。 - Ubuntu: ```toml [[runners]] name = "docker" url = "https://example.com/" token = "TOKEN" executor = "docker" # Copy and install CA certificate before each job pre_build_script = """ apt-get update -y > /dev/null apt-get install -y ca-certificates > /dev/null cp /etc/gitlab-runner/certs/ca.crt /usr/local/share/ca-certificates/ca.crt update-ca-certificates --fresh > /dev/null """ ``` - Alpine: ```toml [[runners]] name = "docker" url = "https://example.com/" token = "TOKEN" executor = "docker" # Copy and install CA certificate before each job pre_build_script = """ apk update >/dev/null apk add ca-certificates > /dev/null rm -rf /var/cache/apk/* cp /etc/gitlab-runner/certs/ca.crt /usr/local/share/ca-certificates/ca.crt update-ca-certificates --fresh > /dev/null """ ``` 使用可能なGitLabサーバーCA証明書のみが必要な場合は、`CI_SERVER_TLS_CA_FILE`変数に格納されているファイルから取得できます。 ```shell curl --cacert "${CI_SERVER_TLS_CA_FILE}" ${URL} -o ${FILE} ``` ### 他のCI/CDステージの証明書を信頼する {#trusting-the-certificate-for-the-other-cicd-stages} Linuxでは`/etc/gitlab-runner/certs/ca.crt`に、Windowsでは`C:\GitLab-Runner\certs\ca.crt`に証明書ファイルをマップできます。Runnerヘルパーイメージは、起動時にこのユーザー定義の`ca.crt`ファイルをインストールし、クローンやアーティファクトのアップロードなどの操作を実行するときにこのファイルを使用します。 #### Docker {#docker} - **Linux**: ```toml [[runners]] name = "docker" url = "https://example.com/" token = "TOKEN" executor = "docker" [runners.docker] image = "ubuntu:latest" # Add path to your ca.crt file in the volumes list volumes = ["/cache", "/path/to-ca-cert-dir/ca.crt:/etc/gitlab-runner/certs/ca.crt:ro"] ``` - **Windows**: ```toml [[runners]] name = "docker" url = "https://example.com/" token = "TOKEN" executor = "docker" [runners.docker] image = "mcr.microsoft.com/windows/servercore:21H2" # Add directory holding your ca.crt file in the volumes list volumes = ["c:\\cache", "c:\\path\\to-ca-cert-dir:C:\\GitLab-Runner\\certs:ro"] ``` #### Kubernetes {#kubernetes} 
Kubernetesで実行されているジョブに証明書ファイルを提供するには、次の手順に従います。 1. ネームスペースに証明書をKubernetesシークレットとして保存します。 ```shell kubectl create secret generic <SECRET_NAME> --namespace <NAMESPACE> --from-file=<CERT_FILE> ``` 1. `<SECRET_NAME>`と`<LOCATION>`を適切な値に置き換えて、Runnerでシークレットをボリュームとしてマウントします。 ```toml gitlab-runner: runners: config: | [[runners]] [runners.kubernetes] namespace = "{{.Release.Namespace}}" image = "ubuntu:latest" [[runners.kubernetes.volumes.secret]] name = "<SECRET_NAME>" mount_path = "<LOCATION>" ``` `mount_path`は、証明書が保存されているコンテナ内のディレクトリです。`mount_path`として`/etc/gitlab-runner/certs/`を使用し、証明書ファイルとして`ca.crt`を使用した場合、証明書はコンテナ内の`/etc/gitlab-runner/certs/ca.crt`にあります。 1. ジョブの一部として、マップされた証明書ファイルをシステム証明書ストアにインストールします。たとえば、Ubuntuコンテナでは次のようになります。 ```yaml script: - cp /etc/gitlab-runner/certs/ca.crt /usr/local/share/ca-certificates/ - update-ca-certificates ``` Kubernetes executorによるヘルパーイメージの`ENTRYPOINT`の処理には、[既知のイシュー](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28484)があります。証明書ファイルがマップされている場合、この証明書ファイルはシステム証明書ストアに自動的にインストールされません。 ## トラブルシューティング {#troubleshooting} 一般的な[SSLトラブルシューティング](https://docs.gitlab.com/omnibus/settings/ssl/ssl_troubleshooting/)のドキュメントを参照してください。 また、[`tlsctl`](https://gitlab.com/gitlab-org/ci-cd/runner-tools/tlsctl)ツールを使用してRunner側からGitLab証明書をデバッグできます。 ### エラー: `x509: certificate signed by unknown authority` {#error-x509-certificate-signed-by-unknown-authority} このエラーは、executorイメージをプライベートレジストリからプルしようとしたときに、RunnerがexecutorをスケジュールするDockerホストまたはKubernetesノードが、プライベートレジストリの証明書を信頼していない場合に発生する可能性があります。 このエラーを修正するには、関連するルート認証局または証明書チェーンをシステムのトラストストアに追加し、コンテナサービスを再起動します。 UbuntuまたはAlpineを使用している場合は、次のコマンドを実行します。 ```shell cp ca.crt /usr/local/share/ca-certificates/ca.crt update-ca-certificates systemctl restart docker.service ``` UbuntuとAlpine以外のオペレーティングシステムの場合は、オペレーティングシステムのドキュメントを参照して、信頼できる証明書をインストールするための適切なコマンドを確認してください。 GitLab RunnerのバージョンとDockerホスト環境によっては、`FF_RESOLVE_FULL_TLS_CHAIN`機能フラグを無効にする必要もある場合があります。 ### ジョブでの`apt-get: not found`エラー {#apt-get-not-found-errors-in-jobs}
[`pre_build_script`](advanced-configuration.md#the-runners-section)コマンドは、Runnerが実行するすべてのジョブよりも前に実行されます。`apk`または`apt-get`のようなディストリビューション固有のコマンドは、イシューを引き起こす可能性があります。ユーザースクリプトの証明書をインストールすると、これらのスクリプトが異なるディストリビューションに基づいた[イメージ](https://docs.gitlab.com/ci/yaml/#image)を使用している場合に、CIジョブが失敗する可能性があります。 たとえば、CIジョブがUbuntuイメージとAlpineイメージを実行する場合、AlpineではUbuntuコマンドは失敗します。`apt-get: not found`エラーは、Alpineベースイメージを使用するジョブで発生します。このイシューを解決するには、次のいずれかを実行します。 - ディストリビューションに依存しない`pre_build_script`を作成します。 - [タグ](https://docs.gitlab.com/ci/yaml/#tags)を使用して、Runnerが互換性のあるイメージを持つジョブのみをピックアップするようにします。 ### エラー: `self-signed certificate in certificate chain` {#error-self-signed-certificate-in-certificate-chain} CI/CDジョブが次のエラーで失敗します。 ```plaintext fatal: unable to access 'https://gitlab.example.com/group/project.git/': SSL certificate problem: self-signed certificate in certificate chain ``` ただし[OpenSSLデバッグコマンド](https://docs.gitlab.com/omnibus/settings/ssl/ssl_troubleshooting/#useful-openssl-debugging-commands)ではエラーが検出されません。 このエラーは、Gitが接続時に使用するプロキシが、`openssl s_client`トラブルシューティングコマンドではデフォルトで使用されないプロキシである場合に発生する可能性があります。Gitがプロキシを使用してリポジトリをフェッチするかどうかを検証するには、デバッグを有効にします。 ```yaml variables: GIT_CURL_VERBOSE: 1 ``` Gitがプロキシを使用しないようにするには、`NO_PROXY`変数にGitLabホスト名が含まれているようにします。 ```yaml variables: NO_PROXY: gitlab.example.com ``` ================================================ FILE: docs-locale/ja-jp/development/_index.md ================================================ --- stage: Verify group: Runner info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Contribute to GitLab Runner development --- ================================================ FILE: docs-locale/ja-jp/development/add-windows-version.md ================================================ --- stage: Verify group: Runner info: To determine the technical writer assigned to the Stage/Group associated with this page, see 
https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Add Docker executor support for a Windows version --- ================================================ FILE: docs-locale/ja-jp/development/internal/ci/packages_iteration.md ================================================ --- stage: Verify group: Runner info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Packages Iteration --- ================================================ FILE: docs-locale/ja-jp/development/internal/engineering/executor_interface/_index.md ================================================ --- stage: Verify group: Runner info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Internal Executor Interface --- ================================================ FILE: docs-locale/ja-jp/development/reviewing-gitlab-runner.md ================================================ --- stage: Verify group: Runner info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Reviewing GitLab Runner --- ================================================ FILE: docs-locale/ja-jp/executors/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: executor --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerはさまざまなexecutorを実装しています。これらのexecutorは、さまざまな環境でビルドを実行するために使用できます。 
どのexecutorを選択すればよいかわからない場合は、[executorを選択する](#selecting-the-executor)を参照してください。 各executorでサポートされている機能の詳細については、[互換性チャート](#compatibility-chart)を参照してください。 GitLab Runnerは次のexecutorを提供します。 - [SSH](ssh.md) - [Shell](shell.md) - [Parallels](parallels.md) - [VirtualBox](virtualbox.md) - [Docker](docker.md) - [Docker Autoscaler](docker_autoscaler.md) - [Docker Machine(オートスケーリング)](docker_machine.md) - [Kubernetes](kubernetes/_index.md) - [インスタンス](instance.md) - [カスタム](custom.md) これらのexecutorはロックされており、新規のexecutorの開発や受け入れは行っていません。詳細については、[新しいexecutorのコントリビュート](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/CONTRIBUTING.md#contributing-new-executors)を参照してください。 ## Docker以外のexecutorの前提条件 {#prerequisites-for-non-docker-executors} [ヘルパーイメージに依存しない](../configuration/advanced-configuration.md#helper-image)executorでは、ターゲットマシンと`PATH`にGitがインストールされている必要があります。常に[利用可能な最新バージョンのGit](https://git-scm.com/downloads/)を使用してください。 ターゲットマシンに[Git LFS](https://git-lfs.com/)がインストールされている場合、GitLab Runnerは`git lfs`コマンドを使用します。GitLab Runnerがこれらのexecutorを使用するすべてのシステムで、Git LFSが最新であることを確認してください。 `git lfs install`を使用して、GitLab Runnerコマンドを実行するユーザーに対してGit LFSを初期化してください。システム全体でGit LFSを初期化するには、`git lfs install --system`を使用します。 GitLabインスタンスとのGitインタラクションを認証するため、GitLab Runnerでは[`CI_JOB_TOKEN`](https://docs.gitlab.com/ci/jobs/ci_job_token/)を使用します。[FF_GIT_URLS_WITHOUT_TOKENS](../configuration/feature-flags.md)の設定によっては、Git認証情報のヘルパー([Git認証情報マネージャー](https://github.com/git-ecosystem/git-credential-manager)など)がインストールされていて、認証情報をキャッシュに入れるように設定されている場合、最後に使用された認証情報がそのヘルパーのキャッシュに入れられることがあります。 - [FF_GIT_URLS_WITHOUT_TOKENS](../configuration/feature-flags.md)が`false`なら、最後に使用された[`CI_JOB_TOKEN`](https://docs.gitlab.com/ci/jobs/ci_job_token/)が、インストール済みのGit認証情報ヘルパーに保存されます。 - [FF_GIT_URLS_WITHOUT_TOKENS](../configuration/feature-flags.md)が`true`なら、[`CI_JOB_TOKEN`](https://docs.gitlab.com/ci/jobs/ci_job_token/)は、インストール済みのGit認証情報ヘルパーに保存されず、そのキャッシュに入れられることもありません。 ## executorを選択する {#selecting-the-executor} 
executorは、プロジェクトをビルドするためのさまざまなプラットフォームと開発手法をサポートしています。次の表に、使用するexecutorを決定する際に役立つ各executorの重要な情報を示します。 | executor | SSH | Shell | VirtualBox | Parallels | Docker | Docker Autoscaler | インスタンス | Kubernetes | カスタム | |:-------------------------------------------------|:----:|:--------------:|:--------------:|:--------------:|:------:|:-----------------:|-------------------------:|:--------------:|:------------------------:| | すべてのビルドのためのクリーンなビルド環境 | ✗ | ✗ | ✓ | ✓ | ✓ | ✓ | 条件付き4 | ✓ | 条件付き4 | | 存在する場合は、以前のクローンを再利用する | ✓ | ✓ | ✗ | ✗ | ✓ | ✓ | 条件付き4 | ✓ 6 | 条件付き4 | | Runnerファイルシステムへのアクセスが保護されている5 | ✓ | ✗ | ✓ | ✓ | ✓ | ✓ | ✗ | ✓ | 条件付き | | Runnerマシンを移行する | ✗ | ✗ | 部分的 | 部分的 | ✓ | ✓ | ✓ | ✓ | ✓ | | 同時ビルドのゼロ設定サポート | ✗ | ✗ 1 | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | 条件付き4 | | 複雑なビルド環境 | ✗ | ✗ 2 | ✓ 3 | ✓ 3 | ✓ | ✓ | ✗ 2 | ✓ | ✓ | | ビルドの問題のデバッグ | 簡単 | 簡単 | 難しい | 難しい | 普通 | 普通 | 普通 | 普通 | 普通 | **補足説明**: 1. ビルドマシンにインストールされているサービスをビルドで使用する場合、executorを選択できますが、問題があります。 1. 依存関係を手動でインストールする必要があります。 1. たとえば、[Vagrant](https://developer.hashicorp.com/vagrant/docs/providers/virtualbox "VirtualBoxのVagrantドキュメント")を使用します。 1. プロビジョニングする環境によって異なります。完全に分離することも、ビルド間で共有することもできます。 1. Runnerのファイルシステムアクセスが保護されていない場合、ジョブはRunnerのトークンや他のジョブのキャッシュとコードなど、システム全体にアクセスできます。✓が付いているexecutorは、デフォルトではRunnerがファイルシステムにアクセスすることを許可していません。ただし、セキュリティ上の欠陥または特定の設定により、ジョブがコンテナからブレイクアウトし、Runnerをホスティングしているファイルシステムにアクセスする可能性があります。 1. 
[並行処理ごとの永続ビルドボリューム](kubernetes/_index.md#persistent-per-concurrency-build-volumes)設定が必要です。 ### Shell executor {#shell-executor} Shell executorは、GitLab Runnerの最もシンプルな設定オプションです。GitLab Runnerがインストールされているシステムでジョブをローカルに実行し、すべての依存関係を同じマシンに手動でインストールする必要があります。 このexecutorは、Linux、macOS、およびFreeBSDオペレーティングシステムではBashをサポートし、Windows環境ではPowerShellをサポートしています。 最小限の依存関係を持つビルドにとって理想的ですが、ジョブ間の分離は限定的です。 ### Docker executor {#docker-executor} Docker executorは、コンテナを介してクリーンなビルド環境を提供します。すべての依存関係がDockerイメージにパッケージ化されているため、依存関係を容易に管理できます。このexecutorを使用するには、RunnerホストにDockerがインストールされている必要があります。 このexecutorは、MySQLなどの追加の[サービス](https://docs.gitlab.com/ci/services/)をサポートしています。また、Podmanを代替コンテナランタイムとして受け入れます。 このexecutorは、一貫性のある分離されたビルド環境を保持します。 ### Docker Machine Executor(非推奨) {#docker-machine-executor-deprecated} {{< alert type="warning" >}} この機能はGitLab 17.5で[非推奨](https://gitlab.com/gitlab-org/gitlab/-/issues/498268)になりました。20.0で削除される予定です。代わりに[GitLab Runner Autoscaler](../runner_autoscale/_index.md)を使用してください。 {{< /alert >}} Docker Machine Executorは、オートスケーリングに対応しているDocker executorの特別なバージョンです。標準的なDocker executorと同様に動作しますが、Docker Machineによってオンデマンドで作成されたビルドホストを使用します。この機能により、このexecutorはAWS EC2などのクラウド環境で特に効果的であり、さまざまなワークロードに対して優れた分離性とスケーラビリティを提供します。 ### Docker Autoscaler executor {#docker-autoscaler-executor} Docker Autoscaler executorは、Runnerマネージャーが処理するジョブに対処するために、オンデマンドでインスタンスを作成するオートスケール対応のDocker executorです。[Docker executor](docker.md)をラップしているため、すべてのDocker executorのオプションと機能がサポートされています。 Docker Autoscalerは、[フリートプラグイン](https://gitlab.com/gitlab-org/fleeting/fleeting)を使用してオートスケールします。フリートとは、オートスケールされたインスタンスのグループの抽象化であり、Google Cloud、AWS、Azureなどのクラウドプロバイダーをサポートするプラグインを使用します。このexecutorは、動的なワークロードの要件がある環境に特に適しています。 ### インスタンスexecutor {#instance-executor} インスタンスexecutorは、Runnerマネージャーが処理するジョブの予期されるボリュームに対処するために、オンデマンドでインスタンスを作成するオートスケール対応のexecutorです。 このexecutorと、関連するDocker Autoscale executorは、GitLab RunnerフリートおよびTaskscalerテクノロジーと連携する新しいオートスケールexecutorです。 
インスタンスexecutorも[フリートプラグイン](https://gitlab.com/gitlab-org/fleeting/fleeting)を使用してオートスケールします。 ジョブがホストインスタンス、オペレーティングシステム、および接続デバイスへのフルアクセスを必要とする場合は、インスタンスexecutorを使用できます。インスタンスexecutorは、シングルテナントジョブとマルチテナントジョブに対応するように設定することもできます。 ### Kubernetes executor {#kubernetes-executor} ビルドに既存のKubernetesクラスターを使用する場合にKubernetes executorを使用できます。このexecutorはKubernetesクラスターAPIを呼び出して、各GitLab CI/CDジョブの新しいポッド(ビルドコンテナとサービスコンテナを含む)を作成します。このexecutorは、クラウドネイティブ環境に特に適しており、優れたスケーラビリティとリソース利用率を実現します。 ### SSH executor {#ssh-executor} SSH executorは完全性を期すために追加されましたが、サポートが最も少ないexecutorの1つです。SSH executorを使用すると、GitLab Runnerは外部サーバーに接続し、そこでビルドを実行します。このexecutorを使用している組織からの成功事例がいくつかありますが、通常は他のタイプのexecutorを使用してください。 ### カスタムexecutor {#custom-executor} カスタムexecutorを使用すると、独自の実行環境を指定できます。GitLab Runnerがexecutor(Linuxコンテナなど)を提供しない場合、カスタムの実行可能ファイルを使用して環境をプロビジョニングおよびクリーンアップできます。 ## 互換性チャート {#compatibility-chart} 各種executorでサポートされている機能を以下に示します。 | executor | SSH | Shell | VirtualBox | Parallels | Docker | Docker Autoscaler | インスタンス | Kubernetes | カスタム | |:---------------------------------------------|:--------------:|:--------------:|:--------------:|:--------------:|:-------:|:-----------------:|:--------------:| :---------:| :-----------------------------------------------------------:| | セキュア変数 | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | | `.gitlab-ci.yml`: イメージ | ✗ | ✗ | ✓(1) | ✓(1) | ✓ | ✓ | ✗ | ✓ | ✓([`$CUSTOM_ENV_CI_JOB_IMAGE`](custom.md#stages)を使用) | | `.gitlab-ci.yml`: サービス | ✗ | ✗ | ✗ | ✗ | ✓ | ✓ | ✗ | ✓ | ✓ | | `.gitlab-ci.yml`: キャッシュ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | | `.gitlab-ci.yml`: アーティファクト | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | | ステージ間のアーティファクトの受け渡し | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | | GitLabコンテナレジストリのプライベートイメージを使用する | 該当なし | 該当なし | 該当なし | 該当なし | ✓ | ✓ | 該当なし | ✓ | 該当なし | | インタラクティブWebターミナル | ✗ | ✓(UNIX) | ✗ | ✗ | ✓ | ✗ | ✗ | ✓ | ✗ | 1. 
GitLab Runner 14.2でサポートが[追加](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/1257)されました。詳細については、[ベースVMイメージの上書き](../configuration/advanced-configuration.md#overriding-the-base-vm-image)セクションを参照してください。 各種Shellでサポートされているシステムを以下に示します。 | Shell | Bash | PowerShell Desktop | PowerShell Core | Windows Batch(非推奨) | |:-------:|:-----------:|:------------------:|:---------------:|:--------------------------:| | Windows | ✗(4) | ✓(3) | ✓ | ✓(2) | | Linux | ✓(1) | ✗ | ✓ | ✗ | | macOS | ✓(1) | ✗ | ✓ | ✗ | | FreeBSD | ✓(1) | ✗ | ✗ | ✗ | 1. デフォルトのShell。 1. 非推奨。[`shell`](../configuration/advanced-configuration.md#the-runners-section)が指定されていない場合のデフォルトのShell。 1. 新しいRunnerの登録時のデフォルトのShell。 1. WindowsのBash Shellはサポートされていません。 各種ShellによりサポートされているインタラクティブWebターミナルのシステムを以下に示します。 | Shell | Bash | PowerShell Desktop | PowerShell Core | Windows Batch(非推奨) | |:-------:|:-----------:|:---------------------:|:------------------:|:--------------------------:| | Windows | ✗ | ✗ | ✗ | ✗ | | Linux | ✓ | ✗ | ✗ | ✗ | | macOS | ✓ | ✗ | ✗ | ✗ | | FreeBSD | ✓ | ✗ | ✗ | ✗ | ```mermaid flowchart LR Start([Executor
Selection]) --> Auto{Autoscaling?} Auto -->|YES| Platform{Platform?} Auto -->|NO| BuildType{Build
Type?} Platform -->|Cloud
Native| K8s[Kubernetes] Platform -->|Cloud
VMs| OS1{OS?} OS1 -->|Linux| L1[Fleeting:
Docker Autoscaler
or Instance] OS1 -->|macOS| M1[Fleeting:
Docker Autoscaler
or Instance] OS1 -->|Windows| W1[Fleeting:
Docker Autoscaler
or Instance] BuildType -->|Container| OS2{OS?} BuildType -->|Shell| OS3{OS?} OS2 -->|Linux| L2[Docker
Podman] OS2 -->|macOS| M2[Docker] OS2 -->|Windows| W2[Docker] OS3 -->|Linux| L3[Bash
Zsh] OS3 -->|macOS| M3[Bash
Zsh] OS3 -->|Windows| W3[PowerShell 5.1
PowerShell 7.x] OS3 -->|Remote| R3[SSH] classDef question fill:#e1f3fe,stroke:#333,stroke-width:2px,color:#000 classDef result fill:#dcffe4,stroke:#333,stroke-width:2px,color:#000 classDef start fill:#f9f9f9,stroke:#fff,stroke-width:2px,color:#000 class Start start; class Auto,Platform,BuildType,OS1,OS2,OS3 question; class K8s,L1,M1,W1,L2,M2,W2,L3,M3,W3,R3 result; ``` ================================================ FILE: docs-locale/ja-jp/executors/custom.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: カスタムexecutor --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerは、ネイティブでサポートされていない環境向けに、Custom executorを提供します。例: `LXD`、`Libvirt`。 GitLab Runnerを設定して、プロビジョニング、実行、および環境のクリーンアップを行う実行可能ファイルを指定することで、独自のexecutorを作成できます。 カスタムexecutor用に設定したスクリプトは、`Drivers`と呼ばれます。たとえば、[`LXD`ドライバー](custom_examples/lxd.md)や[`Libvirt`ドライバー](custom_examples/libvirt.md)を作成できます。 ## 設定 {#configuration} いくつかの設定キーから選択できます。そのうちのいくつかはオプションです。 以下に、使用可能なすべての設定キーを使用した、カスタムexecutorの設定の例を示します: ```toml [[runners]] name = "custom" url = "https://gitlab.com" token = "TOKEN" executor = "custom" builds_dir = "/builds" cache_dir = "/cache" shell = "bash" [runners.custom] config_exec = "/path/to/config.sh" config_args = [ "SomeArg" ] config_exec_timeout = 200 prepare_exec = "/path/to/script.sh" prepare_args = [ "SomeArg" ] prepare_exec_timeout = 200 run_exec = "/path/to/binary" run_args = [ "SomeArg" ] cleanup_exec = "/path/to/executable" cleanup_args = [ "SomeArg" ] cleanup_exec_timeout = 200 graceful_kill_timeout = 200 force_kill_timeout = 200 ``` フィールドの定義と必要なフィールドについては、[`[runners.custom]`セクション](../configuration/advanced-configuration.md#the-runnerscustom-section)の設定を参照してください。 
さらに、[`[[runners]]`](../configuration/advanced-configuration.md#the-runners-section)内の`builds_dir`と`cache_dir`の両方が必須フィールドです。 ## ジョブを実行するための前提条件となるソフトウェア {#prerequisite-software-for-running-a-job} ユーザーは、`PATH`に存在する必要がある以下を含む環境をセットアップする必要があります: - [Git](https://git-scm.com/download)と[Git LFS](https://git-lfs.com/) :[共通の前提条件](_index.md#prerequisites-for-non-docker-executors)を参照してください。 - [GitLab Runner](../install/_index.md): アーティファクトとキャッシュをダウンロード/更新するために使用されます。 ## ステージ {#stages} Custom executorは、ジョブの詳細を設定し、環境を準備およびクリーンアップし、ジョブスクリプトを実行するためのステージを提供します。各ステージは特定のことを担当し、留意すべき点が異なります。 Custom executorによって実行される各ステージは、組み込みのGitLab Runner executorが実行するタイミングで実行されます。 実行される各ステップは、実行中のジョブに関する情報を提供する特定の環境変数にアクセスできます。すべてのステージで、次の環境変数を使用できます: - 標準のCI/CD [環境変数](https://docs.gitlab.com/ci/variables/) ([定義済み変数](https://docs.gitlab.com/ci/variables/predefined_variables/)を含む)。 - Custom executor Runnerホストシステムによって提供されるすべての環境変数。 - すべてのサービスとそれらの[利用可能な設定](https://docs.gitlab.com/ci/services/#available-settings-for-services)。`CUSTOM_ENV_CI_JOB_SERVICES`としてJSON形式で公開されます。 CI/CD環境変数と定義済み変数の両方に、システムの環境変数との競合を防ぐために`CUSTOM_ENV_`というプレフィックスが付きます。たとえば、`CI_BUILDS_DIR`は`CUSTOM_ENV_CI_BUILDS_DIR`として利用できます。 ステージは次の順序で実行されます: 1. `config_exec` 1. `prepare_exec` 1. `run_exec` 1. 
`cleanup_exec` ### サービス {#services} [サービス](https://docs.gitlab.com/ci/services/)は、`CUSTOM_ENV_CI_JOB_SERVICES`としてJSON配列で公開されます。 次に例を示します: ```yaml custom: script: - echo $CUSTOM_ENV_CI_JOB_SERVICES services: - redis:latest - name: my-postgres:9.4 alias: pg entrypoint: ["path", "to", "entrypoint"] command: ["path", "to", "cmd"] ``` 上記の例では、`CUSTOM_ENV_CI_JOB_SERVICES`環境変数に次の値を設定します: ```json [{"name":"redis:latest","alias":"","entrypoint":null,"command":null},{"name":"my-postgres:9.4","alias":"pg","entrypoint":["path","to","entrypoint"],"command":["path","to","cmd"]}] ``` ### 設定 {#config} 設定ステージは、`config_exec`によって実行されます。 実行時にいくつかの設定を設定したい場合があります。たとえば、プロジェクトIDに基づいてビルドディレクトリを設定します。`config_exec`は、STDOUTから読み取り、特定のキーを持つ有効なJSON文字列を予期します。 次に例を示します: ```shell #!/usr/bin/env bash cat << EOS { "builds_dir": "/builds/${CUSTOM_ENV_CI_CONCURRENT_PROJECT_ID}/${CUSTOM_ENV_CI_PROJECT_PATH_SLUG}", "cache_dir": "/cache/${CUSTOM_ENV_CI_CONCURRENT_PROJECT_ID}/${CUSTOM_ENV_CI_PROJECT_PATH_SLUG}", "builds_dir_is_shared": true, "hostname": "custom-hostname", "driver": { "name": "test driver", "version": "v0.0.1" }, "job_env" : { "CUSTOM_ENVIRONMENT": "example" }, "shell": "bash" } EOS ``` JSON文字列内の追加のキーはすべて無視されます。有効なJSON文字列でない場合、ステージは失敗し、さらに2回再試行されます。 | パラメータ | 型 | 必須 | 空にすることが許可されています | 説明 | |------------------------|---------|----------|----------------|-------------| | `builds_dir` | 文字列 | ✗ | ✗ | ジョブの作業ディレクトリが作成されるベースディレクトリ。 | | `cache_dir` | 文字列 | ✗ | ✗ | ローカルキャッシュが格納されるベースディレクトリ。 | | `builds_dir_is_shared` | ブール値 | ✗ | 該当なし | 同時ジョブ間で環境が共有されるかどうかを定義します。 | | `hostname` | 文字列 | ✗ | ✓ | Runnerによって格納されるジョブの「メタデータ」に関連付けるホスト名。未定義の場合、ホスト名は設定されません。 | | `driver.name` | 文字列 | ✗ | ✓ | ドライバーのユーザー定義名。`Using custom executor...`行と一緒に出力されます。未定義の場合、ドライバーに関する情報は出力されません。 | | `driver.version` | 文字列 | ✗ | ✓ | ドライバーのユーザー定義バージョン。`Using custom executor...`行と一緒に出力されます。未定義の場合、名前情報のみが出力されます。 | | `job_env` | オブジェクト | ✗ | ✓ | 
ジョブ実行の後続のすべてのステージで、環境変数を介して使用できる名前と値のペア。それらは、ジョブではなく、ドライバーで使用できます。詳細については、[`job_env`の使用方法](#job_env-usage)を参照してください。 | | `shell` | 文字列 | ✗ | ✓ | ジョブスクリプトの実行に使用されるシェル。 | 実行可能ファイルの`STDERR`は、ジョブログに出力されます。 [`config_exec_timeout`](../configuration/advanced-configuration.md#the-runnerscustom-section)を設定して、プロセスを終了する前に、GitLab RunnerがJSON文字列の読み取りを待機する時間の上限を設定できます。 [`config_args`](../configuration/advanced-configuration.md#the-runnerscustom-section)を定義すると、定義した順序で`config_exec`実行可能ファイルに追加されます。たとえば、次の`config.toml`コンテンツがあるとします: ```toml ... [runners.custom] ... config_exec = "/path/to/config" config_args = [ "Arg1", "Arg2" ] ... ``` GitLab Runnerは、`/path/to/config Arg1 Arg2`として実行します。 #### `job_env`の使用法 {#job_env-usage} `job_env`設定の主な目的は、ジョブ実行の後続のステージのために、**カスタムexecutorドライバー呼び出しのコンテキストに**変数を渡すことです。 たとえば、ジョブ実行環境との接続で、いくつかの認証情報の準備が必要なドライバー。この操作はコストがかかります。ドライバーは、環境に接続する前に、ローカルプロバイダーから一時的なSSH認証情報をリクエストする必要があります。 カスタムexecutor実行フローでは、各ジョブ実行[ステージ](#stages) (`prepare`、複数の`run`呼び出し、および`cleanup`) は、独自のコンテキストを持つ個別の実行として実行されます。認証情報を解決する例では、認証情報プロバイダーへの接続を毎回行う必要があります。 この操作にコストがかかる場合は、ジョブの実行全体に対して1回実行し、すべてのジョブ実行ステージに対して認証情報を再利用します。`job_env`はここで役立ちます。これにより、`config_exec`呼び出し中にプロバイダーと1回接続し、`job_env`で受信した認証情報を渡すことができます。次に、カスタムexecutorが[`prepare_exec`](#prepare) 、[`run_exec`](#run) 、および[`cleanup_exec`](#cleanup)に呼び出しを行う変数のリストに追加されます。これにより、認証情報プロバイダーに毎回接続する代わりに、ドライバーは変数を読み取り、存在する認証情報を使用するだけです。 理解しておくべき重要なことは、**変数はジョブ自体では自動的に利用できない**ということです。これは、カスタムexecutorドライバーがどのように実装されているかに完全に依存し、多くの場合、そこには存在しません。 `job_env`設定を使用して、特定のRunnerによって実行されるすべてのジョブに変数のセットを渡す方法については、[`environment`設定(`[[runners]]`から)](../configuration/advanced-configuration.md#the-runners-section)を参照してください。 変数が動的で、ジョブ間で値が変化する可能性がある場合は、ドライバーの実装で、`job_env`によって渡される変数を実行呼び出しに追加するようにしてください。 ### 準備 {#prepare} 準備ステージは、`prepare_exec`によって実行されます。 この時点で、GitLab Runnerはジョブ(どこでどのように実行されるか)に関するすべてを認識しています。残っているのは、ジョブを実行できるように、環境をセットアップすることだけです。GitLab Runnerは、`prepare_exec`で指定された実行可能ファイルを実行します。 
このアクションは、環境(たとえば、仮想マシンまたはコンテナ、サービスなどを作成する)のセットアップを担当します。これが完了すると、環境はジョブを実行する準備ができていると予想されます。 このステージは、ジョブの実行で1回だけ実行されます。 [`prepare_exec_timeout`](../configuration/advanced-configuration.md#the-runnerscustom-section)を設定して、GitLab Runnerがプロセスを終了する前に環境の準備を待機する時間の上限を設定できます。 この実行可能ファイルから返された`STDOUT`と`STDERR`は、ジョブログに出力されます。 [`prepare_args`](../configuration/advanced-configuration.md#the-runnerscustom-section)を定義すると、定義した順序で`prepare_exec`実行可能ファイルに追加されます。たとえば、次の`config.toml`コンテンツがあるとします: ```toml ... [runners.custom] ... prepare_exec = "/path/to/bin" prepare_args = [ "Arg1", "Arg2" ] ... ``` GitLab Runnerは、`/path/to/bin Arg1 Arg2`として実行します。 ### 実行 {#run} 実行ステージは`run_exec`によって実行されます。 この実行可能ファイルから返された`STDOUT`と`STDERR`は、ジョブログに出力されます。 他のステージとは異なり、`run_exec`ステージは複数回実行されます。これは、以下のサブステージに分割され、順番にリストされているためです: 1. `prepare_script` 1. `get_sources` 1. `restore_cache` 1. `download_artifacts` 1. `step_*` 1. `build_script` 1. `step_*` 1. `after_script` 1. `archive_cache`または`archive_cache_on_failure` 1. `upload_artifacts_on_success`または`upload_artifacts_on_failure` 1. `cleanup_file_variables` 上記の各ステージでは、`run_exec`実行可能ファイルは以下で実行されます: - 通常の環境変数。 - 2つの引数: - GitLab Runnerがカスタムexecutorの実行用に作成するスクリプトへのパス。 - ステージの名前。 次に例を示します: ```shell /path/to/run_exec.sh /path/to/tmp/script1 prepare_executor /path/to/run_exec.sh /path/to/tmp/script1 prepare_script /path/to/run_exec.sh /path/to/tmp/script1 get_sources ``` `run_args`が定義されている場合、これらは`run_exec`実行可能ファイルに渡される最初の引数のセットであり、GitLab Runnerがその他を追加します。たとえば、次の`config.toml`があるとします: ```toml ... [runners.custom] ... run_exec = "/path/to/run_exec.sh" run_args = [ "Arg1", "Arg2" ] ...
``` GitLab Runnerは、次の引数で実行可能ファイルを実行します: ```shell /path/to/run_exec.sh Arg1 Arg2 /path/to/tmp/script1 prepare_executor /path/to/run_exec.sh Arg1 Arg2 /path/to/tmp/script1 prepare_script /path/to/run_exec.sh Arg1 Arg2 /path/to/tmp/script1 get_sources ``` この実行可能ファイルは、最初の引数で指定されたスクリプトを実行する役割を担う必要があります。これらには、クローン作成、アーティファクトのダウンロード、ユーザースクリプトの実行、および以下に説明するその他すべてのステップを実行するために、GitLab Runner executorが実行するすべてのスクリプトが含まれています。スクリプトは、次のシェルにすることができます: - Bash - PowerShell Desktop - PowerShell Core - バッチ処理(非推奨) スクリプトは、[`[[runners]]`](../configuration/advanced-configuration.md#the-runners-section)内の`shell`によって設定されたシェルを使用して生成します。何も指定されていない場合は、OSプラットフォームのデフォルトが使用されます。 下の表は、各スクリプトが何を行い、そのスクリプトの主な目的が何かを詳細に説明したものです。 | スクリプト名 | スクリプトの内容 | |-------------------------------|-----------------| | `prepare_script` | ジョブが実行されているマシンに関するデバッグ情報。 | | `get_sources` | Git設定を準備し、リポジトリをクローン/フェッチします。GitLabが提供するGit戦略のすべてのメリットが得られるため、これをそのままにしておくことをお勧めします。 | | `restore_cache` | キャッシュが定義されている場合は、展開します。これには、`gitlab-runner`バイナリが`$PATH`で使用可能であることが必要です。 | | `download_artifacts` | アーティファクトが定義されている場合は、ダウンロードします。これには、`gitlab-runner`バイナリが`$PATH`で使用可能であることが必要です。 | | `step_*` | GitLabによって生成されます。実行するスクリプトのセット。カスタムexecutorに送信されない場合があります。`step_release`や`step_accessibility`など、複数のステップがある場合があります。これは、`.gitlab-ci.yml`ファイルの機能である可能性があります。 | | `after_script` | ジョブから定義された[`after_script`](https://docs.gitlab.com/ci/yaml/#before_script-and-after_script)。このスクリプトは、以前のステップのいずれかが失敗した場合でも、常に呼び出しされます。 | | `archive_cache` | キャッシュが定義されている場合は、すべてのキャッシュのアーカイブを作成します。`build_script`が成功した場合にのみ実行されます。 | | `archive_cache_on_failure` | キャッシュが定義されている場合は、すべてのキャッシュのアーカイブを作成します。`build_script`が失敗した場合にのみ実行されます。 | | `upload_artifacts_on_success` | アーティファクトが定義されている場合は、アップロードします。`build_script`が成功した場合にのみ実行されます。 | | `upload_artifacts_on_failure` | アーティファクトが定義されている場合は、アップロードします。`build_script`が失敗した場合にのみ実行されます。 | | `cleanup_file_variables` | ディスクからすべての[ファイルベース](https://docs.gitlab.com/ci/variables/#custom-environment-variables-of-type-file)変数を削除します。 | ### 
クリーンアップ {#cleanup} クリーンアップステージは`cleanup_exec`によって実行されます。 この最後のステージは、以前のステージのいずれかが失敗した場合でも実行されます。このステージの主な目標は、セットアップされた可能性のある環境をクリーンアップすることです。たとえば、VMをオフにするか、コンテナを削除します。 `cleanup_exec`の結果は、ジョブのステータスに影響を与えません。たとえば、次のことが発生した場合でも、ジョブは成功としてマークされます: - `prepare_exec`と`run_exec`の両方が成功します。 - `cleanup_exec`が失敗します。 [`cleanup_exec_timeout`](../configuration/advanced-configuration.md#the-runnerscustom-section)を設定して、GitLab Runnerがプロセスを終了する前に環境のクリーンアップを待機する時間の上限を設定できます。 この実行可能ファイルの`STDOUT`は、`DEBUG`レベルでGitLab Runnerログに出力されます。`STDERR`は、`WARN`レベルでログに出力されます。 [`cleanup_args`](../configuration/advanced-configuration.md#the-runnerscustom-section)を定義すると、定義した順序で`cleanup_exec`実行可能ファイルに追加されます。たとえば、次の`config.toml`コンテンツがあるとします: ```toml ... [runners.custom] ... cleanup_exec = "/path/to/bin" cleanup_args = [ "Arg1", "Arg2" ] ... ``` GitLab Runnerは、`/path/to/bin Arg1 Arg2`として実行します。 ## 実行可能ファイルの終了と強制終了 {#terminating-and-killing-executables} GitLab Runnerは、次のいずれかの条件で、実行可能ファイルを正常に終了しようとします: - `config_exec_timeout`、`prepare_exec_timeout`、または`cleanup_exec_timeout`が満たされた場合。 - ジョブが[タイムアウト](https://docs.gitlab.com/ci/pipelines/settings/#set-a-limit-for-how-long-jobs-can-run)します。 - ジョブがキャンセルされました。 タイムアウトに達すると、`SIGTERM`が実行可能ファイルに送信され、[`exec_terminate_timeout`](../configuration/advanced-configuration.md#the-runnerscustom-section)のカウントダウンが開始されます。実行可能ファイルは、このシグナルをリッスンして、リソースをクリーンアップするようにする必要があります。`exec_terminate_timeout`が経過してもプロセスが実行中の場合は、`SIGKILL`がプロセスを強制終了するために送信され、[`exec_force_kill_timeout`](../configuration/advanced-configuration.md#the-runnerscustom-section)が開始されます。`exec_force_kill_timeout`が完了した後もプロセスが実行中の場合、GitLab Runnerはプロセスを中断し、停止または強制終了を試行しなくなります。これらのタイムアウトの両方が`config_exec`、`prepare_exec`、または`run_exec`中に発生した場合、ビルドは失敗としてマークされます。 ドライバーによって起動された子プロセスも、上記のUNIXベースのシステムで説明されている正常終了プロセスを受け取ります。これは、メインプロセスを、すべての子プロセスが属する[プロセスグループ](https://man7.org/linux/man-pages/man2/setpgid.2.html)として設定することで実現されます。 ## エラー処理 {#error-handling} GitLab 
Runnerは、2種類のエラーを異なる方法で処理できます。これらのエラーは、`config_exec`、`prepare_exec`、`run_exec`、および`cleanup_exec`内の実行可能ファイルがこれらのコードで終了した場合にのみ処理されます。ユーザーがゼロ以外の終了コードで終了した場合、以下のエラーコードのいずれかとして伝播される必要があります。 ユーザースクリプトがこれらのコードの1つで終了した場合、実行可能ファイルの終了コードに伝播される必要があります。 ### ビルドの失敗 {#build-failure} GitLab Runnerは、ジョブの失敗を示す終了コードとして実行可能ファイルが使用する必要がある`BUILD_FAILURE_EXIT_CODE`環境変数を提供します。実行可能ファイルが`BUILD_FAILURE_EXIT_CODE`のコードで終了した場合、ビルドはGitLab CIで適切に失敗としてマークされます。 ユーザーが`.gitlab-ci.yml`ファイル内で定義するスクリプトがゼロ以外のコードで終了した場合、`run_exec`は`BUILD_FAILURE_EXIT_CODE`値で終了する必要があります。 {{< alert type="note" >}} ハードコードされた値の代わりに`BUILD_FAILURE_EXIT_CODE`を使用することを強く推奨します。これは、すべてのリリースで変更される可能性があり、バイナリ/スクリプトの将来性を保証するためです。 {{< /alert >}} ### ビルド失敗の終了コード {#build-failure-exit-code} ビルドが失敗した場合に終了コードを含むファイルをオプションで指定できます。ファイルの予期されるパスは、`BUILD_EXIT_CODE_FILE`環境変数を介して提供されます。次に例を示します: ```shell if [ $exit_code -ne 0 ]; then echo $exit_code > ${BUILD_EXIT_CODE_FILE} exit ${BUILD_FAILURE_EXIT_CODE} fi ``` CI/CDジョブは、[`allow_failure`](https://docs.gitlab.com/ci/yaml/#allow_failure)構文を利用するために、このメソッドを必要とします。 {{< alert type="note" >}} このファイルには、整数の終了コードのみを保存してください。追加情報があると、`unknown Custom executor executable exit code`エラーが発生する可能性があります。 {{< /alert >}} ### システム失敗 {#system-failure} `SYSTEM_FAILURE_EXIT_CODE`で指定されたエラーコードでプロセスを終了することにより、システム失敗をRunnerに送信できます。このエラーコードが返された場合、Runnerは特定のステージを再試行します。再試行が成功しない場合、ジョブは失敗としてマークされます。 以下は、どのステージが再試行されるか、および再試行回数を示す表です。 | ステージ名 | 試行回数 | 各再試行の間隔 | |----------------------|-------------------------------------------------------------|-------------------------------------| | `prepare_exec` | 3 | 3秒 | | `get_sources` | `GET_SOURCES_ATTEMPTS`変数の値。(デフォルトは1です)。 | 0秒 | | `restore_cache` | `RESTORE_CACHE_ATTEMPTS`変数の値。(デフォルトは1です)。 | 0秒 | | `download_artifacts` | `ARTIFACT_DOWNLOAD_ATTEMPTS`変数の値。(デフォルトは1です)。 | 0秒 | {{< alert type="note" >}} ハードコードされた値の代わりに`SYSTEM_FAILURE_EXIT_CODE`を使用することを強く推奨します。これは、すべてのリリースで変更される可能性があり、バイナリ/スクリプトの将来性を保証するためです。 {{< /alert >}} ## ジョブの応答 {#job-response} 
`CUSTOM_ENV_`変数は、ドキュメント化された[CI/CD変数の優先順位](https://docs.gitlab.com/ci/variables/#cicd-variable-precedence)に従うため、ジョブレベルで変更できます。この機能は望ましい場合がありますが、信頼できるジョブコンテキストが必要な場合は、完全なJSONジョブ応答が自動的に提供されます。Runnerは一時ファイルを生成します。これは、`JOB_RESPONSE_FILE`環境変数で参照されます。このファイルはすべてのステージに存在し、クリーンアップ中に自動的に削除されます。 ```shell $ cat ${JOB_RESPONSE_FILE} {"id": 123456, "token": "jobT0ken",...} ``` ================================================ FILE: docs-locale/ja-jp/executors/custom_examples/libvirt.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Custom executorでlibvirtを使用する --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} [libvirt](https://libvirt.org/)を使用すると、Custom executorドライバーは、実行するジョブごとに新しいディスクとVMを作成し、その後、ディスクとVMは削除されます。 このドキュメントでは、libvirtのセットアップ方法については、スコープ外であるため説明しません。ただし、このドライバーはGCPの[ネストされた仮想化](https://docs.cloud.google.com/compute/docs/instances/nested-virtualization/overview)を使用してテストされており、ブリッジネットワーキングで[libvirtをセットアップする方法の詳細](https://docs.cloud.google.com/compute/docs/instances/nested-virtualization/overview#starting_a_private_bridge_between_the_host_and_nested_vms)も記載されています。この例では、libvirtのインストール時に付属する`default`ネットワークを使用するため、実行されていることを確認してください。 このドライバーはブリッジネットワーキングを必要とします。GitLab RunnerがSSH経由でVM内のコマンドを実行できるように、各VMが専用のIPアドレスを持つ必要があるためです。SSHキーは、[次のコマンドを使用して](https://docs.gitlab.com/user/ssh/#generate-an-ssh-key-pair)生成できます。 依存関係がすべてのビルドでダウンロードされないように、ベースディスクVMイメージが作成されます。次の例では、ディスクVMイメージを作成するために[virt-builder](https://libguestfs.org/virt-builder.1.html)が使用されています。 ```shell virt-builder debian-12 \ --size 8G \ --output /var/lib/libvirt/images/gitlab-runner-base.qcow2 \ --format qcow2 \ --hostname gitlab-runner-bookworm \ --network \ --install curl \ --run-command 'curl -L 
"https://packages.gitlab.com/install/repositories/runner/gitlab-runner/script.deb.sh" | bash' \ --run-command 'curl -s "https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh" | bash' \ --run-command 'useradd -m -p "" gitlab-runner -s /bin/bash' \ --install gitlab-runner,git,git-lfs,openssh-server \ --run-command "git lfs install --skip-repo" \ --ssh-inject gitlab-runner:file:/root/.ssh/id_rsa.pub \ --run-command "echo 'gitlab-runner ALL=(ALL) NOPASSWD: ALL' >> /etc/sudoers" \ --run-command "sed -E 's/GRUB_CMDLINE_LINUX=\"\"/GRUB_CMDLINE_LINUX=\"net.ifnames=0 biosdevname=0\"/' -i /etc/default/grub" \ --run-command "grub-mkconfig -o /boot/grub/grub.cfg" \ --run-command "echo 'auto eth0' >> /etc/network/interfaces" \ --run-command "echo 'allow-hotplug eth0' >> /etc/network/interfaces" \ --run-command "echo 'iface eth0 inet dhcp' >> /etc/network/interfaces" ``` 上記のコマンドは、以前に[前提条件](../custom.md#prerequisite-software-for-running-a-job)で指定したすべてのものをインストールします。 `virt-builder`は、最後に出力されるルートパスワードを自動的に設定します。パスワードを自分で指定する場合は、[`--root-password password:$SOME_PASSWORD`](https://libguestfs.org/virt-builder.1.html#setting-the-root-password)を渡します。 ## 設定 {#configuration} 以下は、libvirtのGitLab Runner設定の例です: ```toml concurrent = 1 check_interval = 0 [session_server] session_timeout = 1800 [[runners]] name = "libvirt-driver" url = "https://gitlab.com/" token = "xxxxx" executor = "custom" builds_dir = "/home/gitlab-runner/builds" cache_dir = "/home/gitlab-runner/cache" [runners.custom_build_dir] [runners.cache] [runners.cache.s3] [runners.cache.gcs] [runners.custom] prepare_exec = "/opt/libvirt-driver/prepare.sh" # Path to a bash script to create VM. run_exec = "/opt/libvirt-driver/run.sh" # Path to a bash script to run script inside of VM over ssh. cleanup_exec = "/opt/libvirt-driver/cleanup.sh" # Path to a bash script to delete VM and disks. 
``` ## Base {#base} 各ステージ([prepare](#prepare) 、[run](#run) 、および[cleanup](#cleanup))は、他のスクリプト全体で使用される変数を生成するために、以下のベーススクリプトを使用します。 このスクリプトが他のスクリプトと同じディレクトリにあることが重要です。この場合、`/opt/libvirt-driver/`です。 ```shell #!/usr/bin/env bash # /opt/libvirt-driver/base.sh VM_IMAGES_PATH="/var/lib/libvirt/images" BASE_VM_IMAGE="$VM_IMAGES_PATH/gitlab-runner-base.qcow2" VM_ID="runner-$CUSTOM_ENV_CI_RUNNER_ID-project-$CUSTOM_ENV_CI_PROJECT_ID-concurrent-$CUSTOM_ENV_CI_CONCURRENT_PROJECT_ID-job-$CUSTOM_ENV_CI_JOB_ID" VM_IMAGE="$VM_IMAGES_PATH/$VM_ID.qcow2" _get_vm_ip() { virsh -q domifaddr "$VM_ID" | awk '{print $4}' | sed -E 's|/([0-9]+)?$||' } ``` ## Prepare {#prepare} 準備スクリプト: - ディスクを新しいパスにコピーします。 - コピーされたディスクから新しいVMをインストールします。 - VMがIPを取得するのを待ちます。 - VMでSSHが応答するのを待ちます。 ```shell #!/usr/bin/env bash # /opt/libvirt-driver/prepare.sh currentDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" source ${currentDir}/base.sh # Get variables from base script. set -eo pipefail # trap any error, and mark it as a system failure. trap "exit $SYSTEM_FAILURE_EXIT_CODE" ERR # Copy base disk to use for Job. qemu-img create -f qcow2 -b "$BASE_VM_IMAGE" "$VM_IMAGE" -F qcow2 # Install the VM # To boot VM in UEFI mode, add: --boot uefi virt-install \ --name "$VM_ID" \ --os-variant debian11 \ --disk "$VM_IMAGE" \ --import \ --vcpus=2 \ --ram=2048 \ --network default \ --graphics none \ --noautoconsole # Wait for VM to get IP echo 'Waiting for VM to get IP' for i in $(seq 1 300); do VM_IP=$(_get_vm_ip) if [ -n "$VM_IP" ]; then echo "VM got IP: $VM_IP" break fi if [ "$i" == "300" ]; then echo 'Waited 300 seconds for VM to start, exiting...' # Inform GitLab Runner that this is a system failure, so it # should be retried. 
exit "$SYSTEM_FAILURE_EXIT_CODE" fi sleep 1s done # Wait for ssh to become available echo "Waiting for sshd to be available" for i in $(seq 1 300); do if ssh -i /root/.ssh/id_rsa -o StrictHostKeyChecking=no gitlab-runner@$VM_IP >/dev/null 2>/dev/null; then break fi if [ "$i" == "300" ]; then echo 'Waited 300 seconds for sshd to start, exiting...' # Inform GitLab Runner that this is a system failure, so it # should be retried. exit "$SYSTEM_FAILURE_EXIT_CODE" fi sleep 1s done ``` ## Run {#run} これにより、SSHを介して`STDIN`経由でスクリプトのコンテンツをVMに送信することにより、GitLab Runnerによって生成されたスクリプトが実行されます。 ```shell #!/usr/bin/env bash # /opt/libvirt-driver/run.sh currentDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" source ${currentDir}/base.sh # Get variables from base script. VM_IP=$(_get_vm_ip) ssh -i /root/.ssh/id_rsa -o StrictHostKeyChecking=no gitlab-runner@$VM_IP /bin/bash < "${1}" if [ $? -ne 0 ]; then # Exit using the variable, to make the build as failure in GitLab # CI. exit "$BUILD_FAILURE_EXIT_CODE" fi ``` ## Cleanup {#cleanup} このスクリプトは、VMを削除し、ディスクを削除します。 ```shell #!/usr/bin/env bash # /opt/libvirt-driver/cleanup.sh currentDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" source ${currentDir}/base.sh # Get variables from base script. set -eo pipefail # Destroy VM and wait 300 second. for i in $(seq 1 300); do virsh destroy "$VM_ID" >/dev/null 2>&1 if [[ "$(virsh domstate "$VM_ID" 2>/dev/null | tr '[:upper:]' '[:lower:]')" =~ shut\ off|destroyed|^$ ]]; then break fi if [ $i -eq 300 ]; then exit "$SYSTEM_FAILURE_EXIT_CODE" fi sleep 1 done # Undefine VM. virsh undefine "$VM_ID" || virsh undefine "$VM_ID" --nvram # Delete VM disk. 
if [ -f "$VM_IMAGE" ]; then rm "$VM_IMAGE" fi ``` ================================================ FILE: docs-locale/ja-jp/executors/custom_examples/lxd.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: カスタムexecutorでLXDを使用する --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} この例では、LXDを使用してビルドごとにコンテナを作成し、後でそれを削除します。 この例では、各ステージングにbashスクリプトを使用します。独自のイメージを指定できます。これは[CI_JOB_IMAGE](https://docs.gitlab.com/ci/variables/predefined_variables/)として公開されます。この例では、簡単にするために`ubuntu:22.04`イメージを使用します。複数のイメージをサポートする場合は、executorを変更する必要があります。 これらのスクリプトには、次の依存関係があります: - [LXD](https://ubuntu.com/lxd) - [GitLab Runner](../../install/linux-manually.md) ## 設定 {#configuration} ```toml [[runners]] name = "lxd-driver" url = "https://gitlab.example.com" token = "xxxxxxxxxxx" executor = "custom" builds_dir = "/builds" cache_dir = "/cache" [runners.custom] prepare_exec = "/opt/lxd-driver/prepare.sh" # Path to a bash script to create lxd container and download dependencies. run_exec = "/opt/lxd-driver/run.sh" # Path to a bash script to run script inside the container. cleanup_exec = "/opt/lxd-driver/cleanup.sh" # Path to bash script to delete container. 
``` ## ベース {#base} 各ステージングの[prepare](#prepare) 、[run](#run) 、[cleanup](#cleanup)では、このスクリプトを使用して、スクリプト全体で使用される変数を生成します。 このスクリプトは、他のスクリプトと同じディレクトリ(この場合は`/opt/lxd-driver/`)に配置されていることが重要です。 ```shell #!/usr/bin/env bash # /opt/lxd-driver/base.sh CONTAINER_ID="runner-$CUSTOM_ENV_CI_RUNNER_ID-project-$CUSTOM_ENV_CI_PROJECT_ID-concurrent-$CUSTOM_ENV_CI_CONCURRENT_PROJECT_ID-$CUSTOM_ENV_CI_JOB_ID" ``` ## 準備 {#prepare} prepareスクリプトは、次の処理を実行します: - 同じ名前のコンテナが実行中の場合、そのコンテナを削除します。 - コンテナを起動し、起動するまで待ちます。 - [前提となる依存関係](../custom.md#prerequisite-software-for-running-a-job)をインストールします。 ```shell #!/usr/bin/env bash # /opt/lxd-driver/prepare.sh currentDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" source ${currentDir}/base.sh # Get variables from base. set -eo pipefail # trap any error, and mark it as a system failure. trap "exit $SYSTEM_FAILURE_EXIT_CODE" ERR start_container () { if lxc info "$CONTAINER_ID" >/dev/null 2>/dev/null ; then echo 'Found old container, deleting' lxc delete -f "$CONTAINER_ID" fi # The container image is hardcoded, but you can use # the `CI_JOB_IMAGE` predefined variable # https://docs.gitlab.com/ci/variables/predefined_variables/ # which is available under `CUSTOM_ENV_CI_JOB_IMAGE` to allow the # user to specify the image. The rest of the script assumes that # you are running on an ubuntu image so modifications might be # required. lxc launch ubuntu:22.04 "$CONTAINER_ID" # Wait for container to start, we are using systemd to check this, # for the sake of brevity. for i in $(seq 1 10); do if lxc exec "$CONTAINER_ID" -- sh -c "systemctl isolate multi-user.target" >/dev/null 2>/dev/null; then break fi if [ "$i" == "10" ]; then echo 'Waited for 10 seconds to start container, exiting..' # Inform GitLab Runner that this is a system failure, so it # should be retried. exit "$SYSTEM_FAILURE_EXIT_CODE" fi sleep 1s done } install_dependencies () { # Install Git LFS, git comes pre installed with ubuntu image. 
lxc exec "$CONTAINER_ID" -- sh -c 'curl -s "https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh" | sudo bash' lxc exec "$CONTAINER_ID" -- sh -c "apt-get install -y git-lfs" # Install gitlab-runner binary since we need for cache/artifacts. lxc exec "$CONTAINER_ID" -- sh -c 'curl -L --output /usr/local/bin/gitlab-runner "https://gitlab-runner-downloads.s3.amazonaws.com/latest/binaries/gitlab-runner-linux-amd64"' lxc exec "$CONTAINER_ID" -- sh -c "chmod +x /usr/local/bin/gitlab-runner" } echo "Running in $CONTAINER_ID" start_container install_dependencies ``` ## 実行 {#run} これにより、GitLab Runnerによって生成されたスクリプトのコンテンツを`STDIN`経由でコンテナに送信することにより、スクリプトが実行されます。 ```shell #!/usr/bin/env bash # /opt/lxd-driver/run.sh currentDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" source ${currentDir}/base.sh # Get variables from base. lxc exec "$CONTAINER_ID" /bin/bash < "${1}" if [ $? -ne 0 ]; then # Exit using the variable, to make the build as failure in GitLab # CI. exit $BUILD_FAILURE_EXIT_CODE fi ``` ## クリーンアップ {#cleanup} ビルドが完了したので、コンテナを削除します。 ```shell #!/usr/bin/env bash # /opt/lxd-driver/cleanup.sh currentDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" source ${currentDir}/base.sh # Get variables from base. 
echo "Deleting container $CONTAINER_ID" lxc delete -f "$CONTAINER_ID" ``` ================================================ FILE: docs-locale/ja-jp/executors/docker.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Docker executor --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerは、Docker executorを使用してDockerイメージでジョブを実行します。 Docker executorを使用すると、次のことが可能になります。 - 各ジョブで同じビルド環境を維持する。 - イメージを使用してコマンドをローカルでテストする(CIサーバーでジョブを実行する必要はない)。 Docker executorは[Docker Engine](https://www.docker.com/products/container-runtime/)を使用して、個別の隔離されたコンテナ内で各ジョブを実行します。Docker Engineに接続するために、executorは以下を使用します。 - [`.gitlab-ci.yml`](https://docs.gitlab.com/ci/yaml/)で定義するイメージとサービス。 - [`config.toml`](../commands/_index.md#configuration-file)で定義する設定。 `config.toml`でデフォルトのイメージを定義していないなら、RunnerとそのDocker executorを登録することはできません。`.gitlab-ci.yml`で何も定義されていない場合、`config.toml`で定義されているイメージを使用できます。`.gitlab-ci.yml`でイメージが定義されている場合、それは`config.toml`で定義されているイメージをオーバーライドします。 前提条件: - [Dockerをインストールします](https://docs.docker.com/engine/install/)。 ## Docker executorのワークフロー {#docker-executor-workflow} Docker executorは、[Alpine Linux](https://alpinelinux.org/)をベースとするDockerイメージを使用します。このイメージには、準備、ジョブ実行前、およびジョブ実行後のステップを実行するためのツールが含まれています。特別なDockerイメージの定義を確認するには、[GitLab Runnerリポジトリ](https://gitlab.com/gitlab-org/gitlab-runner/-/tree/v13.4.1/dockerfiles/runner-helper)を参照してください。 Docker executorは、ジョブを複数のステップに分割します。 1. **準備**: [サービス](https://docs.gitlab.com/ci/yaml/#services)を作成して開始します。 1. **ジョブ実行前**: クローン、[キャッシュ](https://docs.gitlab.com/ci/yaml/#cache)の復元、および前のステージからの[アーティファクト](https://docs.gitlab.com/ci/yaml/#artifacts)のダウンロードを行います。特別なDockerイメージで実行されます。 1. **ジョブ**: Runner用に設定したDockerイメージでビルドを実行します。 1. 
**ジョブ実行後**: キャッシュの作成、GitLabへのアーティファクトのアップロードを実行します。特別なDockerイメージで実行されます。 ## サポートされている設定 {#supported-configurations} Docker executorは以下の設定をサポートしています。 Windows設定に関する既知のイシューと追加の要件については、[Windowsコンテナを使用する](#use-windows-containers)を参照してください。 | Runnerがインストールされている場所: | executor: | コンテナの実行: | |-------------------------|------------------|-----------------------| | Windows | `docker-windows` | Windows | | Windows | `docker` | Linux | | Linux | `docker` | Linux | | macOS | `docker` | Linux | 以下の設定はサポート**されていません**。 | Runnerがインストールされている場所: | executor: | コンテナの実行: | |-------------------------|------------------|-----------------------| | Linux | `docker-windows` | Linux | | Linux | `docker` | Windows | | Linux | `docker-windows` | Windows | | Windows | `docker` | Windows | | Windows | `docker-windows` | Linux | {{< alert type="note" >}} GitLab Runnerは、Docker Engine API [v1.25](https://docs.docker.com/reference/api/engine/version/v1.25/)を使用してDocker Engineと通信します。つまり、Linuxサーバーで[サポートされる最小バージョン](https://docs.docker.com/reference/api/engine/#api-version-matrix)のDockerは`1.13.0`です。Windows Serverでは、Windows Serverのバージョンを識別するために、[これよりも新しいバージョンが必要です](#supported-docker-versions)。 {{< /alert >}} ## Docker executorを使用する {#use-the-docker-executor} Docker executorを使用するには、`config.toml`でDockerをexecutorとして手動で定義するか、[`gitlab-runner register --executor "docker"`](../register/_index.md#register-with-a-runner-authentication-token)コマンドを使用して自動的に定義します。 次に示すのは、Dockerをexecutorとして定義している設定例です。これらの値の詳細については、[高度な設定](../configuration/advanced-configuration.md)を参照してください ```toml concurrent = 4 [[runners]] name = "myRunner" url = "https://gitlab.com/ci" token = "......" 
executor = "docker" [runners.docker] tls_verify = true image = "my.registry.tld:5000/alpine:latest" privileged = false disable_entrypoint_overwrite = false oom_kill_disable = false disable_cache = false volumes = [ "/cache", ] shm_size = 0 allowed_pull_policies = ["always", "if-not-present"] allowed_images = ["my.registry.tld:5000/*:*"] allowed_services = ["my.registry.tld:5000/*:*"] [runners.docker.volume_driver_ops] "size" = "50G" ``` ## イメージとサービスを設定する {#configure-images-and-services} 前提条件: - ジョブが実行されるイメージには、オペレーティングシステムの`PATH`に動作するShellが必要です。サポートされているShellは次のとおりです。 - Linux: - `sh` - `bash` - PowerShell Core(`pwsh`)。[13.9で導入されました](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4021)。 - Windows: - PowerShell(`powershell`) - PowerShell Core(`pwsh`)。[13.6で導入されました](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/13139)。 Docker executorを設定するには、[`.gitlab-ci.yml`](https://docs.gitlab.com/ci/yaml/)と[`config.toml`](../commands/_index.md#configuration-file)でDockerイメージとサービスを定義します。 次のキーワードを使用します。 - `image`: Runnerがジョブを実行するために使用するDockerイメージの名前。 - ローカルDocker Engineのイメージ、またはDocker Hubの任意のイメージを入力します。詳細については、[Dockerのドキュメント](https://docs.docker.com/get-started/introduction/)を参照してください。 - イメージのバージョンを定義するには、コロン(`:`)を使用してタグを追加します。タグを指定しない場合、Dockerはこのバージョンとして`latest`を使用します。 - `services`: 別のコンテナを作成し、`image`にリンクする追加のイメージ。サービスの種類に関する詳細については、[サービス](https://docs.gitlab.com/ci/services/)を参照してください。 ### `.gitlab-ci.yml`でイメージとサービスを定義する {#define-images-and-services-in-gitlab-ciyml} Runnerがすべてのジョブに使用するイメージと、ビルド時に使用する一連のサービスを定義します。 例: ```yaml image: ruby:3.3 services: - postgres:9.3 before_script: - bundle install test: script: - bundle exec rake spec ``` ジョブごとに異なるイメージとサービスを定義するには、次のようにします。 ```yaml before_script: - bundle install test:3.3: image: ruby:3.3 services: - postgres:9.3 script: - bundle exec rake spec test:3.4: image: ruby:3.4 services: - postgres:9.4 script: - bundle exec rake spec ``` `.gitlab-ci.yml`で`image`を定義しない場合、Runnerは`config.toml`で定義された`image`を使用します。 ### 
`config.toml`でイメージとサービスを定義する {#define-images-and-services-in-configtoml} Runnerが実行するすべてのジョブにイメージとサービスを追加するには、`config.toml`の`[runners.docker]`を更新します。 デフォルトの場合、`.gitlab-ci.yml`で定義されている`image`がDocker executorで使用されます。`.gitlab-ci.yml`で定義していない場合、Runnerは`config.toml`で定義されているイメージを使用します。 例: ```toml [runners.docker] image = "ruby:3.3" [[runners.docker.services]] name = "mysql:latest" alias = "db" [[runners.docker.services]] name = "redis:latest" alias = "cache" ``` この例では、[テーブル構文の配列](https://toml.io/en/v0.4.0#array-of-tables)を使用しています。 ### プライベートレジストリのイメージを定義する {#define-an-image-from-a-private-registry} 前提条件: - プライベートレジストリのイメージにアクセスするには、[GitLab Runnerを認証する](https://docs.gitlab.com/ci/docker/using_docker_images/#access-an-image-from-a-private-container-registry)必要があります。 プライベートレジストリのイメージを定義するには、`.gitlab-ci.yml`でレジストリ名とイメージを指定します。 例: ```yaml image: my.registry.tld:5000/namespace/image:tag ``` この例では、GitLab Runnerはレジストリ`my.registry.tld:5000`でイメージ`namespace/image:tag`を検索します。 ## ネットワーク設定 {#network-configurations} サービスをCI/CDジョブに接続するには、ネットワークを設定する必要があります。 ネットワークを設定するには、次のいずれかを実行します。 - 推奨。ジョブごとにネットワークを作成するようにRunnerを設定します。 - コンテナリンクを定義します。コンテナリンクは、Dockerのレガシー機能です。 ### ジョブごとにネットワークを作成する {#create-a-network-for-each-job} ジョブごとにネットワークを作成するようにRunnerを設定できます。 このネットワーキングモードを有効にすると、Runnerはジョブごとにユーザー定義のDockerブリッジネットワークを作成して使用します。Docker環境変数は、コンテナ間で共有されません。ユーザー定義のブリッジネットワークの詳細については、[Dockerのドキュメント](https://docs.docker.com/engine/network/drivers/bridge/)を参照してください。 このネットワーキングモードを使用するには、`config.toml`の機能フラグまたは環境変数で`FF_NETWORK_PER_BUILD`を有効にします。 `network_mode`は設定しないでください。 例: ```toml [[runners]] (...) executor = "docker" environment = ["FF_NETWORK_PER_BUILD=1"] ``` または: ```toml [[runners]] (...) 
executor = "docker" [runners.feature_flags] FF_NETWORK_PER_BUILD = true ``` デフォルトのDockerアドレスプールを設定するには、[`dockerd`](https://docs.docker.com/reference/cli/dockerd/)で`default-address-pool`を使用します。CIDR範囲がネットワークですでに使用されている場合、Dockerネットワークは、ホスト上の他のネットワーク(他のDockerネットワークを含む)と競合する可能性があります。 この機能は、IPv6を有効にしてDockerデーモンが設定されている場合にのみ機能します。IPv6サポートを有効にするには、Docker設定で`enable_ipv6`を`true`に設定します。詳細については、[Dockerのドキュメント](https://docs.docker.com/engine/daemon/ipv6/)を参照してください。 Runnerは、ジョブコンテナを解決するために`build`エイリアスを使用します。 {{< alert type="note" >}} この機能を使用すると、Docker-in-Docker(`dind`)サービスでDNSが正しく機能しない場合があります。 この動作は、ネットワークを指定した場合に`dind`コンテナがカスタムDNSエントリを継承しないという、[Docker/Moby](https://github.com/moby/moby/issues/20037#issuecomment-181659049)の問題によるものです。 回避策として、`dind`サービスに対して、カスタムDNS設定を手動で指定してください。たとえば、カスタムDNSサーバーが`1.1.1.1`の場合、Dockerの内部DNSサービスである`127.0.0.11`を使用できます。 ```yaml services: - name: docker:dind command: [--dns=127.0.0.11, --dns=1.1.1.1] ``` このアプローチでは、コンテナが同じネットワーク上のサービスを解決できるようになります。 {{< /alert >}} #### Runnerがジョブごとにネットワークを作成する仕組み {#how-the-runner-creates-a-network-for-each-job} ジョブが開始されると、Runnerは次の処理を行います。 1. Dockerコマンド`docker network create `と同様に、ブリッジネットワークを作成します。 1. サービスとコンテナをブリッジネットワークに接続します。 1. 
ジョブの最後にネットワークを削除します。 ジョブを実行しているコンテナと、サービスを実行しているコンテナが、互いのホスト名とエイリアスを解決します。この機能は[Dockerによって提供](https://docs.docker.com/engine/network/drivers/bridge/#differences-between-user-defined-bridges-and-the-default-bridge)されます。 ### コンテナリンクを使用してネットワークを設定する {#configure-a-network-with-container-links} GitLab Runner 18.7.0以前は、デフォルトのDocker `bridge`と[レガシーコンテナリンク](https://docs.docker.com/engine/network/links/)を使用して、ジョブコンテナとサービスをリンクしていました。Dockerはリンク機能を非推奨にしたため、GitLab Runner 18.7.0以降では、サービスのエイリアスがDockerの`extra_hosts`機能を使用して解決されるようにすることで、レガシーコンテナリンクの動作がエミュレートされます。このネットワークモードは、[`FF_NETWORK_PER_BUILD`](#create-a-network-for-each-job)が無効になっている場合のデフォルトです。 {{< alert type="note" >}} GitLab Runnerのエミュレートされたリンクの動作は、[レガシーコンテナリンク](https://docs.docker.com/engine/network/links/)とはわずかに異なります: - `icc`を無効にすると、コンテナ間通信が無効になり、コンテナが相互に通信できなくなります。 - リンクされたコンテナの環境変数は存在しなくなりました(`_PORT__`)。 {{< /alert >}} ネットワークを設定するには、`config.toml`ファイルで[ネットワーキング](https://docs.docker.com/engine/containers/run/#network-settings)モードを指定します。 - `bridge`: ブリッジネットワークを使用します。デフォルト。 - `host`: コンテナ内でホストのネットワークスタックを使用します。 - `none`: ネットワーキングなし。推奨されません。 例: ```toml [[runners]] (...) executor = "docker" [runners.docker] network_mode = "bridge" ``` 他の`network_mode`値を使用すると、ビルドコンテナが接続する既存のDockerネットワークの名前として扱われます。 Dockerは名前の解決中にサービスコンテナのホスト名とエイリアスを使用して、コンテナ内の`/etc/hosts`ファイルを更新します。ただし、サービスコンテナはコンテナ名を解決**できません**。コンテナ名を解決するには、ジョブごとにネットワークを作成する必要があります。 リンクされたコンテナは、その環境変数を共有します。 #### 作成されたネットワークのMTUを上書きする {#overriding-the-mtu-of-the-created-network} OpenStackの仮想マシンなどの一部の環境では、カスタムMTUが必要です。Dockerデーモンは、`docker.json`のMTUに従いません([Mobyイシュー34981](https://github.com/moby/moby/issues/34981)を参照)。Dockerデーモンが新しく作成されたネットワークに正しいMTUを使用できるようにするために、`config.toml`で`network_mtu`を有効な値に設定できます。上書きを有効にするには、[`FF_NETWORK_PER_BUILD`](#create-a-network-for-each-job)も有効にする必要があります。 次の設定では、各ジョブ用に作成されたネットワークのMTUが`1402`に設定されます。この値は、特定の環境要件に合わせて調整してください。 ```toml [[runners]] (...) 
executor = "docker" [runners.docker] network_mtu = 1402 [runners.feature_flags] FF_NETWORK_PER_BUILD = true ``` ## Dockerイメージとサービスを制限する {#restrict-docker-images-and-services} Dockerイメージとサービスを制限するには、`allowed_images`および`allowed_services`パラメータでワイルドカードパターンを指定します。構文の詳細については、[doublestarのドキュメント](https://github.com/bmatcuk/doublestar)を参照してください。 たとえば、プライベートDockerレジストリのイメージのみを許可するには、次のようにします。 ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) allowed_images = ["my.registry.tld:5000/*:*"] allowed_services = ["my.registry.tld:5000/*:*"] ``` プライベートDockerレジストリのイメージのリストに制限するには、次のようにします。 ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) allowed_images = ["my.registry.tld:5000/ruby:*", "my.registry.tld:5000/node:*"] allowed_services = ["postgres:9.4", "postgres:latest"] ``` Kaliなどの特定のイメージを除外するには、次のようにします。 ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) allowed_images = ["**", "!*/kali*"] ``` ## サービスホスト名にアクセスする {#access-services-hostnames} サービスホスト名にアクセスするには、`.gitlab-ci.yml`で`services`にサービスを追加します。 たとえば、Wordpressインスタンスを使用してアプリケーションとのAPIインテグレーションをテストするには、[tutum/wordpress](https://hub.docker.com/r/tutum/wordpress/)をサービスイメージとして使用します。 ```yaml services: - tutum/wordpress:latest ``` ジョブの実行時に`tutum/wordpress`サービスが開始されます。ホスト名`tutum__wordpress`および`tutum-wordpress`の下のビルドコンテナからこのサービスにアクセスできます。 指定されたサービスエイリアスの他に、Runnerはサービスイメージの名前をエイリアスとしてサービスコンテナに割り当てます。これらのエイリアスはどれでも使用できます。 Runnerは以下のルールに従って、イメージ名に基づいてエイリアスを作成します。 - `:`より後のすべての文字が削除されます。 - 1番目のエイリアスでは、スラッシュ(`/`)が2つのアンダースコア(`__`)に置き換えられます。 - 2番目のエイリアスでは、スラッシュ(`/`)が1つのダッシュ(`-`)に置き換えられます。 プライベートサービスイメージを使用する場合、Runnerは指定されたポートをすべて削除し、ルールを適用します。サービス`registry.gitlab-wp.com:4999/tutum/wordpress`の場合、ホスト名は`registry.gitlab-wp.com__tutum__wordpress`および`registry.gitlab-wp.com-tutum-wordpress`になります。 ## サービスを設定する {#configuring-services} データベース名を変更する場合、またはアカウント名を設定する場合には、サービスに環境変数を定義します。 Runnerが変数を渡すときには、次のように渡されます。 - 変数はすべてのコンテナに渡されます。Runnerは、特定のコンテナに変数を渡すことができません。 - セキュア変数はビルドコンテナに渡されます。 
設定変数の詳細については、対応するDocker Hubページで提供される各イメージのドキュメントを参照してください。 ### RAMにディレクトリをマウントする {#mount-a-directory-in-ram} `tmpfs`オプションを使用して、RAMにディレクトリをマウントできます。これにより、データベースなどのI/O関連の処理が多い場合にテストに必要な時間を短縮できます。 Runner設定で`tmpfs`オプションと`services_tmpfs`オプションを使用する場合は、複数のパスをそれぞれ専用のオプションで指定できます。詳細については、[Dockerのドキュメント](https://docs.docker.com/reference/cli/docker/container/run/#tmpfs)を参照してください。 たとえば、公式のMySQLコンテナのデータディレクトリをRAMにマウントするには、`config.toml`を設定します。 ```toml [runners.docker] # For the main container [runners.docker.tmpfs] "/var/lib/mysql" = "rw,noexec" # For services [runners.docker.services_tmpfs] "/var/lib/mysql" = "rw,noexec" ``` ### サービスでディレクトリをビルドする {#building-a-directory-in-a-service} GitLab Runnerは、すべての共有サービスに`/builds`ディレクトリをマウントします。 さまざまなサービスの使用法の詳細については、以下を参照してください。 - [PostgreSQLを使用する](https://docs.gitlab.com/ci/services/postgres/) - [MySQLを使用する](https://docs.gitlab.com/ci/services/mysql/) ### GitLab Runnerがサービスのヘルスチェックを実行する仕組み {#how-gitlab-runner-performs-the-services-health-check} {{< history >}} - GitLab 16.0で複数のポートチェックが[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4079)されました。 {{< /history >}} サービスの開始後、GitLab Runnerはサービスが応答するまで待機します。Docker executorは、サービスコンテナで公開されているサービスポートへのTCP接続を開こうとします。 - GitLab 15.11以前では、最初に公開されたポートのみがチェックされます。 - GitLab 16.0以降では、最初に公開された20個のポートがチェックされます。 特定のポートでヘルスチェックを実行するには、`HEALTHCHECK_TCP_PORT`サービス変数を使用できます。 ```yaml job: services: - name: mongo variables: HEALTHCHECK_TCP_PORT: "27017" ``` これがどのように実装されているかを確認するには、ヘルスチェックの[Goコマンド](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/commands/helpers/health_check.go)を使用します。 ## Dockerドライバーオペレーションを指定する {#specify-docker-driver-operations} ビルドのボリュームを作成するときにDockerボリュームドライバーに渡す引数を指定します。たとえば、他のすべてのドライバー固有のオプションに加えて、これらの引数を使用して、各ビルドが実行されるスペースを制限できます。次の例は、各ビルドが消費できるスペースの制限が50 GBに設定されている`config.toml`を示しています。 ```toml [runners.docker] [runners.docker.volume_driver_ops] "size" = "50G" ``` ## ホストデバイスを使用する {#using-host-devices} {{< history >}} - GitLab 
17.10で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/6208)されました。 {{< /history >}} GitLab Runnerホスト上のハードウェアデバイスを、ジョブを実行するコンテナに対して公開できます。このためには、Runnerの`devices`オプションと`services_devices`オプションを設定します。 - デバイスを`build`コンテナと[ヘルパー](../configuration/advanced-configuration.md#helper-image)コンテナに公開するには、`devices`オプションを使用します。 - デバイスをサービスコンテナに公開するには、`services_devices`オプションを使用します。サービスコンテナのデバイスアクセスを特定のイメージに制限するには、正確なイメージ名またはglobパターンを使用します。このアクションにより、ホストシステムデバイスへの直接アクセスが防止されます。 デバイスアクセスの詳細については、[Dockerのドキュメント](https://docs.docker.com/reference/cli/docker/container/run/#device)を参照してください。 ### ビルドコンテナの例 {#build-container-example} この例では、`config.toml`セクションで`/dev/bus/usb`をビルドコンテナに公開します。この設定により、パイプラインはホストマシンに接続されたUSBデバイス([Android Debug Bridge(`adb`)](https://developer.android.com/tools/adb)を介して制御されるAndroidスマートフォンなど)にアクセスできます。 ビルドジョブコンテナがホストUSBデバイスに直接アクセスできるため、同じハードウェアにアクセスすると、同時パイプライン実行が互いに競合する可能性があります。このような競合を防ぐには、[`resource_group`](https://docs.gitlab.com/ci/yaml/#resource_group)を使用します。 ```toml [[runners]] name = "hardware-runner" url = "https://gitlab.com" token = "__REDACTED__" executor = "docker" [runners.docker] # All job containers may access the host device devices = ["/dev/bus/usb"] ``` ### プライベートレジストリの例 {#private-registry-example} この例は、プライベートDockerレジストリから`/dev/kvm`デバイスと`/dev/dri`デバイスをコンテナイメージに公開する方法を示します。これらのデバイスは通常、ハードウェアアクセラレーションによる仮想化とレンダリングに使用されます。ハードウェアリソースへの直接アクセスをユーザーに付与することに伴うリスクを軽減するには、デバイスアクセスを、`myregistry:5000/emulator/*`ネームスペース内の信頼できるイメージに制限します。 ```toml [runners.docker] [runners.docker.services_devices] # Only images from an internal registry may access the host devices "myregistry:5000/emulator/*" = ["/dev/kvm", "/dev/dri"] ``` {{< alert type="warning" >}} イメージ名`**/*`は、任意のイメージにデバイスを公開する可能性があります。 {{< /alert >}} ## コンテナのビルドとキャッシュ用のディレクトリを設定する {#configure-directories-for-the-container-build-and-cache} コンテナ内でデータが保存される場所を定義するには、`config.toml`の`[[runners]]`セクションで`/builds`ディレクトリと`/cache`ディレクトリを設定します。 
`/cache`ストレージパスを変更する場合は、パスを永続としてマークするために、`config.toml`の`[runners.docker]`セクションで`volumes = ["/my/cache/"]`にこのパスを定義する必要があります。 デフォルトでは、Docker executorは次のディレクトリにビルドとキャッシュを保存します。 - ビルド: `/builds/<namespace>/<project-name>` - キャッシュ: コンテナ内の`/cache` ## Dockerキャッシュをクリアする {#clear-the-docker-cache} Runnerによって作成された未使用のコンテナとボリュームを削除するには、[`clear-docker-cache`](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/packaging/root/usr/share/gitlab-runner/clear-docker-cache)を使用します。 オプションのリストを確認するには、`help`オプションを指定してスクリプトを実行します。 ```shell clear-docker-cache help ``` デフォルトのオプションは`prune-volumes`です。これにより、未使用のコンテナ(ダングリングおよび未参照)とボリュームがすべて削除されます。 キャッシュストレージを効率的に管理するには、次の操作を行う必要があります。 - `cron`を使用して`clear-docker-cache`を定期的に実行します(たとえば週に1回)。 - ディスクスペースを回収する際に、パフォーマンスのためにキャッシュに最近のコンテナをいくつか保持します。 どのオブジェクトが削除されるかは`FILTER_FLAG`環境変数によって制御されます。その使用例については、[Docker imageプルーニング](https://docs.docker.com/reference/cli/docker/image/prune/#filter)のドキュメントを参照してください。 ## Dockerビルドイメージをクリアする {#clear-docker-build-images} DockerイメージはGitLab Runnerによってタグ付けされていないため、[`clear-docker-cache`](https://gitlab.com/gitlab-org/gitlab-runner/blob/main/packaging/root/usr/share/gitlab-runner/clear-docker-cache)スクリプトはDockerイメージを削除しません。 Dockerビルドイメージをクリアするには、次の手順に従います。 1. 回収できるディスクスペースを確認します。 ```shell clear-docker-cache space Show docker disk usage ---------------------- TYPE TOTAL ACTIVE SIZE RECLAIMABLE Images 14 9 1.306GB 545.8MB (41%) Containers 19 18 115kB 0B (0%) Local Volumes 0 0 0B 0B Build Cache 0 0 0B 0B ``` 1. 
未使用のコンテナ、ネットワーク、イメージ(ダングリングおよび未参照)、およびタグ付けされていないボリュームをすべて削除するには、[`docker system prune`](https://docs.docker.com/reference/cli/docker/system/prune/)を実行します。 ## 永続ストレージ {#persistent-storage} Docker executorは、コンテナの実行時に永続ストレージを提供します。`volumes =`で定義されているすべてのディレクトリは、ビルド間で維持されます。 `volumes`ディレクティブは、次の種類のストレージをサポートしています。 - 動的ストレージの場合は`<path>`を使用します。`<path>`は、そのプロジェクトで同じ同時実行ジョブの後続の実行間で維持されます。`runners.docker.cache_dir`を設定しない場合、データはDockerボリュームに永続的に保存されます。そうでない場合は、ホスト上の設定されたディレクトリに永続的に保存されます(ビルドコンテナにマウントされます)。 ボリュームベースの永続ストレージのボリューム名: - 18.4.0より以前のGitLab Runnerの場合: `runner-<short-token>-project-<project-id>-concurrent-<concurrency-id>-cache-<md5-of-path>` - GitLab Runner 18.4.0以降の場合: `runner-<short-token>-cache-<hash>` ボリューム名で人間が読めなくなったデータは、ボリュームのラベルに移動されます。 ホストベースの永続ストレージのホストディレクトリ: - 18.4.0より以前のGitLab Runnerの場合: `<cache_dir>/runner-<short-token>-project-<project-id>-concurrent-<concurrency-id>/<md5-of-path>` - GitLab Runner 18.4.0以降の場合: `<cache_dir>/runner-<short-token>/<hash>` 変数部分の説明: - `<short-token>`: Runnerのトークンの短縮バージョン(最初の8文字) - `<project-id>`: GitLabプロジェクトのID - `<concurrency-id>`: Runnerのインデックス(同じプロジェクトのビルドを同時に実行しているすべてのRunnerのリストから) - `<md5-of-path>`: コンテナ内のパスのMD5サム - `<hash>`: 次のデータのハッシュ: - Runnerのトークン - RunnerのシステムID - `<md5-of-path>` - `<protected>` - `<protected>`: 値は、保護されていないブランチのビルドの場合は空で、保護されたブランチのビルドの場合は`-protected`です - `<cache_dir>`: `runners.docker.cache_dir`の設定 - ホストにバインドされたストレージの場合は、`<host-path>:<container-path>[:<mode>]`を使用します。GitLab Runnerは、ホストシステムの`<host-path>`に`<container-path>`をバインドします。オプションの`<mode>`は、このストレージが読み取り専用か読み取り/書き込み(デフォルト)かを指定します。 {{< alert type="warning" >}} GitLab Runner 18.4.0では、動的ストレージのソースの命名(上記参照)が、Dockerボリュームベースおよびホストディレクトリベースの永続ストレージの両方で変更されました。18.4.0にアップグレードすると、GitLab Runnerは以前のRunnerバージョンのキャッシュされたデータを無視し、新しいDockerボリュームまたは新しいホストディレクトリを介して、オンデマンドで新しい動的ストレージを作成します。 動的ストレージとは対照的に、ホストバインドストレージ(`<host-path>:<container-path>`設定を使用)は影響を受けません。 {{< /alert >}} ### ビルド用の永続ストレージ {#persistent-storage-for-builds} `/builds`ディレクトリをホストにバインドされたストレージにすると、ビルドは`/builds/<short-token>/<concurrent-id>/<namespace>/<project-name>`に保存されます。 - `<short-token>`は、Runnerのトークンの短縮バージョンです(最初の8文字)。 - `<concurrent-id>`は、プロジェクトのコンテキストで特定のRunnerのローカルジョブIDを識別する一意の番号です。 ## IPCモード {#ipc-mode} Docker executorでは、コンテナのIPCネームスペースを他の場所と共有できます。これは`docker run --ipc`フラグにマップされます。IPC設定の詳細については、[Dockerのドキュメント](https://docs.docker.com/engine/containers/run/#ipc-settings---ipc)を参照してください。 ## 特権モード {#privileged-mode} Docker 
executorは、ビルドコンテナのファインチューニングを可能にするさまざまなオプションをサポートしています。このようなオプションの1つが[`privileged`モード](https://docs.docker.com/engine/containers/run/#runtime-privilege-and-linux-capabilities)です。 ### 特権モードでDocker-in-Dockerを使用する {#use-docker-in-docker-with-privileged-mode} 設定された`privileged`フラグがビルドコンテナとすべてのサービスに渡されます。このフラグを使用すると、Docker-in-Dockerアプローチを使用できます。 まず、`privileged`モードで実行するようにRunner(`config.toml`)を設定します。 ```toml [[runners]] executor = "docker" [runners.docker] privileged = true ``` 次に、Docker-in-Dockerコンテナを使用するためのビルドスクリプト(`.gitlab-ci.yml`)を作成します。 ```yaml image: docker:git services: - docker:dind build: script: - docker build -t my-image . - docker push my-image ``` {{< alert type="warning" >}} 特権モードで実行されるコンテナには、セキュリティ上のリスクがあります。コンテナが特権モードで実行されている場合、コンテナセキュリティメカニズムを無効にし、ホストを特権エスカレーションに公開します。特権モードでコンテナを実行すると、コンテナのブレイクアウトが発生する可能性があります。詳細については、[ランタイム特権とLinux機能](https://docs.docker.com/engine/containers/run/#runtime-privilege-and-linux-capabilities)に関するDockerドキュメントを参照してください。 {{< /alert >}} 次のようなエラーを回避するには、[TLSを使用してDocker-in-Dockerを設定するか、またはTLSを無効にする](https://docs.gitlab.com/ci/docker/using_docker_build/#use-the-docker-executor-with-docker-in-docker)必要があります。 ```plaintext Cannot connect to the Docker daemon at tcp://docker:2375. Is the docker daemon running? ``` ### 制限付き特権モードでルートレスDocker-in-Dockerを使用する {#use-rootless-docker-in-docker-with-restricted-privileged-mode} このバージョンではDocker-in-Dockerルートレスイメージのみを特権モードでサービスとして実行できます。 `services_privileged`および`allowed_privileged_services`設定パラメータは、特権モードで実行できるコンテナを制限します。 制限付き特権モードでルートレスDocker-in-Dockerを使用するには、次の手順に従います。 1. `config.toml`で、`services_privileged`と`allowed_privileged_services`を使用するようにRunnerを設定します。 ```toml [[runners]] executor = "docker" [runners.docker] services_privileged = true allowed_privileged_services = ["docker.io/library/docker:*-dind-rootless", "docker.io/library/docker:dind-rootless", "docker:*-dind-rootless", "docker:dind-rootless"] ``` 1. 
`.gitlab-ci.yml`で、Docker-in-Dockerルートなしコンテナを使用するようにビルドスクリプトを編集します。 ```yaml image: docker:git services: - docker:dind-rootless build: script: - docker build -t my-image . - docker push my-image ``` 特権モードで実行できるのは、`allowed_privileged_services`にリストされているDocker-in-Dockerルートレスイメージのみです。ジョブとサービスのその他のコンテナはすべて、非特権モードで実行されます。 これらは非ルートとして実行されるため、Docker-in-DockerルートレスやBuildKitルートレスなどの特権モードのイメージとともに使用することは_ほぼ安全です_。 セキュリティの問題の詳細については、[Docker executorのセキュリティリスク](../security/_index.md#usage-of-docker-executor)を参照してください。 ## Docker ENTRYPOINTを設定する {#configure-a-docker-entrypoint} デフォルトの場合、Docker executorは[Dockerイメージの`ENTRYPOINT`](https://docs.docker.com/engine/containers/run/#entrypoint-default-command-to-execute-at-runtime)をオーバーライドしません。ジョブスクリプトを実行するコンテナを起動するために、`sh`または`bash`を[`COMMAND`](https://docs.docker.com/engine/containers/run/#cmd-default-command-or-options)として渡します。 ジョブを実行できるようにするには、そのDockerイメージが次の処理を行う必要があります。 - `sh`または`bash`と`grep`を提供する。 - 引数として`sh`/`bash`が渡されるとShellを起動する`ENTRYPOINT`を定義する。 Docker Executorは、次のコマンドと同等のコマンドでジョブのコンテナを実行します。 ```shell docker run sh -c "echo 'It works!'" # or bash ``` Dockerイメージがこのメカニズムをサポートしていない場合は、プロジェクト設定で次のように[イメージのENTRYPOINTをオーバーライドできます](https://docs.gitlab.com/ci/yaml/#imageentrypoint)。 ```yaml # Equivalent of # docker run --entrypoint "" sh -c "echo 'It works!'" image: name: my-image entrypoint: [""] ``` 詳細については、[イメージのエントリポイントをオーバーライドする](https://docs.gitlab.com/ci/docker/using_docker_images/#override-the-entrypoint-of-an-image)と[Dockerでの`CMD`と`ENTRYPOINT`の相互作用の仕組み](https://docs.docker.com/reference/dockerfile/#understand-how-cmd-and-entrypoint-interact)を参照してください。 ### ENTRYPOINTとしてのジョブスクリプト {#job-script-as-entrypoint} `ENTRYPOINT`を使用して、カスタム環境またはセキュアモードでビルドスクリプトを実行するDockerイメージを作成できます。 たとえば、ビルドスクリプトを実行しない`ENTRYPOINT`を使用するDockerイメージを作成できます。代わりにDockerイメージは、定義済みの一連のコマンドを実行して、ディレクトリからDockerイメージをビルドします。[特権モード](#privileged-mode)でビルドコンテナを実行し、Runnerのビルド環境を保護します。 1. 
新しいDockerfileを作成します。 ```dockerfile FROM docker:dind ADD / /entrypoint.sh ENTRYPOINT ["/bin/sh", "/entrypoint.sh"] ``` 1. `ENTRYPOINT`として使用されるbashスクリプト(`entrypoint.sh`)を作成します。 ```shell #!/bin/sh dind docker daemon --host=unix:///var/run/docker.sock \ --host=tcp://0.0.0.0:2375 \ --storage-driver=vf & docker build -t "$BUILD_IMAGE" . docker push "$BUILD_IMAGE" ``` 1. イメージをDockerレジストリにプッシュします。 1. `privileged`モードでDocker executorを実行します。`config.toml`で次のように定義します。 ```toml [[runners]] executor = "docker" [runners.docker] privileged = true ``` 1. プロジェクトで次の`.gitlab-ci.yml`を使用します。 ```yaml variables: BUILD_IMAGE: my.image build: image: my/docker-build:image script: - Dummy Script ``` ## Podmanを使用してDockerコマンドを実行する {#use-podman-to-run-docker-commands} {{< history >}} - GitLab 15.3で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27119)されました。 {{< /history >}} LinuxにGitLab Runnerがインストールされている場合、ジョブはPodmanを使用して、DockerをDocker executorのコンテナランタイムに置き換えることができます。 前提条件: - [Podman](https://podman.io/) v4.2.0以降。 - Podmanをexecutorとして使用して[サービス](#services)を実行するには、[`FF_NETWORK_PER_BUILD`機能フラグ](#create-a-network-for-each-job)を有効にします。[Dockerコンテナリンク](https://docs.docker.com/engine/network/links/)はレガシー機能であり、[Podman](https://podman.io/)ではサポートされていません。ネットワークエイリアスを作成するサービスの場合、`podman-plugins`パッケージをインストールする必要があります。 {{< alert type="note" >}} Podmanは、コンテナのDNSサーバーとして`aardvark-dns`を使用します。`aardvark-dns`バージョン1.10.0以前では、CI/CDジョブで散発的なDNS解決の失敗が発生します。新しいバージョンがインストールされていることを確認してください。詳細については、[GitHubイシュー389](https://github.com/containers/aardvark-dns/issues/389)を参照してください。 {{< /alert >}} 1. LinuxホストにGitLab Runnerをインストールします。システムのパッケージマネージャーを使用してGitLab Runnerをインストールした場合、`gitlab-runner`ユーザーが自動的に作成されます。 1. GitLab Runnerを実行するユーザーとしてサインインします。これは、[`pam_systemd`](https://www.freedesktop.org/software/systemd/man/latest/pam_systemd.html)を回避しない方法で行う必要があります。正しいユーザーでSSHを使用できます。これにより、このユーザーとして`systemctl`を実行できるようになります。 1. 
システムが、[ルートレスPodmanセットアップ](https://github.com/containers/podman/blob/main/docs/tutorials/rootless_tutorial.md)の前提条件を満たしていることを確認します。具体的には、[`/etc/subuid`および`/etc/subgid`にユーザーの正しいエントリがあること](https://github.com/containers/podman/blob/main/docs/tutorials/rootless_tutorial.md#etcsubuid-and-etcsubgid-configuration)を確認します。 1. Linuxホストに[Podmanをインストール](https://podman.io/getting-started/installation)します。 1. Podmanソケットを有効にして起動します。 ```shell systemctl --user --now enable podman.socket ``` 1. Podmanソケットがリッスンしていることを検証します。 ```shell systemctl status --user podman.socket ``` 1. Podman APIへのアクセスに使用されている`Listen`キーのソケット文字列をコピーします。 1. GitLab Runnerユーザーがログアウトした後も、Podmanソケットが利用可能な状態であることを確認します。 ```shell sudo loginctl enable-linger gitlab-runner ``` 1. GitLab Runnerの`config.toml`ファイルを編集し、`[runners.docker]`セクションのhostエントリにソケット値を追加します。例: ```toml [[runners]] name = "podman-test-runner-2025-06-07" url = "https://gitlab.com" token = "TOKEN" executor = "docker" [runners.docker] host = "unix:///run/user/1012/podman/podman.sock" tls_verify = false image = "quay.io/podman/stable" privileged = false ``` {{< alert type="note" >}} 標準のPodmanを使用するには、`privileged = false`を設定します。ジョブ内で[Docker-in-Dockerサービス](#use-docker-in-docker-with-privileged-mode)を実行する必要がある場合にのみ、`privileged = true`を設定してください。 {{< /alert >}} ### Podmanを使用してDockerfileからコンテナイメージをビルドする {#use-podman-to-build-container-images-from-a-dockerfile} 次の例では、Podmanを使用してコンテナイメージをビルドし、このイメージをGitLabコンテナレジストリにプッシュします。 Runnerの`config.toml`でデフォルトコンテナイメージが`quay.io/podman/stable`に設定されているため、CIジョブはそのイメージを使用して、含まれているコマンドを実行します。 ```yaml variables: IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG before_script: - podman login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY oci-container-build: stage: build script: - podman build -t $IMAGE_TAG . 
- podman push $IMAGE_TAG when: manual ``` ### Buildahを使用してDockerfileからコンテナイメージをビルドする {#use-buildah-to-build-container-images-from-a-dockerfile} 次の例は、Buildahを使用してコンテナイメージをビルドし、このイメージをGitLabコンテナレジストリにプッシュする方法を示しています。 ```yaml image: quay.io/buildah/stable variables: IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG before_script: - buildah login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY oci-container-build: stage: build script: - buildah bud -t $IMAGE_TAG . - buildah push $IMAGE_TAG when: manual ``` ### 既知の問題 {#known-issues} Dockerとは異なり、PodmanはデフォルトでSELinuxポリシーを適用します。多くのパイプラインは問題なく実行されますが、ツールが一時ディレクトリを使用すると、SELinuxコンテキストの継承により失敗する場合があります。 たとえば、次のパイプラインはPodmanでは失敗します: ```yaml testing: image: alpine:3.20 script: - apk add --no-cache python3 py3-pip - pip3 install --target $CI_PROJECT_DIR requests==2.28.2 ``` pipが作業ディレクトリとして`/tmp`を使用するため、失敗が発生します。`/tmp`で作成されたファイルは、そのSELinuxコンテキストを継承します。これにより、コンテナは、これらのファイルが`$CI_PROJECT_DIR`に移動されたときに変更できなくなります。 **解決策**: `runners.docker`セクションの下のRunnerの`config.toml`のボリュームに`/tmp`を追加します: ```toml [[runners]] [runners.docker] volumes = ["/cache", "/tmp"] ``` この追加により、マウントされたディレクトリ全体で一貫したSELinuxコンテキストが確保されます。 #### SELinuxのトラブルシューティング {#troubleshooting-selinux-issues} その他のPodman/SELinuxの問題では、必要な設定の変更を特定するために、追加のトラブルシューティングが必要になる場合があります。 Podman Runnerの問題がSELinuxに関連しているかどうかをテストするには、`runners.docker`セクションの下のRunnerの`config.toml`に、次のディレクティブを一時的に追加します: ```toml [[runners]] [runners.docker] security_opt = ["label:disable"] ``` {{< alert type="warning" >}} この追加により、コンテナ内のSELinuxの適用がオフになります(これはDockerのデフォルトの動作です)。この設定はテスト目的でのみ使用し、セキュリティに影響を与える可能性があるため、永続的なソリューションとしては使用しないでください。 {{< /alert >}} #### SELinux MCSの設定 {#configure-selinux-mcs} SELinuxが一部の書き込み操作(既存のGitリポジトリの再初期化など)をブロックする場合は、Runnerによって起動されたすべてのコンテナでマルチカテゴリセキュリティ(MCS)を強制できます: ```toml [[runners]] [runners.docker] security_opt = ["label=level:s0:c1000"] ``` このオプションではSELinuxは無効になりませんが、コンテナのMCSサービスレベル指標を設定します。このアプローチは、`label:disable`を使用するよりも安全です。 {{< alert type="warning" >}} 
同じMCSカテゴリを使用する複数のコンテナは、そのカテゴリでタグ付けされた同じファイルにアクセスできます。 {{< /alert >}} ## ジョブを実行するユーザーを指定する {#specify-which-user-runs-the-job} デフォルトでは、Runnerはコンテナ内の`root`ユーザーとしてジョブを実行します。ジョブを実行する別の非rootユーザーを指定するには、DockerイメージのDockerfileで`USER`ディレクティブを使用します。 ```dockerfile FROM amazonlinux RUN ["yum", "install", "-y", "nginx"] RUN ["useradd", "www"] USER "www" CMD ["/bin/bash"] ``` そのDockerイメージを使用してジョブを実行すると、指定されたユーザーとして実行されます。 ```yaml build: image: my/docker-build:image script: - whoami # www ``` ## Runnerがイメージをプルする方法を設定する {#configure-how-runners-pull-images} RunnerがレジストリからDockerイメージをプルする方法を定義するには、`config.toml`でプルポリシーを設定します。1つのポリシー、[ポリシーのリスト](#set-multiple-pull-policies)、または[特定のプルポリシーを許可](#allow-docker-pull-policies)できます。 `pull_policy`には次の値を使用します。 - [`always`](#set-the-always-pull-policy): デフォルト。ローカルイメージが存在する場合でもイメージをプルします。このプルポリシーは、ディスクに既に存在する`SHA256`で指定されたイメージには適用されません。 - [`if-not-present`](#set-the-if-not-present-pull-policy): ローカルバージョンが存在しない場合にのみ、イメージをプルします。 - [`never`](#set-the-never-pull-policy): イメージをプルせずに、ローカルイメージのみを使用します。 ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) pull_policy = "always" # available: always, if-not-present, never ``` ### `always`プルポリシーを設定する {#set-the-always-pull-policy} `always`オプションはデフォルトで有効になっており、常にコンテナの作成前にプルを開始します。このオプションにより、イメージが最新の状態になり、ローカルイメージが存在する場合でも古いイメージの使用を回避できます。 このプルポリシーは、次の場合に使用します。 - Runnerが常に最新のイメージをプルする必要がある。 - Runnerが公開されており、[オートスケール](../configuration/autoscale.md)向けに設定されているか、またはGitLabインスタンスのインスタンスRunnerとして設定されている。 Runnerがローカルに保存されているイメージを使用する必要がある場合は、このポリシーを**使用しないでください**。 `config.toml`で`always`を`pull policy`として設定します。 ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) 
pull_policy = "always" ``` ### `if-not-present`プルポリシーを設定する {#set-the-if-not-present-pull-policy} プルポリシーを`if-not-present`に設定すると、Runnerは最初にローカルイメージが存在するかどうかを確認します。ローカルイメージがない場合、Runnerはレジストリからイメージをプルします。 `if-not-present`ポリシーは、次の場合に使用します。 - ローカルイメージを使用するが、ローカルイメージが存在しない場合はイメージをプルする。 - 負荷が高いイメージやほとんど更新されないイメージのイメージレイヤの差分をRunnerが分析する時間を短縮する。この場合、イメージの更新を強制的に実行するために、ローカルのDocker Engineストアから定期的に手動でイメージを削除する必要があります。 次の場合にはこのポリシーを**使用しないでください**。 - Runnerを使用するさまざまなユーザーがプライベートイメージにアクセスできるインスタンスRunnerの場合。セキュリティの問題の詳細については、[if-not-presentプルポリシーでのプライベートDockerイメージの使用](../security/_index.md#usage-of-private-docker-images-with-if-not-present-pull-policy)をご覧ください。 - ジョブが頻繁に更新され、最新のイメージバージョンでジョブを実行する必要がある場合。これにより実現するネットワーク負荷の軽減の価値は、ローカルイメージを頻繁に削除する価値を上回る可能性があります。 `config.toml`で`if-not-present`ポリシーを設定します。 ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) pull_policy = "if-not-present" ``` ### `never`プルポリシーを設定する {#set-the-never-pull-policy} 前提条件: - ローカルイメージには、インストール済みのDocker Engineと、使用されているイメージのローカルコピーが含まれている必要があります。 プルポリシーを`never`に設定すると、イメージのプルが無効になります。ユーザーはRunnerが実行されているDockerホストで、手動でプルされたイメージのみを使用できます。 次の場合に`never`プルポリシーを使用します。 - Runnerユーザーが使用するイメージを制御する場合。 - レジストリで公開されていない特定のイメージのみを使用できるプロジェクト専用のプライベートRunnerの場合。 [オートスケールされた](../configuration/autoscale.md)Docker executorには、`never`プルポリシーを**使用しないでください**。`never`プルポリシーは、選択したクラウドプロバイダーに定義済みのクラウドインスタンスイメージを使用する場合にのみ使用できます。 `config.toml`で`never`ポリシーを設定します。 ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) 
pull_policy = "never" ``` ### 複数のプルポリシーを設定する {#set-multiple-pull-policies} プルが失敗した場合に実行する複数のプルポリシーをリストできます。Runnerは、プルが成功するか、リストされたポリシーがすべて処理されるまで、リストされた順にプルポリシーを処理します。たとえば、Runnerが`always`プルポリシーを使用している場合にレジストリが利用できない場合は、2番目のプルポリシーとして`if-not-present`を追加できます。この設定により、RunnerはローカルにキャッシュされているDockerイメージを使用できます。 このプルポリシーのセキュリティへの影響について詳しくは、[if-not-presentプルポリシーでのプライベートDockerイメージの使用](../security/_index.md#usage-of-private-docker-images-with-if-not-present-pull-policy)を参照してください。 複数のプルポリシーを設定するには、`config.toml`でプルポリシーをリストとして追加します。 ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) pull_policy = ["always", "if-not-present"] ``` ### Dockerプルポリシーを許可する {#allow-docker-pull-policies} {{< history >}} - GitLab 15.1で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26753)されました。 {{< /history >}} `.gitlab-ci.yml`ファイルでプルポリシーを指定できます。このポリシーは、CI/CDジョブがイメージをフェッチする方法を決定します。 `.gitlab-ci.yml`ファイルで指定されているものの中から使用できるプルポリシーを制限するには、`allowed_pull_policies`を使用します。 たとえば、`always`および`if-not-present`プルポリシーのみを許可するには、それらを`config.toml`に追加します。 ```toml [[runners]] (...) executor = "docker" [runners.docker] (...) 
allowed_pull_policies = ["always", "if-not-present"] ``` - `allowed_pull_policies`を指定しない場合、リストは`pull_policy`キーワードで指定された値と一致します。 - `pull_policy`を指定しない場合、デフォルトは`always`です。 - `pull_policy`と`allowed_pull_policies`の両方に含まれているプルポリシーだけがジョブによって使用されます。有効なプルポリシーは、[`pull_policy`キーワード](#configure-how-runners-pull-images)で指定されているポリシーを`allowed_pull_policies`と比較することによって決定されます。GitLabでは、これら2つのポリシーリストの[共通部分](https://en.wikipedia.org/wiki/Intersection_(set_theory))が使用されます。たとえば、`pull_policy`が`["always", "if-not-present"]`、`allowed_pull_policies`が`["if-not-present"]`の場合、ジョブでは、両方のリストで定義されている唯一のプルポリシーである`if-not-present`だけが使用されます。 - 既存の`pull_policy`キーワードには、`allowed_pull_policies`で指定されているプルポリシーが少なくとも1つ含まれている必要があります。`pull_policy`の値の中に`allowed_pull_policies`と一致するものがない場合、ジョブは失敗します。 ### イメージのプルエラーメッセージ {#image-pull-error-messages} | エラーメッセージ | 説明 | |-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------| | `Pulling docker image registry.tld/my/image:latest ... ERROR: Build failed: Error: image registry.tld/my/image:latest not found` | Runnerはイメージを見つけることができません。`always`プルポリシーが設定されている場合に表示されます。 | | `Pulling docker image local_image:latest ... ERROR: Build failed: Error: image local_image:latest not found` | イメージがローカルでビルドされており、パブリックまたはデフォルトのDockerレジストリに存在していません。`always`プルポリシーが設定されている場合に表示されます。 | | `Pulling docker image registry.tld/my/image:latest ... WARNING: Cannot pull the latest version of image registry.tld/my/image:latest : Error: image registry.tld/my/image:latest not found WARNING: Locally found image will be used instead.` | Runnerは、イメージをプルする代わりに、ローカルイメージを使用しました。 | | `Pulling docker image local_image:latest ... 
ERROR: Build failed: Error: image local_image:latest not found` | イメージをローカルで見つけることができません。`never`プルポリシーが設定されている場合に表示されます。 | | `WARNING: Failed to pull image with policy "always": Error response from daemon: received unexpected HTTP status: 502 Bad Gateway (docker.go:143:0s) Attempt #2: Trying "if-not-present" pull policy Using locally found image version due to "if-not-present" pull policy` | Runnerはイメージのプルに失敗し、次にリストされているプルポリシーを使用してイメージのプルを試行します。複数のプルポリシーが設定されている場合に表示されます。 | ## 失敗したプルを再試行する {#retry-a-failed-pull} 失敗したイメージのプルを再試行するようにRunnerを設定するには、`config.toml`で同じポリシーを複数回指定します。 たとえば次の設定では、プルを1回再試行します。 ```toml [runners.docker] pull_policy = ["always", "always"] ``` この設定は、個々のプロジェクトの`.gitlab-ci.yml`ファイルの[`retry`ディレクティブ](https://docs.gitlab.com/ci/yaml/#retry)と似ていますが、Dockerのプルが最初に失敗した場合にのみ有効になります。 ## Windowsコンテナを使用する {#use-windows-containers} Docker executorでWindowsコンテナを使用するには、制限事項、サポートされているWindowsバージョン、およびWindows Docker executorの設定に関する次の情報に注意してください。 ### Nanoserverのサポート {#nanoserver-support} Windowsヘルパーイメージで導入されたPowerShell Coreのサポートにより、ヘルパーイメージの`nanoserver`バリアントを利用できるようになりました。 ### Windows上のDocker executorに関する既知のイシュー {#known-issues-with-docker-executor-on-windows} 以下は、Docker executorでWindowsコンテナを使用する場合の制限事項の一部です。 - Docker-in-DockerはDocker自体で[サポートされていない](https://github.com/docker-library/docker/issues/49)ため、サポートされていません。 - インタラクティブWebターミナルはサポートされていません。 - ホストデバイスのマウントはサポートされていません。 - ボリュームディレクトリをマウントする場合、ディレクトリが存在している必要があります。そうでない場合、Dockerはコンテナを起動できません。詳細については、[\#3754](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/3754)を参照してください。 - `docker-windows` executorは、Windowsで実行されているGitLab Runnerのみを使用して実行できます。 - [Windows上のLinuxコンテナ](https://learn.microsoft.com/en-us/virtualization/windowscontainers/deploy-containers/set-up-linux-containers)はまだ実験的機能であるため、サポートされていません。詳細については、[関連するイシュー](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4373)を確認してください。 - 
[Dockerでの制限](https://github.com/MicrosoftDocs/Virtualization-Documentation/pull/331)により、宛先パスのドライブ文字が`c:`ではない場合、以下ではパスがサポートされません。 - [`builds_dir`](../configuration/advanced-configuration.md#the-runners-section) - [`cache_dir`](../configuration/advanced-configuration.md#the-runners-section) - [`volumes`](../configuration/advanced-configuration.md#volumes-in-the-runnersdocker-section) つまり、`f:\\cache_dir`などの値はサポートされていませんが、`f:`はサポートされています。ただし、宛先パスが`c:`ドライブ上にある場合は、パスもサポートされます(`c:\\cache_dir`など)。 Dockerデーモンがイメージとコンテナを保持する場所を設定するには、Dockerデーモンの`daemon.json`ファイルで`data-root`パラメータを更新します。 詳細については、[設定ファイルを使用してDockerを設定する](https://learn.microsoft.com/en-us/virtualization/windowscontainers/manage-docker/configure-docker-daemon#configure-docker-with-a-configuration-file)を参照してください。 ### サポートされているWindowsバージョン {#supported-windows-versions} GitLab Runnerは、[Windowsのサポートライフサイクル](../install/support-policy.md#windows-version-support)に従う次のバージョンのWindowsのみをサポートします。 - Windows Server 2022 LTSC(21H2) - Windows Server 2019 LTSC(1809) 将来のWindows Serverバージョンについては、[将来のバージョンサポートポリシー](../install/support-policy.md#windows-version-support)があります。 Dockerデーモンが実行されているOSバージョンに基づいたコンテナのみを実行できます。たとえば、次の[`Windows Server Core`](https://hub.docker.com/r/microsoft/windows-servercore)イメージを使用できます。 - `mcr.microsoft.com/windows/servercore:ltsc2022` - `mcr.microsoft.com/windows/servercore:ltsc2022-amd64` - `mcr.microsoft.com/windows/servercore:1809` - `mcr.microsoft.com/windows/servercore:1809-amd64` - `mcr.microsoft.com/windows/servercore:ltsc2019` ### サポートされているDockerのバージョン {#supported-docker-versions} GitLab RunnerはDockerを使用して、実行されているWindows Serverのバージョンを検出します。したがって、GitLab Runnerを実行しているWindows Serverで、最新バージョンのDockerが実行されている必要があります。 GitLab Runnerで機能しない既知のDockerのバージョンは`Docker 17.06`です。DockerはWindows Serverのバージョンを識別しないため、次のエラーが発生します。 ```plaintext unsupported Windows Version: Windows Server Datacenter ``` [この問題のトラブルシューティングの詳細については、こちらを参照してください](../install/windows.md#docker-executor-unsupported-windows-version)。 ### 
Windows Docker executorを設定する {#configure-a-windows-docker-executor} {{< alert type="note" >}} ソースディレクトリとして`c:\\cache`を指定してRunnerが登録されている場合に`--docker-volumes`または`DOCKER_VOLUMES`環境変数を渡すときの[既知のイシュー](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4312)があります。 {{< /alert >}} Windowsを実行しているDocker executorの設定の例を次に示します。 ```toml [[runners]] name = "windows-docker-2019" url = "https://gitlab.com/" token = "xxxxxxx" executor = "docker-windows" [runners.docker] image = "mcr.microsoft.com/windows/servercore:1809_amd64" volumes = ["c:\\cache"] ``` Docker executorのその他の設定オプションについては、[高度な設定](../configuration/advanced-configuration.md#the-runnersdocker-section)セクションを参照してください。 ### サービス {#services} [ジョブごとにネットワークを](#create-a-network-for-each-job)有効にすることによって、[サービス](https://docs.gitlab.com/ci/services/)を使用することができます。 ## ネイティブステップRunnerインテグレーション {#native-step-runner-integration} {{< history >}} - GitLab 17.6.0で、機能フラグ`FF_USE_NATIVE_STEPS`により隠されている状態で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5069)されました。デフォルトでは無効になっています。 - GitLab 17.9.0で[更新](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5322)されました。GitLab Runnerは、`step-runner`バイナリをビルドコンテナに挿入し、それに合わせて`$PATH`環境変数を調整します。この拡張機能により、任意のイメージをビルドイメージとして使用できます。 {{< /history >}} Docker executorは、[`step-runner`](https://gitlab.com/gitlab-org/step-runner)が提供する`gRPC` APIを使用して[CI/CDステップ](https://docs.gitlab.com/ci/steps/)をネイティブに実行することをサポートしています。 この実行モードを有効にするには、従来の`script`キーワードの代わりに`run`キーワードを使用してCI/CDジョブを指定する必要があります。さらに、`FF_USE_NATIVE_STEPS`機能フラグを有効にする必要があります。この機能フラグは、ジョブレベルまたはパイプラインレベルで有効にできます。 ```yaml step job: stage: test variables: FF_USE_NATIVE_STEPS: true image: name: alpine:latest run: - name: step1 script: pwd - name: step2 script: env - name: step3 script: ls -Rlah ../ ``` ### 既知の問題 {#known-issues-1} - GitLab 17.9以降では、ビルドイメージで`ca-certificates`パッケージがインストールされている必要があります。インストールされていないと、`step-runner`がジョブで定義されているステップのプルに失敗します。たとえば、DebianベースのLinuxディストリビューションは、デフォルトでは`ca-certificates`をインストールしません。 - 
17.9より前のGitLabバージョンでは、ビルドイメージで`$PATH`に`step-runner`バイナリが含まれている必要があります。これを実現するには、次のいずれかを実行します。 - 独自のカスタムビルドイメージを作成し、`step-runner`バイナリを含めます。 - `registry.gitlab.com/gitlab-org/step-runner:v0`イメージに、ジョブの実行に必要な依存関係が含まれている場合は、このイメージを使用します。 - Dockerコンテナを実行するステップの実行は、従来の`scripts`と同じ設定パラメータと制約に従う必要があります。たとえば、[Docker-in-Docker](#use-docker-in-docker-with-privileged-mode)を使用する必要があります。 - この実行モードでは、[`Github Actions`](https://gitlab.com/components/action-runner)の実行はサポートされていません。 ================================================ FILE: docs-locale/ja-jp/executors/docker_autoscaler.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Docker Autoscaler executor --- {{< history >}} - GitLab Runner 15.11.0で[実験的機能](https://docs.gitlab.com/policy/development_stages_support/#experiment)として導入されました。 - GitLab Runner 16.6で[ベータ](https://docs.gitlab.com/policy/development_stages_support/#beta)に[変更](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29404)されました。 - GitLab Runner 17.1で[一般提供](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29221)になりました。 {{< /history >}} Docker Autoscaler executorを使用する前に、一連の既知のイシューについて、GitLab Runnerオートスケールに関する[フィードバックイシュー](https://gitlab.com/gitlab-org/gitlab/-/issues/408131)を参照してください。 Docker Autoscaler executorは、Runnerマネージャーが処理するジョブに対処するために、オンデマンドでインスタンスを作成するオートスケール対応のDocker executorです。[Docker executor](docker.md)をラップしているため、すべてのDocker executorのオプションと機能がサポートされています。 Docker Autoscalerは、[フリートプラグイン](https://gitlab.com/gitlab-org/fleeting/plugins)を使用してオートスケールします。フリートとは、オートスケールされたインスタンスのグループの抽象化であり、Google Cloud、AWS、Azureなどのクラウドプロバイダーをサポートするプラグインを使用します。 ## フリートプラグインをインストールする {#install-a-fleeting-plugin} 
ご使用のターゲットプラットフォームに対応するプラグインをインストールするには、[フリートプラグインをインストールする](../fleet_scaling/fleeting.md#install-a-fleeting-plugin)を参照してください。具体的な設定について詳しくは、[それぞれのプラグインプロジェクトのドキュメント](https://gitlab.com/gitlab-org/fleeting/plugins)を参照してください。 ## Docker Autoscalerを設定する {#configure-docker-autoscaler} Docker Autoscaler executorは[Docker executor](docker.md)をラップしているため、すべてのDocker executorオプションと機能がサポートされています。 Docker Autoscalerを設定するには、`config.toml`で以下のように設定します。 - [`[runners]`](../configuration/advanced-configuration.md#the-runners-section)セクションで`executor`を`docker-autoscaler`として指定します。 - 以下のセクションで、要件に基づいてDocker Autoscalerを設定します。 - [`[runners.docker]`](../configuration/advanced-configuration.md#the-runnersdocker-section) - [`[runners.autoscaler]`](../configuration/advanced-configuration.md#the-runnersautoscaler-section) ### 各Runner設定の専用オートスケールグループ {#dedicated-autoscaling-groups-for-each-runner-configuration} 各Docker Autoscaler設定には、それぞれに専用のオートスケールリソースが必要です。 - AWSでは専用のオートスケールグループ - GCPでは専用のインスタンスグループ - Azureでは専用のスケールセット これらのオートスケールリソースを以下の要素間で共有しないでください。 - 複数のRunnerマネージャー(個別のGitLab Runnerインストール) - 同じRunnerマネージャーの`config.toml`内の複数の`[[runners]]`エントリ Docker Autoscalerは、クラウドプロバイダーのオートスケールリソースと同期する必要があるインスタンスの状態を追跡します。複数のシステムが同じオートスケールリソースを管理しようとすると、競合するスケーリングコマンドが発行され、予測できない動作、ジョブの失敗、および高い可能性があるコストが発生する可能性があります。 ### 例: インスタンスあたり1つのジョブに対するAWSオートスケール {#example-aws-autoscaling-for-1-job-per-instance} 前提条件: - [Docker Engine](https://docs.docker.com/engine/)がインストールされたAMI。RunnerマネージャーがAMI上のDockerソケットにアクセスできるようにするには、ユーザーが`docker`グループに所属している必要があります。 {{< alert type="note" >}} AMIでは、GitLab Runnerをインストールする必要はありません。AMIを使用して起動されたインスタンスを、GitLabにRunnerとして登録しないようにしてください。 {{< /alert >}} - AWSオートスケールグループ。Runnerはすべてのスケール動作を直接管理します。スケーリングポリシーには、`none`を使用し、インスタンススケールイン保護をオンにします。複数のアベイラビリティーゾーンを設定している場合は、`AZRebalance`プロセスをオフにします。 - [適切な権限](https://gitlab.com/gitlab-org/fleeting/plugins/aws#recommended-iam-policy)が設定されたIAMポリシー。 この設定では以下がサポートされています。 - インスタンスあたりのキャパシティ: 1 - 使用回数: 1 - アイドルスケール: 5 - アイドル時間: 20分 - インスタンスの最大数: 
10 キャパシティと使用回数を両方とも1に設定することで、各ジョブに、他のジョブの影響を受けない安全な一時インスタンスが与えられます。ジョブが完了すると即時に、ジョブが実行されていたインスタンスが削除されます。 アイドルスケールが5の場合、Runnerは将来の需要に備えて5つのインスタンス全体を維持しようとします(インスタンスあたりのキャパシティが1であるため)。これらのインスタンスは少なくとも20分間維持されます。 Runnerの`concurrent`フィールドは10(インスタンスの最大数*インスタンスあたりのキャパシティ)に設定されます。 ```toml concurrent = 10 [[runners]] name = "docker autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" # use powershell or pwsh for Windows AMIs # uncomment for Windows AMIs when the Runner manager is hosted on Linux # environment = ["FF_USE_POWERSHELL_PATH_RESOLVER=1"] executor = "docker-autoscaler" # Docker Executor config [runners.docker] image = "busybox:latest" # Autoscaler config [runners.autoscaler] plugin = "aws" # in GitLab 16.11 and later, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # in GitLab 16.10 and earlier, manually install the plugin and use: # plugin = "fleeting-plugin-aws" capacity_per_instance = 1 max_use_count = 1 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-docker-asg" # AWS Autoscaling Group name profile = "default" # optional, default is 'default' config_file = "/home/user/.aws/config" # optional, default is '~/.aws/config' credentials_file = "/home/user/.aws/credentials" # optional, default is '~/.aws/credentials' [runners.autoscaler.connector_config] username = "ec2-user" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ### 例: インスタンスあたり1つのジョブに対するGoogle Cloudインスタンスグループ {#example-google-cloud-instance-group-for-1-job-per-instance} 前提条件: - [Docker Engine](https://docs.docker.com/engine/)がインストールされたVMイメージ([`COS`](https://docs.cloud.google.com/container-optimized-os/docs)など)。 {{< alert type="note" >}} VMイメージでは、GitLab Runnerをインストールする必要はありません。VMイメージを使用して起動されたインスタンスを、GitLabにRunnerとして登録しないようにしてください。 {{< /alert >}} - シングルゾーンGoogle Cloudインスタンスグループ。**Autoscaling mode**で**Do not 
autoscale**を選択します。Runnerがオートスケールを処理し、Google Cloudインスタンスグループは処理しません。 {{< alert type="note" >}} 現在のところ、マルチゾーンインスタンスグループはサポートされていません。将来マルチゾーンインスタンスグループをサポートするための[イシュー](https://gitlab.com/gitlab-org/fleeting/plugins/googlecloud/-/issues/20)が存在しています。 {{< /alert >}} - [適切な権限](https://gitlab.com/gitlab-org/fleeting/plugins/googlecloud#required-permissions)が設定されたIAMポリシー。GKEクラスターにRunnerをデプロイする場合は、KubernetesサービスアカウントとGCPサービスアカウントの間にIAMバインディングを追加できます。`credentials_file`でキーファイルを使用する代わりに、`iam.workloadIdentityUser`ロールでこのバインディングを追加し、GCPに対して認証できます。 この設定では以下がサポートされています。 - インスタンスあたりのキャパシティ: 1 - 使用回数: 1 - アイドルスケール: 5 - アイドル時間: 20分 - インスタンスの最大数: 10 キャパシティと使用回数を両方とも1に設定することで、各ジョブに、他のジョブの影響を受けない安全な一時インスタンスが与えられます。ジョブが完了すると即時に、ジョブが実行されていたインスタンスが削除されます。 アイドルスケールが5の場合、Runnerは将来の需要に備えて5つのインスタンス全体を維持しようとします(インスタンスあたりのキャパシティが1であるため)。これらのインスタンスは少なくとも20分間維持されます。 Runnerの`concurrent`フィールドは10(インスタンスの最大数*インスタンスあたりのキャパシティ)に設定されます。 ```toml concurrent = 10 [[runners]] name = "docker autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" # use powershell or pwsh for Windows Images # uncomment for Windows Images when the Runner manager is hosted on Linux # environment = ["FF_USE_POWERSHELL_PATH_RESOLVER=1"] executor = "docker-autoscaler" # Docker Executor config [runners.docker] image = "busybox:latest" # Autoscaler config [runners.autoscaler] plugin = "googlecloud" # for >= 16.11, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # for versions < 17.0, manually install the plugin and use: # plugin = "fleeting-plugin-googlecompute" capacity_per_instance = 1 max_use_count = 1 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-docker-instance-group" # Google Cloud Instance Group name project = "my-gcp-project" zone = "europe-west1" credentials_file = "/home/user/.config/gcloud/application_default_credentials.json" # optional, default is 
'~/.config/gcloud/application_default_credentials.json' [runners.autoscaler.connector_config] username = "runner" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ### 例: インスタンスあたり1つのジョブに対するAzureスケールセット {#example-azure-scale-set-for-1-job-per-instance} 前提条件: - [Docker Engine](https://docs.docker.com/engine/)がインストールされているAzure VMイメージ。 {{< alert type="note" >}} VMイメージでは、GitLab Runnerをインストールする必要はありません。VMイメージを使用して起動されたインスタンスを、GitLabにRunnerとして登録しないようにしてください。 {{< /alert >}} - オートスケールポリシーが`manual`に設定されているAzureスケールセット。Runnerがスケーリングを処理します。 この設定では以下がサポートされています。 - インスタンスあたりのキャパシティ: 1 - 使用回数: 1 - アイドルスケール: 5 - アイドル時間: 20分 - インスタンスの最大数: 10 キャパシティと使用回数が両方とも`1`に設定されている場合、各ジョブに、他のジョブの影響を受けない安全な一時インスタンスが与えられます。ジョブが完了すると、ジョブが実行されたインスタンスが直ちに削除されます。 アイドルスケールが`5`に設定されている場合、Runnerは将来の需要に備えて5つのインスタンスを維持します(インスタンスあたりのキャパシティが1であるため)。これらのインスタンスは少なくとも20分間維持されます。 Runnerの`concurrent`フィールドは10(インスタンスの最大数*インスタンスあたりのキャパシティ)に設定されます。 ```toml concurrent = 10 [[runners]] name = "docker autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" # use powershell or pwsh for Windows AMIs # uncomment for Windows AMIs when the Runner manager is hosted on Linux # environment = ["FF_USE_POWERSHELL_PATH_RESOLVER=1"] executor = "docker-autoscaler" # Docker Executor config [runners.docker] image = "busybox:latest" # Autoscaler config [runners.autoscaler] plugin = "azure" # for >= 16.11, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # for versions < 17.0, manually install the plugin and use: # plugin = "fleeting-plugin-azure" capacity_per_instance = 1 max_use_count = 1 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-docker-scale-set" subscription_id = "9b3c4602-cde2-4089-bed8-889e5a3e7102" resource_group_name = "my-resource-group" [runners.autoscaler.connector_config] username = "azureuser" password = "my-scale-set-static-password" 
use_static_credentials = true timeout = "10m" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ## スロットベースのcgroupサポート {#slot-based-cgroup-support} Docker Autoscaler executorは、同時実行ジョブ間のリソース分離を改善するために、スロットベースのcgroupをサポートしています。Cgroupパスは、`--cgroup-parent`フラグを使用して、Dockerコンテナに自動的に適用されます。 利点、前提条件、設定手順など、スロットベースのcgroupの詳細については、[slot-based cgroup support](../configuration/slot_based_cgroups.md)を参照してください。 ### Docker固有の設定 {#docker-specific-configuration} 標準のスロットcgroup設定に加えて、サービスコンテナ用に個別のcgroupテンプレートを指定できます: ```toml [[runners]] executor = "docker-autoscaler" use_slot_cgroups = true slot_cgroup_template = "gitlab-runner/slot-${slot}" [runners.docker] service_slot_cgroup_template = "gitlab-runner/service-slot-${slot}" ``` 利用可能なすべてのオプションについては、[slot-based cgroup configuration documentation](../configuration/slot_based_cgroups.md#docker-specific-configuration)を参照してください。 ## トラブルシューティング {#troubleshooting} ### `ERROR: error during connect: ssh tunnel: EOF ()` {#error-error-during-connect-ssh-tunnel-eof-} インスタンスが外部ソース(オートスケールグループや自動スクリプトなど)によって削除された場合、ジョブは次のエラーで失敗します。 ```plaintext ERROR: Job failed (system failure): error during connect: Post "http://internal.tunnel.invalid/v1.43/containers/xyz/wait?condition=not-running": ssh tunnel: EOF () ``` また、GitLab Runnerのログには、ジョブに割り当てられたインスタンスIDの`instance unexpectedly removed`エラーが表示されます。 ```plaintext ERROR: instance unexpectedly removed instance= max-use-count=9999 runner=XYZ slots=map[] subsystem=taskscaler used=45 ``` このエラーを解決するには、クラウドプロバイダープラットフォームでインスタンスに関連するイベントを確認してください。たとえばAWSでは、イベントソース`ec2.amazonaws.com`のCloudTrailイベント履歴を確認します。 ### `ERROR: Preparation failed: unable to acquire instance: context deadline exceeded` {#error-preparation-failed-unable-to-acquire-instance-context-deadline-exceeded} [AWSフリートプラグイン](https://gitlab.com/gitlab-org/fleeting/plugins/aws)を使用している場合、ジョブが失敗して次のエラーになることが断続的に発生する可能性があります。 ```plaintext ERROR: Preparation failed: unable to acquire instance: context deadline 
exceeded ``` `reserved`のインスタンス数が変動するため、多くの場合、これはAWS CloudWatchのログの中に示されます。 ```plaintext "2024-07-23T18:10:24Z","instance_count:1,max_instance_count:1000,acquired:0,unavailable_capacity:0,pending:0,reserved:0,idle_count:0,scale_factor:0,scale_factor_limit:0,capacity_per_instance:1","required scaling change", "2024-07-23T18:10:25Z","instance_count:1,max_instance_count:1000,acquired:0,unavailable_capacity:0,pending:0,reserved:1,idle_count:0,scale_factor:0,scale_factor_limit:0,capacity_per_instance:1","required scaling change", "2024-07-23T18:11:15Z","instance_count:1,max_instance_count:1000,acquired:0,unavailable_capacity:0,pending:0,reserved:0,idle_count:0,scale_factor:0,scale_factor_limit:0,capacity_per_instance:1","required scaling change", "2024-07-23T18:11:16Z","instance_count:1,max_instance_count:1000,acquired:0,unavailable_capacity:0,pending:0,reserved:1,idle_count:0,scale_factor:0,scale_factor_limit:0,capacity_per_instance:1","required scaling change", ``` このエラーを解決するには、AWSでオートスケールグループに対して`AZRebalance`プロセスが無効になっていることを確認してください。 ================================================ FILE: docs-locale/ja-jp/executors/docker_machine.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Docker MachineでのオートスケールのためにGitLab Runnerをインストールして登録する --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} {{< alert type="note" >}} Docker Machine ExecutorはGitLab 17.5で非推奨となりました。GitLab 20.0(2027年5月)で削除される予定です。GitLab 20.0まではDocker Machine Executorのサポートが継続されますが、新機能を追加する予定はありません。CI/CDジョブの実行を妨げる可能性のある重大なバグ、または実行コストに影響を与えるバグのみに対処します。Amazon Web Services(AWS)EC2、Microsoft Azure Compute、またはGoogle Compute Engine(GCE)でDocker Machine Executorを使用している場合は、[GitLab Runner 
Autoscaler](../runner_autoscale/_index.md)に移行してください。 {{< /alert >}} オートスケールアーキテクチャの概要については、[オートスケールに関する包括的なドキュメント](../configuration/autoscale.md)をご覧ください。 ## Docker Machineのフォークバージョン {#forked-version-of-docker-machine} Dockerでは[Docker Machineが非推奨になりました](https://gitlab.com/gitlab-org/gitlab/-/issues/341856)。ただしGitLabでは、Docker Machine executorを利用しているGitLab Runnerユーザーのために[Docker Machineフォーク](https://gitlab.com/gitlab-org/ci-cd/docker-machine)を維持しています。このフォークは、`docker-machine`の最新の`main`ブランチをベースにしており、次のバグに対する追加パッチがいくつか含まれています。 - [DigitalOceanドライバーをRateLimit対応にする](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/2) - [Googleドライバーオペレーションチェックにバックオフを追加する](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/7) - [マシン作成のための`--google-min-cpu-platform`オプションを追加する](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/9) - [キャッシュされているIPをGoogleドライバーに使用する](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/15) - [キャッシュされているIPをAWSドライバーに使用する](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/14) - [Google Compute EngineでGPUを使用するためのサポートを追加する](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/48) - [IMDSv2でAWSインスタンスを実行するためのサポートを追加する](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/merge_requests/49) [Docker Machineフォーク](https://gitlab.com/gitlab-org/ci-cd/docker-machine)の目的は、実行コストに影響を与える重大な問題とバグのみを修正することです。新しい機能を追加する予定はありません。 ## 環境を準備する {#preparing-the-environment} オートスケール機能を使用するには、DockerとGitLab Runnerが同じマシンにインストールされている必要があります。 1. 踏み台サーバーとして機能できる新しいLinuxベースのマシンにサインインします。この踏み台サーバーでDockerが新しいマシンを作成します。 1. [GitLab Runnerをインストールします](../install/_index.md)。 1. [Docker Machineフォーク](https://gitlab.com/gitlab-org/ci-cd/docker-machine)からDocker Machineをインストールします。 1. オプションですが、オートスケールされたRunnerで使用する[プロキシコンテナレジストリとキャッシュサーバー](../configuration/speed_up_job_execution.md)を準備することを推奨します。 ## GitLab Runnerを設定する {#configuring-gitlab-runner} 1. 
`docker-machine`と`gitlab-runner`を使用するという基本的な概念を理解します。 - [GitLab Runnerのオートスケール](../configuration/autoscale.md)を読みます - [GitLab Runner MachineOptions](../configuration/advanced-configuration.md#the-runnersmachine-section)を読みます 1. Docker Machineを**初めて**使用する場合は、[Docker Machineドライバー](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/tree/main/drivers)を指定した`docker-machine create ...`コマンドを手動で実行する方法が最良の方法です。`[runners.machine]`セクションの[MachineOptions](../configuration/advanced-configuration.md#the-runnersmachine-section)で設定するオプションを使用して、このコマンドを実行します。この手法ではDocker Machine環境が適切に設定され、指定されたオプションが検証されます。その後に`docker-machine rm [machine_name]`でマシンを破棄し、Runnerを起動できます。 {{< alert type="note" >}} **最初の使用時**に実行される`docker-machine create`に対する複数の同時リクエストは、適切ではありません。`docker+machine` executorが使用されている場合、Runnerはいくつかの同時`docker-machine create`コマンドを起動することがあります。Docker Machineがこの環境に初めて導入される場合、各プロセスはDocker API認証のためのSSHキーとSSL証明書の作成を試行します。この動作が原因で、同時実行プロセスが互いに干渉します。これにより、動作しない環境になる可能性があります。そのため、Docker MachineでGitLab Runnerを初めてセットアップするときには、テストマシンを手動で作成することが重要です。 {{< /alert >}} 1. [Runnerを登録](../register/_index.md)し、要求されたら`docker+machine` executorを選択します。 1. [`config.toml`](../commands/_index.md#configuration-file)を編集し、Docker Machineを使用するようにRunnerを設定します。[GitLab Runner](../configuration/autoscale.md)オートスケールに関する詳細情報を記載した専用ページを参照してください。 1. これで、プロジェクトでパイプラインを新規作成して開始できます。数秒後に`docker-machine ls`を実行すると、新しいマシンが作成されていることがわかります。 ## GitLab Runnerをアップグレードする {#upgrading-gitlab-runner} 1. ご使用のオペレーティングシステムがGitLab Runnerを自動的に再起動するように設定されているかどうかを確認します(たとえば、そのサービスファイルを確認します)。 - **設定されている**場合は、サービスマネージャーが[`SIGQUIT`を使用するように設定されている](../configuration/init.md)ことを確認し、サービスツールを使用してプロセスを停止します。 ```shell # For systemd sudo systemctl stop gitlab-runner # For upstart sudo service gitlab-runner stop ``` - **設定されていない**場合は、プロセスを手動で停止できます。 ```shell sudo killall -SIGQUIT gitlab-runner ``` {{< alert type="note" >}} [`SIGQUIT`シグナル](../commands/_index.md#signals)を送信すると、プロセスが正常に停止します。プロセスは新しいジョブの受け入れを停止し、現在のジョブが完了すると直ちに終了します。 {{< /alert >}} 1. 
GitLab Runnerが終了するまで待ちます。`gitlab-runner status`でその状態を確認するか、正常なシャットダウンが行われるまで最大30分間待つことができます。 ```shell for i in `seq 1 180`; do # 1800 seconds = 30 minutes gitlab-runner status || break sleep 10 done ``` 1. これで、ジョブを中断することなく、新しいバージョンのGitLab Runnerを安全にインストールできます。 ## Docker Machineのフォークバージョンを使用する {#using-the-forked-version-of-docker-machine} ### インストール {#install} 1. [適切な`docker-machine`バイナリ](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/releases)をダウンロードします。`PATH`がアクセスできる場所にバイナリをコピーし、実行可能にします。たとえば、`v0.16.2-gitlab.43`をダウンロードしてインストールするには、次のようにします。 ```shell curl -O "https://gitlab-docker-machine-downloads.s3.amazonaws.com/v0.16.2-gitlab.43/docker-machine-Linux-x86_64" cp docker-machine-Linux-x86_64 /usr/local/bin/docker-machine chmod +x /usr/local/bin/docker-machine ``` ### Google Compute EngineでGPUを使用する {#using-gpus-on-google-compute-engine} {{< alert type="note" >}} GPUは[すべてのexecutorでサポートされています](../configuration/gpus.md)。GPUサポートのためだけにDocker Machineを使用する必要はありません。Docker Machine ExecutorはGPUノードをスケールアップおよびスケールダウンします。この目的で[Kubernetes executor](kubernetes/_index.md)を使用することもできます。 {{< /alert >}} Docker Machine[フォーク](#forked-version-of-docker-machine)を使用して、[GPU(グラフィックスプロセッシングユニット)を使用するGoogle Compute Engineインスタンス](https://docs.cloud.google.com/compute/docs/gpus)を作成できます。 #### Docker Machine GPUオプション {#docker-machine-gpu-options} GPUを使用するインスタンスを作成するには、次のDocker Machineオプションを使用します。 | オプション | 例 | 説明 | |-------------------------------|--------------------------------|-------------| | `--google-accelerator` | `type=nvidia-tesla-p4,count=1` | インスタンスにアタッチするGPUアクセラレータのタイプと数を指定します(`type=TYPE,count=N`形式)。 | | `--google-maintenance-policy` | `TERMINATE` | [Google CloudではGPUインスタンスのライブ移行が許可されていない](https://docs.cloud.google.com/compute/docs/instances/live-migration-process)ため、常に`TERMINATE`を使用してください。 | | `--google-machine-image` | `https://www.googleapis.com/compute/v1/projects/deeplearning-platform-release/global/images/family/tf2-ent-2-3-cu110` | 
GPU対応オペレーティングシステムのURL。[使用可能なイメージのリスト](https://docs.cloud.google.com/deep-learning-vm/docs/images)を参照してください。 | | `--google-metadata` | `install-nvidia-driver=True` | このフラグは、NVIDIA GPUドライバーをインストールするようにイメージに指示します。 | これらの引数は、[`gcloud compute`のコマンドライン引数](https://docs.cloud.google.com/compute/docs/gcloud-compute)にマップされます。詳細については、[GPUがアタッチされたVMの作成に関するGoogleドキュメント](https://docs.cloud.google.com/compute/docs/gpus/create-vm-with-gpus)を参照してください。 #### Docker Machineオプションを検証する {#verifying-docker-machine-options} システムを準備し、Google Compute EngineでGPUを作成できることをテストするには、次の手順に従います: 1. Docker Machineの[Google Compute Engineドライバー認証情報をセットアップ](https://gitlab.com/gitlab-org/ci-cd/docker-machine/-/blob/main/docs/drivers/gce.md#credentials)します。場合によっては、VMにデフォルトのサービスアカウントがないときに環境変数をRunnerにエクスポートする必要があります。その方法は、Runnerの起動方法によって異なります。たとえば、次のいずれかを使用します。 - `systemd`または`upstart`: [カスタム環境変数の設定に関するドキュメント](../configuration/init.md#setting-custom-environment-variables)を参照してください。 - Helmチャートを使用したKubernetes: [`values.yaml`エントリ](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/blob/5e7c5c0d6e1159647d65f04ff2cc1f45bb2d5efc/values.yaml#L431-438)を更新します。 - Docker: `-e`オプションを使用します(`docker run -e GOOGLE_APPLICATION_CREDENTIALS=/path/to/credentials.json gitlab/gitlab-runner`など)。 1. 必要なオプションを指定した`docker-machine`が仮想マシンを作成できることを確認します。たとえば、1つのNVIDIA Tesla P4アクセラレータを備えた`n1-standard-1`マシンを作成するには、`test-gpu`を名前で置き換えて、次のように実行します。 ```shell docker-machine create --driver google --google-project your-google-project \ --google-disk-size 50 \ --google-machine-type n1-standard-1 \ --google-accelerator type=nvidia-tesla-p4,count=1 \ --google-maintenance-policy TERMINATE \ --google-machine-image https://www.googleapis.com/compute/v1/projects/deeplearning-platform-release/global/images/family/tf2-ent-2-3-cu110 \ --google-metadata "install-nvidia-driver=True" test-gpu ``` 1. 
GPUがアクティブであることを確認するには、マシンにSSHで接続し、`nvidia-smi`を実行します。 ```shell $ docker-machine ssh test-gpu sudo nvidia-smi +-----------------------------------------------------------------------------+ | NVIDIA-SMI 450.51.06 Driver Version: 450.51.06 CUDA Version: 11.0 | |-------------------------------+----------------------+----------------------+ | GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC | | Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. | | | | MIG M. | |===============================+======================+======================| | 0 Tesla P4 Off | 00000000:00:04.0 Off | 0 | | N/A 43C P0 22W / 75W | 0MiB / 7611MiB | 3% Default | | | | N/A | +-------------------------------+----------------------+----------------------+ +-----------------------------------------------------------------------------+ | Processes: | | GPU GI CI PID Type Process name GPU Memory | | ID ID Usage | |=============================================================================| | No running processes found | +-----------------------------------------------------------------------------+ ``` 1. 費用を節約するために、このテストインスタンスを削除します。 ```shell docker-machine rm test-gpu ``` #### GitLab Runnerを設定する {#configuring-gitlab-runner-1} 1. 
これらのオプションを検証したら、[`runners.docker`設定](../configuration/advanced-configuration.md#the-runnersdocker-section)で使用可能なすべてのGPUを使用するようにDocker executorを設定します。次に、[GitLab Runner `runners.machine`設定の`MachineOptions`設定](../configuration/advanced-configuration.md#the-runnersmachine-section)にDocker Machineオプションを追加します。例: ```toml [runners.docker] gpus = "all" [runners.machine] MachineOptions = [ "google-project=your-google-project", "google-disk-size=50", "google-disk-type=pd-ssd", "google-machine-type=n1-standard-1", "google-accelerator=count=1,type=nvidia-tesla-p4", "google-maintenance-policy=TERMINATE", "google-machine-image=https://www.googleapis.com/compute/v1/projects/deeplearning-platform-release/global/images/family/tf2-ent-2-3-cu110", "google-metadata=install-nvidia-driver=True" ] ``` ## トラブルシューティング {#troubleshooting} Docker Machine executorを使用するときに次の問題が発生する可能性があります。 ### エラー: マシンの作成エラー {#error-error-creating-machine} Docker Machineをインストールするときに、`ERROR: Error creating machine: Error running provisioning: error installing docker`というエラーが発生することがあります。 Docker Machineは次のスクリプトを使用して、新しくプロビジョニングされた仮想マシンへのDockerのインストールを試行します。 ```shell if ! type docker; then curl -sSL "https://get.docker.com" | sh -; fi ``` `docker`コマンドが成功した場合、Docker MachineはDockerがインストールされたとみなして続行します。 成功しなかった場合、Docker Machineは`https://get.docker.com`でスクリプトをダウンロードして実行しようとします。インストールが失敗する場合は、オペレーティングシステムがDockerでサポートされなくなった可能性があります。 この問題を解決するには、GitLab Runnerがインストールされている環境で`MACHINE_DEBUG=true`を設定して、Docker Machineでデバッグを有効にできます。 ### エラー: Dockerデーモンに接続できない {#error-cannot-connect-to-the-docker-daemon} ジョブは、準備段階で次のエラーメッセージで失敗することがあります。 ```plaintext Preparing environment ERROR: Job failed (system failure): prepare environment: Cannot connect to the Docker daemon at tcp://10.200.142.223:2376. Is the docker daemon running? (docker.go:650:120s). 
Check https://docs.gitlab.com/runner/shells/#shell-profile-loading for more information ``` このエラーは、Docker Machine executorによって作成されたVMで、Dockerデーモンが予期されている時間内に起動できなかった場合に発生します。この問題を修正するには、[`[runners.docker]`](../configuration/advanced-configuration.md#the-runnersdocker-section)セクションの`wait_for_services_timeout`の値を大きくします。 ================================================ FILE: docs-locale/ja-jp/executors/instance.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: インスタンスexecutor --- {{< history >}} - GitLab Runner 15.11.0で[実験的機能](https://docs.gitlab.com/policy/development_stages_support/#experiment)として導入されました。 - GitLab Runner 16.6で[ベータ](https://docs.gitlab.com/policy/development_stages_support/#beta)に[変更](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29404)されました。 - GitLab Runner 17.1で[一般提供](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29221)になりました。 {{< /history >}} インスタンスexecutorは、Runnerマネージャーが処理するジョブの予期されるボリュームに対応するために、オンデマンドでインスタンスを作成するオートスケール対応のexecutorです。 ジョブがホストインスタンス、オペレーティングシステム、および接続デバイスへのフルアクセスを必要とする場合は、インスタンスexecutorを使用できます。インスタンスエグゼキューターは、さまざまなレベルの分離とセキュリティを備えたシングルテナントおよびマルチテナントジョブに対応するように構成することもできます。 ## ネストされた仮想化 {#nested-virtualization} インスタンスエグゼキューターは、GitLabが開発した[ネスティングデーモン](https://gitlab.com/gitlab-org/fleeting/nesting)を使用したネストされた仮想化をサポートしています。ネスティングデーモンを使用すると、ジョブのように、分離された短期間のワークロードに使用されるホストシステム上で、事前構成された仮想マシンの作成と削除ができます。ネストは、Apple Siliconインスタンスでのみサポートされています。 ## オートスケールの環境を準備します {#prepare-the-environment-for-autoscaling} オートスケールの環境を準備するには、次のようにします: 1. Runnerマネージャーがインストールおよび構成されているターゲットプラットフォーム用の[Fleetingプラグインをインストール](../fleet_scaling/fleeting.md#install-a-fleeting-plugin)します。 1. 
使用しているプラットフォームのVMイメージを作成します。イメージには以下を含める必要があります: - Git - GitLab Runnerバイナリ {{< alert type="note" >}} ジョブのアーティファクトとキャッシュを処理するには、仮想マシンにGitLab Runnerバイナリをインストールし、Runner実行可能ファイルをデフォルトのパスに保持します。VMイメージでは、GitLab Runnerをインストールする必要はありません。VMイメージを使用して起動されたインスタンスを、GitLabにRunnerとして登録しないようにしてください。 {{< /alert >}} - 実行する予定のジョブに必要な依存関係 ## オートスケールするようにエグゼキューターを構成します {#configure-the-executor-to-autoscale} 前提要件: - 管理者である必要があります。 オートスケールを行うようにインスタンスエグゼキューターを構成するには、`config.toml`の次のセクションを更新します: - [`[runners.autoscaler]`](../configuration/advanced-configuration.md#the-runnersautoscaler-section) - [`[runners.instance]`](../configuration/advanced-configuration.md#the-runnersinstance-section) ## プリエンプティブモード {#preemptive-mode} FleetingとTaskscalerを使用する場合: - オンにすると、Runnerマネージャーは、アイドル状態のインスタンスが使用可能になるまで、新しいCI/CDジョブをリクエストしません。このモードでは、CI/CDジョブはほぼすぐに実行されます。 - プリエンプティブモードがオフになっている場合、Runnerマネージャーは、アイドル状態のインスタンスがそれらのジョブを実行できるかどうかに関係なく、新しいCI/CDジョブをリクエストします。ジョブの数は、`max_instances`と`capacity_per_instance`に基づいています。このモードでは、CI/CDジョブの開始時間が遅くなります。新しいインスタンスをプロビジョニングできない場合があり、CI/CDジョブが実行されない可能性があります。 ## AWSオートスケールグループ構成の例 {#aws-autoscaling-group-configuration-examples} ### インスタンスごとのジョブ数1 {#one-job-per-instance} 前提要件: - 少なくとも`git`とGitLab RunnerがインストールされたAMI。 - AWS Auto Scalingグループ。スケールポリシーには`none`を使用します。Runnerがスケーリングを処理します。 - [適切な権限](https://gitlab.com/gitlab-org/fleeting/plugins/aws#recommended-iam-policy)が設定されたIAMポリシー。 この設定では以下がサポートされています: - 各インスタンスの`1`の容量。 - 使用回数: `1`。 - アイドルスケール: `5`。 - アイドル時間: 20分。 - インスタンスの最大数: `10`。 キャパシティと使用回数が両方とも`1`に設定されている場合、各ジョブに、他のジョブの影響を受けない安全な一時的なインスタンスが与えられます。ジョブが完了すると、ジョブが実行されたインスタンスが直ちに削除されます。 各インスタンスの容量が`1`で、アイドルスケールが`5`の場合、Runnerは将来の需要に備えて5つのインスタンス全体を保持します。これらのインスタンスは、少なくとも20分間は残ります。 Runnerの`concurrent`フィールドは10(インスタンスの最大数*インスタンスあたりのキャパシティ)に設定されます。 ```toml concurrent = 10 [[runners]] name = "instance autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" # Autoscaler config [runners.autoscaler] plugin = "aws" # in GitLab 16.11 and later, 
ensure you run `gitlab-runner fleeting install` to automatically install the plugin # in GitLab 16.10 and earlier, manually install the plugin and use: # plugin = "fleeting-plugin-aws" capacity_per_instance = 1 max_use_count = 1 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-linux-asg" # AWS Autoscaling Group name profile = "default" # optional, default is 'default' config_file = "/home/user/.aws/config" # optional, default is '~/.aws/config' credentials_file = "/home/user/.aws/credentials" # optional, default is '~/.aws/credentials' [runners.autoscaler.connector_config] username = "ec2-user" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ### 無制限の用途でインスタンスあたり5つのジョブ {#five-jobs-per-instance-with-unlimited-uses} 前提要件: - 少なくとも`git`とGitLab RunnerがインストールされたAMI。 - スケールポリシーが`none`に設定されたAWSオートスケールグループ。Runnerがスケーリングを処理します。 - [適切な権限](https://gitlab.com/gitlab-org/fleeting/plugins/aws#recommended-iam-policy)が設定されたIAMポリシー。 この設定では以下がサポートされています: - 各インスタンスの`5`の容量。 - 無制限の使用回数。 - アイドルスケール: `5`。 - アイドル時間: 20分。 - インスタンスの最大数: `10`。 インスタンスあたりの容量を`5`に設定し、使用回数を無制限にすると、各インスタンスはインスタンスのライフタイム全体で5つのジョブを同時に実行します。 アイドルスケールが`5`で、インスタンスのアイドル容量が`5`の場合、使用中の容量が5を下回ると、アイドルインスタンスが1つ作成されます。アイドルインスタンスは、少なくとも20分間は残ります。 これらの環境で実行されるジョブは、**信頼**されている必要があります。それらの間にはほとんど分離がなく、各ジョブが別のジョブのパフォーマンスに影響を与える可能性があるためです。 Runnerの`concurrent`フィールドは50(インスタンスの最大数*インスタンスあたりのキャパシティ)に設定されます。 ```toml concurrent = 50 [[runners]] name = "instance autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" # Autoscaler config [runners.autoscaler] plugin = "aws" # in GitLab 16.11 and later, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # in GitLab 16.10 and earlier, manually install the plugin and use: # plugin = "fleeting-plugin-aws" capacity_per_instance = 5 max_use_count = 0 max_instances = 10 [runners.autoscaler.plugin_config] # 
plugin specific configuration (see plugin documentation) name = "my-windows-asg" # AWS Autoscaling Group name profile = "default" # optional, default is 'default' config_file = "/home/user/.aws/config" # optional, default is '~/.aws/config' credentials_file = "/home/user/.aws/credentials" # optional, default is '~/.aws/credentials' [runners.autoscaler.connector_config] username = "Administrator" timeout = "5m0s" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ### インスタンスあたり2つのジョブ、無制限の使用、EC2 Macインスタンスでのネストされた仮想化 {#two-jobs-per-instance-unlimited-uses-nested-virtualization-on-ec2-mac-instances} 前提要件: - [ネスティング](https://gitlab.com/gitlab-org/fleeting/nesting)と[Tart](https://github.com/cirruslabs/tart)がインストールされたApple Silicon AMI。 - Runnerが使用するTart VMイメージ。VMイメージは、ジョブの`image`キーワードで指定されます。VMイメージには、少なくとも`git`とGitLab Runnerがインストールされている必要があります。 - AWS Auto Scalingグループ。Runnerがスケールを処理するため、スケーリングポリシーには`none`を使用します。MacOSのASGを設定する方法については、[EC2 Macインスタンスのオートスケールの実装](https://aws.amazon.com/blogs/compute/implementing-autoscaling-for-ec2-mac-instances/)を参照してください。 - [適切な権限](https://gitlab.com/gitlab-org/fleeting/plugins/aws#recommended-iam-policy)が設定されたIAMポリシー。 この設定では以下がサポートされています: - 各インスタンスの`2`の容量。 - 無制限の使用回数。 - 分離されたジョブをサポートするためのネストされた仮想化。ネストされた仮想化は、[ネスティング](https://gitlab.com/gitlab-org/fleeting/nesting)がインストールされたAppleシリコンインスタンスでのみ使用できます。 - アイドルスケール: `2`。 - アイドル時間: 24時間。 - インスタンスの最大数: `4`。 各インスタンスの容量が`2`で、使用回数が無制限の場合、各インスタンスはインスタンスのライフタイムの間、2つのジョブを同時に実行します。 アイドルスケールが`2`の場合、使用中の容量が`2`を下回ると、アイドルインスタンスが1つ作成されます。アイドルインスタンスは、少なくとも24時間は残ります。この時間枠は、AWS MacOSインスタンスホストの24時間の最小割り当て期間によるものです。 この環境で実行されるジョブは、[ネスティング](https://gitlab.com/gitlab-org/fleeting/nesting)が各ジョブのネストされた仮想化に使用されるため、信頼する必要はありません。これは、Apple Siliconインスタンスでのみ機能します。 Runnerの`concurrent`フィールドは8(インスタンスの最大数*インスタンスあたりのキャパシティ)に設定されます。 ```toml concurrent = 8 [[runners]] name = "macos applesilicon autoscaler example" url = "https://gitlab.com" token = "" executor = "instance" 
[runners.instance] allowed_images = ["*"] # allow any nesting image [runners.autoscaler] capacity_per_instance = 2 # AppleSilicon can only support 2 VMs per host max_use_count = 0 max_instances = 4 plugin = "aws" # in GitLab 16.11 and later, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # in GitLab 16.10 and earlier, manually install the plugin and use: # plugin = "fleeting-plugin-aws" [[runners.autoscaler.policy]] idle_count = 2 idle_time = "24h" # AWS's MacOS instances [runners.autoscaler.connector_config] username = "ec2-user" key_path = "macos-key.pem" timeout = "1h" # connecting to a MacOS instance can take some time, as they can be slow to provision [runners.autoscaler.plugin_config] name = "mac2metal" region = "us-west-2" [runners.autoscaler.vm_isolation] enabled = true nesting_host = "unix:///Users/ec2-user/Library/Application Support/nesting.sock" [runners.autoscaler.vm_isolation.connector_config] username = "nested-vm-username" password = "nested-vm-password" timeout = "20m" ``` ## Google Cloudインスタンスグループ構成の例 {#google-cloud-instance-group-configuration-examples} ### Google Cloudインスタンスグループを使用したインスタンスあたりのジョブ数1 {#one-job-per-instance-using-a-google-cloud-instance-group} 前提要件: - 少なくとも`git`とGitLab Runnerがインストールされたカスタムイメージ。 - オートスケールモードが`do not autoscale`に設定されているGoogle Cloudインスタンスグループ。Runnerがスケーリングを処理します。 - [適切な権限](https://gitlab.com/gitlab-org/fleeting/plugins/googlecloud#required-permissions)が設定されたIAMポリシー。GKEクラスターにRunnerをデプロイする場合は、KubernetesサービスアカウントとGCPサービスアカウントの間にIAMバインディングを追加できます。`credentials_file`でキーファイルを使用する代わりに、`iam.workloadIdentityUser`ロールでこのバインディングを追加し、GCPに対して認証できます。 この設定では以下がサポートされています: - インスタンスあたりのキャパシティ: 1 - 使用回数: 1 - アイドルスケール: 5 - アイドル時間: 20分 - インスタンスの最大数: 10 キャパシティと使用回数が両方とも`1`に設定されている場合、各ジョブに、他のジョブの影響を受けない安全な一時インスタンスが与えられます。ジョブが完了すると、ジョブが実行されたインスタンスが直ちに削除されます。 アイドルスケールが`5`に設定されている場合、Runnerは将来の需要に備えて5つのインスタンスを維持します(インスタンスあたりのキャパシティが1であるため)。これらのインスタンスは少なくとも20分間維持されます。 
Runnerの`concurrent`フィールドは10(インスタンスの最大数*インスタンスあたりのキャパシティ)に設定されます。 ```toml concurrent = 10 [[runners]] name = "instance autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" # Autoscaler config [runners.autoscaler] plugin = "googlecloud" # for >= 16.11, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # for versions < 17.0, manually install the plugin and use: # plugin = "fleeting-plugin-googlecompute" capacity_per_instance = 1 max_use_count = 1 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-linux-instance-group" # Google Cloud Instance Group name project = "my-gcp-project" zone = "europe-west1-c" credentials_file = "/home/user/.config/gcloud/application_default_credentials.json" # optional, default is '~/.config/gcloud/application_default_credentials.json' [runners.autoscaler.connector_config] username = "runner" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ### Google Cloudインスタンスグループを使用した、インスタンスあたり5つのジョブ、無制限の使用 {#five-jobs-per-instance-unlimited-uses-using-google-cloud-instance-group} 前提要件: - 少なくとも`git`とGitLab Runnerがインストールされたカスタムイメージ。 - インスタンスグループ。Runnerがスケールを処理するため、「オートスケールモード」では「オートスケールしない」を選択します。 - [適切な権限](https://gitlab.com/gitlab-org/fleeting/plugins/googlecloud#required-permissions)が設定されたIAMポリシー。 この設定では以下がサポートされています: - インスタンスあたりのキャパシティ: 5。 - 無制限の使用回数 - アイドルスケール: 5 - アイドル時間: 20分 - インスタンスの最大数: 10 容量が`5`に設定され、使用回数が無制限の場合、各インスタンスはインスタンスのライフタイムの間、5つのジョブを同時に実行します。 これらの環境で実行されるジョブは、**信頼**されている必要があります。それらの間にはほとんど分離がなく、各ジョブが別のジョブのパフォーマンスに影響を与える可能性があるためです。 アイドルスケールが`5`の場合、使用中の容量が`5`を下回ると、アイドルインスタンスが1つ作成されます。これらのインスタンスは少なくとも20分間維持されます。 Runnerの`concurrent`フィールドは50(インスタンスの最大数*インスタンスあたりのキャパシティ)に設定されます。 ```toml concurrent = 50 [[runners]] name = "instance autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" # Autoscaler config 
[runners.autoscaler] plugin = "googlecloud" # for >= 16.11, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # for versions < 17.0, manually install the plugin and use: # plugin = "fleeting-plugin-googlecompute" capacity_per_instance = 5 max_use_count = 0 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-windows-instance-group" # Google Cloud Instance Group name project = "my-gcp-project" zone = "europe-west1-c" credentials_file = "/home/user/.config/gcloud/application_default_credentials.json" # optional, default is '~/.config/gcloud/application_default_credentials.json' [runners.autoscaler.connector_config] username = "Administrator" timeout = "5m0s" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ## Azureスケールセット構成の例 {#azure-scale-set-configuration-examples} ### Azureスケールセットを使用したインスタンスごとのジョブ数1 {#one-job-per-instance-using-an-azure-scale-set} 前提要件: - 少なくとも`git`とGitLab Runnerがインストールされたカスタムイメージ。 - オートスケールモードが`manual`に設定され、オーバープロビジョニングがオフになっているAzureスケールセット。Runnerがスケーリングを処理します。 この設定では以下がサポートされています: - インスタンスあたりのキャパシティ: 1 - 使用回数: 1 - アイドルスケール: 5 - アイドル時間: 20分 - インスタンスの最大数: 10 キャパシティと使用回数が両方とも`1`に設定されている場合、各ジョブに、他のジョブの影響を受けない安全な一時インスタンスが与えられます。ジョブが完了すると、ジョブが実行されたインスタンスが直ちに削除されます。 アイドルスケールが`5`に設定されている場合、Runnerは将来の需要に備えて5つのインスタンスを維持します(インスタンスあたりのキャパシティが1であるため)。これらのインスタンスは少なくとも20分間維持されます。 Runnerの`concurrent`フィールドは10(インスタンスの最大数*インスタンスあたりのキャパシティ)に設定されます。 ```toml concurrent = 10 [[runners]] name = "instance autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" # Autoscaler config [runners.autoscaler] plugin = "azure" # for >= 16.11, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # for versions < 17.0, manually install the plugin and use: # plugin = "fleeting-plugin-azure" capacity_per_instance = 1 max_use_count = 1 max_instances = 10 
[runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-linux-scale-set" # Azure scale set name subscription_id = "9b3c4602-cde2-4089-bed8-889e5a3e7102" resource_group_name = "my-resource-group" [runners.autoscaler.connector_config] username = "runner" password = "my-scale-set-static-password" use_static_credentials = true timeout = "10m" use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ### Azureスケールセットを使用した、インスタンスあたり5つのジョブ、無制限の使用 {#five-jobs-per-instance-unlimited-uses-using-an-azure-scale-set} 前提要件: - 少なくとも`git`とGitLab Runnerがインストールされたカスタムイメージ。 - オートスケールモードが`manual`に設定され、オーバープロビジョニングがオフになっているAzureスケールセット。Runnerがスケーリングを処理します。 この設定では以下がサポートされています: - インスタンスあたりのキャパシティ: 5。 - 無制限の使用回数 - アイドルスケール: 5 - アイドル時間: 20分 - インスタンスの最大数: 10 容量が`5`に設定され、使用回数が無制限の場合、各インスタンスはインスタンスのライフタイムの間、5つのジョブを同時に実行します。 これらの環境で実行されるジョブは、**信頼**されている必要があります。それらの間にはほとんど分離がなく、各ジョブが別のジョブのパフォーマンスに影響を与える可能性があるためです。 アイドルスケールが`2`の場合、使用中の容量が`5`を下回ると、アイドルインスタンスが1つ作成されます。これらのインスタンスは少なくとも20分間維持されます。 Runnerの`concurrent`フィールドは50(インスタンスの最大数*インスタンスあたりのキャパシティ)に設定されます。 ```toml concurrent = 50 [[runners]] name = "instance autoscaler example" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" # Autoscaler config [runners.autoscaler] plugin = "azure" # for >= 16.11, ensure you run `gitlab-runner fleeting install` to automatically install the plugin # for versions < 17.0, manually install the plugin and use: # plugin = "fleeting-plugin-azure" capacity_per_instance = 5 max_use_count = 0 max_instances = 10 [runners.autoscaler.plugin_config] # plugin specific configuration (see plugin documentation) name = "my-windows-scale-set" # Azure scale set name subscription_id = "9b3c4602-cde2-4089-bed8-889e5a3e7102" resource_group_name = "my-resource-group" [runners.autoscaler.connector_config] username = "Administrator" password = "my-scale-set-static-password" use_static_credentials = true timeout = "10m" 
use_external_addr = true [[runners.autoscaler.policy]] idle_count = 5 idle_time = "20m0s" ``` ## トラブルシューティング {#troubleshooting} インスタンスexecutorを使用するときに次の問題が発生する可能性があります: ### `sh: 1: eval: Running on ip-x.x.x.x via runner-host...n: not found` {#sh-1-eval-running-on-ip-xxxx-via-runner-hostn-not-found} このエラーは通常、準備ステップの`eval`コマンドが失敗した場合に発生します。このエラーを解決するには、`bash`シェルに切り替え、[機能フラグ](../configuration/feature-flags.md) `FF_USE_NEW_BASH_EVAL_STRATEGY`を有効にします。 ================================================ FILE: docs-locale/ja-jp/executors/kubernetes/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see title: Kubernetes executor --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} ビルドにKubernetesクラスターを使用する場合、Kubernetes executorを使用します。executorはKubernetesクラスターAPIを呼び出し、GitLab CIジョブごとにポッドを作成します。 Kubernetes executorは、ビルドを複数のステップに分割します。 1. **準備**: Kubernetesクラスターに対してポッドを作成します。これにより、ビルドに必要なコンテナと、実行するサービスが作成されます。 1. **ビルド前**: クローン、キャッシュの復元、および前のステージからアーティファクトのダウンロードを実行します。このステップは、ポッドの一部である特別なコンテナで実行されます。 1. **ビルド**: ユーザービルド。 1. 
**ビルド後**: キャッシュの作成、GitLabへのアーティファクトのアップロードを実行します。このステップでも、ポッドの一部である特別なコンテナを使用します。 ## RunnerがKubernetesポッドを作成する仕組み {#how-the-runner-creates-kubernetes-pods} 次の図は、GitLabインスタンスとKubernetesクラスターでホストされているRunner間の相互作用を示しています。RunnerはKubernetes APIを呼び出して、クラスター上にポッドを作成します。 ポッドは、`.gitlab-ci.yml`ファイルまたは`config.toml`ファイルで定義されている`service`ごとに次のコンテナで構成されます。 - `build`として定義されているビルドコンテナ。 - `helper`として定義されているヘルパーコンテナ。 - `svc-X`として定義されているサービスコンテナ。`X`は`[0-9]+`です。 サービスとコンテナは同じKubernetesポッドで実行され、同じlocalhostアドレスを共有します。次の制限が適用されます。 - これらのサービスには、そのDNS名を介してアクセスできます。これよりも古いバージョンを使用する場合は、`localhost`を使用する必要があります。 - 同じポートを使用する複数のサービスを使用することはできません。たとえば、2つの`mysql`サービスを同時に使用することはできません。 ```mermaid sequenceDiagram participant G as GitLab instance participant R as Runner on Kubernetes cluster participant Kube as Kubernetes API participant P as POD R->>+G: Get a CI job. loop G-->R: ; end Note over R,G: POST /api/v4/jobs/request G->>+R: CI job data. R-->>-Kube: Create a POD to run the CI job. Note over R,Kube: POST to Kube API P->>+P: Execute job. 
Note over P: CI build job = Prepare + Pre-build + Build + Post-build P->>+G: Job logs ``` この図に示されている相互作用は、すべてのKubernetesクラスターで有効です。たとえば、主要パブリッククラウドプロバイダーでホストされているターンキーソリューションや、Self-Managed Kubernetesインストールなどです。 ## Kubernetes APIに接続する {#connect-to-the-kubernetes-api} Kubernetes APIに接続するには次のオプションを使用します。提供されるユーザーアカウントには、指定されたネームスペースでポッドを作成、リストし、ポッドにアタッチするための権限が必要です。 | オプション | 説明 | |-------------|-------------| | `host` | オプションのKubernetes APIサーバーホストのURL(指定されていない場合は自動検出が試行されます)。 | | `context` | お使いの`kubectl`設定から使用するオプションのKubernetesコンテキスト名。`host`を指定しない場合、このオプションを使用します。 | | `cert_file` | オプションのKubernetes APIサーバーユーザー認証証明書。 | | `key_file` | オプションのKubernetes APIサーバーユーザー認証秘密キー。 | | `ca_file` | オプションのKubernetes APIサーバーCA証明書。 | KubernetesクラスターでGitLab Runnerを実行している場合に、GitLab RunnerがKubernetes APIを自動的に検出できるようにするには、これらのフィールドを省略します。 クラスターの外部でGitLab Runnerを実行している場合、これらの設定により、GitLab Runnerがクラスター上のKubernetes APIにアクセスできるようになります。`host`を認証情報とともに指定するか、`context`を使用して`kubectl`設定の特定のコンテキストを参照できます。 ### Kubernetes APIコールのベアラートークンを設定する {#set-the-bearer-token-for-kubernetes-api-calls} ポッドを作成するためにAPIコールのベアラートークンを設定するには、`KUBERNETES_BEARER_TOKEN`変数を使用します。これにより、プロジェクトのオーナーがプロジェクトのシークレット変数を使用してベアラートークンを指定できます。 ベアラートークンを指定する場合は、`Host`設定を指定する必要があります。 ``` yaml variables: KUBERNETES_BEARER_TOKEN: thebearertokenfromanothernamespace ``` ### Runner APIの権限を設定する {#configure-runner-api-permissions} コアAPIグループの権限を設定するには、GitLab Runner Helmチャートの`values.yml`ファイルを更新します。 次のいずれかの方法があります。 - `rbac.create`を`true`に設定します。 - `values.yml`ファイルで、次の権限が付与されているサービスアカウント`serviceAccount.name: `を指定します。 | リソース | 動詞(オプションの機能/設定フラグ) | |----------|-------------------------------| | events | list(`print_pod_warning_events=true`)、watch(`FF_PRINT_POD_EVENTS=true`) | | namespaces | create(`kubernetes.NamespacePerJob=true`)、delete(`kubernetes.NamespacePerJob=true`) | | poddisruptionbudgets | 作成 (`pod_disruption_budget=true`)、取得 (`pod_disruption_budget=true`) | | pods | 
create、delete、get、list([Informerを使用](#informers))、watch([Informerを使用](#informers)、`FF_KUBERNETES_HONOR_ENTRYPOINT=true`、`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`) | | pods/attach | create(`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`)、delete(`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`)、get(`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`)、patch(`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`) | | pods/exec | create、delete、get、patch | | pods/log | get(`FF_KUBERNETES_HONOR_ENTRYPOINT=true`、`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`、`FF_WAIT_FOR_POD_TO_BE_REACHABLE=true`)、list(`FF_KUBERNETES_HONOR_ENTRYPOINT=true`、`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`) | | secrets | create、delete、get、update | | serviceaccounts | get | | services | create、get | 必要な権限を持つロールを作成するには、次のYAMLロール定義を使用できます。 ```yaml apiVersion: rbac.authorization.k8s.io/v1 kind: Role metadata: name: gitlab-runner namespace: default rules: - apiGroups: [""] resources: ["events"] verbs: - "list" # Required when `print_pod_warning_events=true` - "watch" # Required when `FF_PRINT_POD_EVENTS=true` - apiGroups: [""] resources: ["namespaces"] verbs: - "create" # Required when `kubernetes.NamespacePerJob=true` - "delete" # Required when `kubernetes.NamespacePerJob=true` - apiGroups: ["policy"] resources: ["poddisruptionbudgets"] verbs: - "create" # Required when `pod_disruption_budget=true` - "get" # Required when `pod_disruption_budget=true` - apiGroups: [""] resources: ["pods"] verbs: - "create" - "delete" - "get" - "list" # Required when using Informers (https://docs.gitlab.com/runner/executors/kubernetes/#informers) - "watch" # Required when `FF_KUBERNETES_HONOR_ENTRYPOINT=true`, `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`, using Informers (https://docs.gitlab.com/runner/executors/kubernetes/#informers) - apiGroups: [""] resources: ["pods/attach"] verbs: - "create" # Required when `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false` - "delete" # Required 
when `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false` - "get" # Required when `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false` - "patch" # Required when `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false` - apiGroups: [""] resources: ["pods/exec"] verbs: - "create" - "delete" - "get" - "patch" - apiGroups: [""] resources: ["pods/log"] verbs: - "get" # Required when `FF_KUBERNETES_HONOR_ENTRYPOINT=true`, `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false`, `FF_WAIT_FOR_POD_TO_BE_REACHABLE=true` - "list" # Required when `FF_KUBERNETES_HONOR_ENTRYPOINT=true`, `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY=false` - apiGroups: [""] resources: ["secrets"] verbs: - "create" - "delete" - "get" - "update" - apiGroups: [""] resources: ["serviceaccounts"] verbs: - "get" - apiGroups: [""] resources: ["services"] verbs: - "create" - "get" ``` 追加の詳細: - `event`権限はGitLab 16.2.1以降でのみ必要です。 - `namespace`権限は、`namespace_per_job`を使用してネームスペースの分離を有効にする場合にのみ必要です。 - `pods/log`権限は、以下のいずれかのシナリオに該当する場合にのみ必要です: - [`FF_KUBERNETES_HONOR_ENTRYPOINT`機能フラグ](../../configuration/feature-flags.md)が有効になっている場合。 - [`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY`機能フラグ](../../configuration/feature-flags.md)が、[`CI_DEBUG_SERVICES`変数](https://docs.gitlab.com/ci/services/#capturing-service-container-logs)が`true`に設定されている場合に無効になっていること。 - [`FF_WAIT_FOR_POD_TO_BE_REACHABLE`機能フラグ](../../configuration/feature-flags.md)が有効になっている場合。 #### informer {#informers} GitLab Runner 17.9.0以降では、Kubernetes informerがビルドポッドの変更を追跡します。これにより、executorが変更をより迅速に検出できるようになります。 informerには、`pods`に対する`list`権限と`watch`権限が必要です。executorがビルドを開始すると、Kubernetes APIで権限が確認されます。すべての権限が付与されている場合、executorはinformerを使用します。いずれかの権限がない場合には、GitLab Runnerは警告をログに記録します。ビルドは続行され、以前のメカニズムを使用してビルドポッドの状態と変更を追跡します。 ## 設定 {#configuration-settings} Kubernetes executorを設定するには、`config.toml`ファイルで次の設定を使用します。 ### CPUリクエストとCPUの制限 {#cpu-requests-and-limits} | 設定 | 説明 | |---------------------------------------------|-------------| | `cpu_limit` | ビルドコンテナに対して指定されるCPU割り当て。 | | 
`cpu_limit_overwrite_max_allowed` | ビルドコンテナのCPU割り当てを上書きできる最大量。空の場合、CPU制限上書き機能が無効になります。 | | `cpu_request` | ビルドコンテナに対してリクエストされるCPU割り当て。 | | `cpu_request_overwrite_max_allowed` | ビルドコンテナのCPU割り当てリクエストを上書きできる最大量。空の場合、CPUリクエスト上書き機能が無効になります。 | | `helper_cpu_limit` | ビルドヘルパーコンテナに対して指定されるCPU割り当て。 | | `helper_cpu_limit_overwrite_max_allowed` | ヘルパーコンテナのCPU割り当てを上書きできる最大量。空の場合、CPU制限上書き機能が無効になります。 | | `helper_cpu_request` | ビルドヘルパーコンテナに対してリクエストされるCPU割り当て。 | | `helper_cpu_request_overwrite_max_allowed` | ヘルパーコンテナのCPU割り当てリクエストを上書きできる最大量。空の場合、CPUリクエスト上書き機能が無効になります。 | | `service_cpu_limit` | ビルドサービスコンテナに対して指定されるCPU割り当て。 | | `service_cpu_limit_overwrite_max_allowed` | サービスコンテナのCPU割り当てを上書きできる最大量。空の場合、CPU制限上書き機能が無効になります。 | | `service_cpu_request` | ビルドサービスコンテナに対してリクエストされるCPU割り当て。 | | `service_cpu_request_overwrite_max_allowed` | サービスコンテナのCPU割り当てリクエストを上書きできる最大量。空の場合、CPUリクエスト上書き機能が無効になります。 | | `pod_cpu_limit` | ビルドポッドに割り当てられたCPU割り当て。 | | `pod_cpu_limit_overwrite_max_allowed` | ビルドポッドに書き込み可能なCPU割り当ての最大量。空の場合、CPU制限上書き機能が無効になります。 | | `pod_cpu_request` | ビルドポッドにリクエストされたCPU割り当て。 | | `pod_cpu_request_overwrite_max_allowed` | ビルドポッドに書き込み可能なCPU割り当てリクエストの最大量。空の場合、CPUリクエスト上書き機能が無効になります。 | > [!note] > ポッドレベルのリソース仕様は、[Kubernetes v1.32](https://v1-32.docs.kubernetes.io/blog/2024/12/11/kubernetes-v1-32-release/#pod-level-resource-specifications)でアルファ機能として導入され、[Kubernetes v1.34](https://kubernetes.io/blog/2025/09/22/kubernetes-v1-34-pod-level-resources/)でベータ版に移行しました。 ### メモリのリクエストと制限 {#memory-requests-and-limits} | 設定 | 説明 | |------------------------------------------------|-------------| | `memory_limit` | ビルドコンテナに割り当てられるメモリの量。 | | `memory_limit_overwrite_max_allowed` | ビルドコンテナのメモリ割り当てを上書きできる最大量。空の場合、メモリ制限上書き機能が無効になります。 | | `memory_request` | ビルドコンテナからリクエストされるメモリの量。 | | `memory_request_overwrite_max_allowed` | ビルドコンテナのメモリ割り当てリクエストを上書きできる最大量。空の場合、メモリリクエスト上書き機能が無効になります。 | | `helper_memory_limit` | ビルドヘルパーコンテナに割り当てられるメモリの量。 | | `helper_memory_limit_overwrite_max_allowed` | 
ヘルパーコンテナのメモリ割り当てを上書きできる最大量。空の場合、メモリ制限上書き機能が無効になります。 | | `helper_memory_request` | ビルドヘルパーコンテナに対してリクエストされるメモリの量。 | | `helper_memory_request_overwrite_max_allowed` | ヘルパーコンテナのメモリ割り当てリクエストを上書きできる最大量。空の場合、メモリリクエスト上書き機能が無効になります。 | | `service_memory_limit` | ビルドサービスコンテナに割り当てられるメモリの量。 | | `service_memory_limit_overwrite_max_allowed` | サービスコンテナのメモリ割り当てを上書きできる最大量。空の場合、メモリ制限上書き機能が無効になります。 | | `service_memory_request` | ビルドサービスコンテナにリクエストされるメモリの量。 | | `service_memory_request_overwrite_max_allowed` | サービスコンテナのメモリ割り当てリクエストを上書きできる最大量。空の場合、メモリリクエスト上書き機能が無効になります。 | | `pod_memory_limit` | ビルドポッドに割り当てられたメモリ量。 | | `pod_memory_limit_overwrite_max_allowed` | ビルドポッドに書き込み可能なメモリ割り当ての最大量。空の場合、メモリ制限上書き機能が無効になります。 | | `pod_memory_request` | ビルドポッドにリクエストされたメモリ量。 | | `pod_memory_request_overwrite_max_allowed` | ビルドポッドに書き込み可能なメモリ割り当てリクエストの最大量。空の場合、メモリリクエスト上書き機能が無効になります。 | #### ヘルパーコンテナのメモリサイジングの推奨事項 {#helper-container-memory-sizing-recommendations} 最適なパフォーマンスを得るには、ワークロードの要件に基づいてヘルパーコンテナのメモリ制限を設定します: - **Workloads with caching and artifact generation**: 最低250 MiB - **Basic workloads without cache/artifacts**: より低い制限 (128~200 MiB) でも機能する可能性があります。 **Basic configuration example:** ```toml [[runners]] executor = "kubernetes" [runners.kubernetes] helper_memory_limit = "250Mi" helper_memory_request = "250Mi" helper_memory_limit_overwrite_max_allowed = "1Gi" ``` **Job-specific memory overrides:**: `KUBERNETES_HELPER_MEMORY_LIMIT`変数を使用して、管理者の変更を必要とせずに特定のジョブのメモリを調整します: ```yaml job_with_higher_helper_memory_limit: variables: KUBERNETES_HELPER_MEMORY_LIMIT: "512Mi" script: ``` このアプローチにより、デベロッパーは`helper_memory_limit_overwrite_max_allowed`を介してクラスター全体の制限を維持しながら、ジョブごとのリソース使用量を最適化できます。 ### ストレージのリクエストと制限 {#storage-requests-and-limits} | 設定 | 説明 | |-----------------------------------------------------------|-------------| | `ephemeral_storage_limit` | ビルドコンテナのエフェメラルストレージ制限。 | | `ephemeral_storage_limit_overwrite_max_allowed` | ビルドコンテナのエフェメラルストレージ制限を上書きできる最大量。空の場合、エフェメラルストレージ制限上書き機能が無効になります。 | | 
`ephemeral_storage_request` | ビルドコンテナに対して指定されるエフェメラルストレージリクエスト。 | | `ephemeral_storage_request_overwrite_max_allowed` | ビルドコンテナのエフェメラルストレージリクエストを上書きできる最大量。空の場合、エフェメラルストレージリクエスト上書き機能が無効になります。 | | `helper_ephemeral_storage_limit` | ヘルパーコンテナに対して指定されるエフェメラルストレージ制限。 | | `helper_ephemeral_storage_limit_overwrite_max_allowed` | ヘルパーコンテナのエフェメラルストレージ制限を上書きできる最大量。空の場合、エフェメラルストレージリクエスト上書き機能が無効になります。 | | `helper_ephemeral_storage_request` | ヘルパーコンテナに対して指定されるエフェメラルストレージリクエスト。 | | `helper_ephemeral_storage_request_overwrite_max_allowed` | ヘルパーコンテナのエフェメラルストレージリクエストを上書きできる最大量。空の場合、エフェメラルストレージリクエスト上書き機能が無効になります。 | | `service_ephemeral_storage_limit` | サービスコンテナに対して指定されるエフェメラルストレージ制限。 | | `service_ephemeral_storage_limit_overwrite_max_allowed` | サービスコンテナのエフェメラルストレージ制限を上書きできる最大量。空の場合、エフェメラルストレージリクエスト上書き機能が無効になります。 | | `service_ephemeral_storage_request` | サービスコンテナに対して指定されるエフェメラルストレージリクエスト。 | | `service_ephemeral_storage_request_overwrite_max_allowed` | サービスコンテナのエフェメラルストレージリクエストを上書きできる最大量。空の場合、エフェメラルストレージリクエスト上書き機能が無効になります。 | ### `config.toml`のその他の設定 {#other-configtoml-settings} | 設定 | 説明 | |-----------------------------------------------|-------------| | `affinity` | ビルドを実行するノードを決定するアフィニティルールを指定します。[アフィニティの使用](#define-a-list-of-node-affinities)についての詳細を参照してください。 | | `allow_privilege_escalation` | `allowPrivilegeEscalation`フラグを有効にしてすべてのコンテナを実行します。空の場合、コンテナ`SecurityContext`の`allowPrivilegeEscalation`フラグは定義されず、Kubernetesはデフォルトの[特権エスカレーション](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/)動作を使用できます。 | | `allowed_groups` | コンテナグループに指定できるグループIDの配列。存在しない場合、すべてのグループが許可されます。詳細については、[コンテナユーザーとグループの設定](#configure-container-user-and-group)を参照してください。 | | `allowed_images` | `.gitlab-ci.yml`で指定できるイメージのワイルドカードリスト。この設定が存在しない場合は、すべてのイメージが許可されます(`["*/*:*"]`と同等)。[詳細](#restrict-docker-images-and-services)を参照してください。 | | `allowed_pull_policies` | `.gitlab-ci.yml`ファイルまたは`config.toml`ファイルで指定できるプルポリシーのリスト。 | | `allowed_services` | 
`.gitlab-ci.yml`で指定できるサービスのワイルドカードリスト。この設定が存在しない場合は、すべてのイメージが許可されます(`["*/*:*"]`と同等)。[詳細](#restrict-docker-images-and-services)を参照してください。 | | `allowed_users` | コンテナユーザーに指定できるユーザーIDの配列。存在しない場合、すべてのユーザーが許可されます。詳細については、[コンテナユーザーとグループの設定](#configure-container-user-and-group)を参照してください。 | | `automount_service_account_token` | サービスアカウントトークンをビルドポッドに自動的にマウントするかどうかを制御するブール値。 | | `bearer_token` | ビルドポッドの起動に使用されるデフォルトのベアラートークン。 | | `bearer_token_overwrite_allowed` | ビルドポッドの作成に使用されるベアラートークンをプロジェクトが指定できるようにするブール値。 | | `build_container_security_context` | ビルドコンテナのコンテナセキュリティコンテキストを設定します。[セキュリティコンテキストの詳細](#set-a-security-policy-for-the-pod)を参照してください。 | | `cap_add` | ジョブポッドコンテナに追加するLinux機能を指定します。[Kubernetes executorでの機能設定の詳細](#specify-container-capabilities)を参照してください。 | | `cap_drop` | ジョブポッドコンテナから削除するLinux機能を指定します。[Kubernetes executorでの機能設定の詳細](#specify-container-capabilities)を参照してください。 | | `cleanup_grace_period_seconds` | ジョブの完了後、ポッドが正常に終了するまでの秒数。この期間を過ぎると、プロセスはkill(強制終了)シグナルによって強制的に停止します。`terminationGracePeriodSeconds`が指定されている場合は無視されます。 | | `context` | `kubectl`設定から使用するKubernetesコンテキスト名 (`host`が指定されていない場合)。 | | `dns_policy` | ポッドの作成時に使用するDNSポリシー(`none`、`default`、`cluster-first`、`cluster-first-with-host-net`)を指定します。設定されていない場合は、Kubernetesのデフォルト(`cluster-first`)が使用されます。 | | `dns_config` | ポッドの作成時に使用するDNS設定を指定します。[ポッドのDNS設定の使用についての詳細](#configure-pod-dns-settings)を参照してください。 | | `helper_container_security_context` | ヘルパーコンテナのコンテナセキュリティコンテキストを設定します。[セキュリティコンテキストの詳細](#set-a-security-policy-for-the-pod)を参照してください。 | | `helper_image` | (上級者向け)リポジトリのクローンとアーティファクトのアップロードに使用される[デフォルトのヘルパーイメージを上書きします](../../configuration/advanced-configuration.md#helper-image)。 | | `helper_image_flavor` | ヘルパーイメージのフレーバー (`alpine`、`alpine3.21`、または`ubuntu`) を設定します。`alpine`がデフォルトです。`alpine`を使用する場合、これは`alpine3.21`と同じです。 | | `host_aliases` | すべてのコンテナに追加される追加のホスト名エイリアスのリスト。[追加のホストエイリアスの使用についての詳細](#add-extra-host-aliases)を参照してください。 | | `image_pull_secrets` | プライベートレジストリからのDockerイメージのプルを認証するために使用されるKubernetes 
`docker-registry`シークレット名を含むアイテムの配列。 | | `init_permissions_container_security_context` | init-permissionsコンテナのコンテナセキュリティコンテキストを設定します。[セキュリティコンテキストの詳細](#set-a-security-policy-for-the-pod)を参照してください。 | | `namespace` | Kubernetesポッドを実行するネームスペース。 | | `namespace_per_job` | ジョブを個別のネームスペースに隔離します。有効にすると、`namespace`と`namespace_overwrite_allowed`は無視されます。 | | `namespace_overwrite_allowed` | ネームスペース上書き環境変数の内容を検証する正規表現(下記を参照)。空の場合、ネームスペース上書き機能が無効になります。 | | `node_selector` | `string=string`(環境変数の場合は`string:string`)形式の`key=value`ペアの`table`。これを設定すると、ポッドの作成は、すべての`key=value`ペアに一致するKubernetesノードに制限されます。[ノードセレクターの使用についての詳細](#specify-the-node-to-execute-builds)を参照してください。 | | `node_tolerations` | `string=string:string`形式の`"key=value" = "Effect"`ペアの`table`。これを設定すると、ポッドは、許容されるすべてのtaintまたはその一部を持つノードでスケジュールできます。環境変数設定では、1つのtolerationのみを指定できます。`key`、`value`、および`effect`は、Kubernetesポッドのtoleration設定の対応するフィールド名と一致します。 | | `pod_annotations` | `string=string`形式の`key=value`ペアの`table`。この`table`には、Runnerによって作成された各ビルドポッドに追加されるアノテーションのリストが含まれています。これらの値には、拡張用の環境変数を含めることができます。ポッドのアノテーションは、各ビルドで上書きできます。 | | `pod_annotations_overwrite_allowed` | ポッドアノテーション上書き環境変数の内容を検証する正規表現。空の場合、ポッドアノテーション上書き機能が無効になります。 | | `pod_labels` | `string=string`形式の`key=value`ペアの`table`。この`table`には、Runnerによって作成された各ビルドポッドに追加されるラベルのリストが含まれています。これらの値には、拡張用の環境変数を含めることができます。各ビルドでポッドラベルを上書きするには、`pod_labels_overwrite_allowed`を使用します。 | | `pod_labels_overwrite_allowed` | ポッドラベル上書き環境変数の内容を検証する正規表現。空の場合、ポッドラベルの上書き機能が無効になります。`runner.gitlab.com`ラベルネームスペースのポッドラベルは上書きできないことに注意してください。 | | `pod_security_context` | 設定ファイルで設定されている場合、これによりビルドポッドのポッドセキュリティコンテキストが設定されます。[セキュリティコンテキストの詳細](#set-a-security-policy-for-the-pod)を参照してください。 | | `pod_termination_grace_period_seconds` | ポッドが正常に終了するまでの秒数を決定するポッドレベルの設定です。この期間を過ぎると、プロセスはkill(強制終了)シグナルによって強制的に停止します。`terminationGracePeriodSeconds`が指定されている場合は無視されます。 | | `poll_interval` | RunnerがKubernetesポッドを作成した直後に、その状態を確認するためにポッドをポーリングする頻度(秒単位)(デフォルト= 3)。 | | `poll_timeout` | 
Runnerが作成したコンテナへの接続を試行する際に、タイムアウトになるまでの経過時間(秒単位)。クラスターが一度に処理できるビルドの数を上回るビルドをキューに入れる場合に、この設定を使用します(デフォルト= 180)。 | | `cleanup_resources_timeout` | ジョブの完了後にKubernetesリソースをクリーンアップするための合計時間。サポートされている構文は`1h30m`、`300s`、`10m`です。デフォルトは5分(`5m`)です。 | | `priority_class_name` | ポッドに設定する優先度クラスを指定します。設定されていない場合は、デフォルトの優先度クラスが使用されます。 | | `privileged` | 特権フラグを指定してコンテナを実行します。 | | `pull_policy` | イメージプルポリシー(`never`、`if-not-present`、`always`)を指定します。設定されていない場合は、クラスターのイメージの[デフォルトプルポリシー](https://kubernetes.io/docs/concepts/containers/images/#updating-images)が使用されます。複数のプルポリシーの設定方法と詳細については、[プルポリシーの使用](#set-a-pull-policy)を参照してください。[`if-not-present`および`never`のセキュリティに関する考慮事項](../../security/_index.md#usage-of-private-docker-images-with-if-not-present-pull-policy)も参照してください。[プルポリシーを制限する](#restrict-docker-pull-policies)こともできます。 | | `resource_availability_check_max_attempts` | 設定されたリソース(サービスアカウントとプルシークレット)が使用可能であるかどうかを確認する操作の最大試行回数。この回数を超えると試行されなくなります。各試行の間隔は5秒です。[準備ステップでのリソースチェックについての詳細](#resources-check-during-prepare-step)を参照してください。 | | `runtime_class_name` | 作成されたすべてのポッドに使用するランタイムクラス。クラスターでこの機能がサポートされていない場合、ジョブは終了または失敗します。 | | `service_container_security_context` | サービスコンテナのコンテナセキュリティコンテキストを設定します。[セキュリティコンテキストの詳細](#set-a-security-policy-for-the-pod)を参照してください。 | | `scheduler_name` | ビルドポッドのスケジュールに使用するスケジューラ。 | | `service_account` | ジョブ/executorポッドがKubernetes APIと通信するために使用するデフォルトのサービスアカウント。 | | `service_account_overwrite_allowed` | サービスアカウント上書き環境変数の内容を検証する正規表現。空の場合、サービスアカウント上書き機能が無効になります。 | | `services` | [サイドカーパターン](https://learn.microsoft.com/en-us/azure/architecture/patterns/sidecar)を使用してビルドコンテナにアタッチされている[サービス](https://docs.gitlab.com/ci/services/)のリスト。[サービスの使用](#define-a-list-of-services)についての詳細を参照してください。 | | `use_service_account_image_pull_secrets` | 
有効にすると、executorによって作成されるポッドに`imagePullSecrets`が含まれなくなります。これにより、ポッドは[サービスアカウントの`imagePullSecrets`](https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/#add-image-pull-secret-to-service-account)(設定されている場合)を使用して作成されます。 | | `terminationGracePeriodSeconds` | ポッドで実行されているプロセスに自動終了シグナルが送信された時点から、プロセスがkill(強制終了)シグナルで強制的に停止されるまでの期間。[`cleanup_grace_period_seconds`と`pod_termination_grace_period_seconds`が優先され、これは非推奨になりました](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28165)。 | | `volumes` | 設定ファイルで設定され、ビルドコンテナにマウントされるボリュームのリスト。[ボリュームの使用](#configure-volume-types)についての詳細を参照してください。 | | `pod_spec` | これは実験的な設定です。Runnerマネージャーによって生成されるポッド仕様を、CIジョブの実行に使用されるポッドで設定された設定のリストで上書きします。`Kubernetes Pod Specification`にリストされているすべてのプロパティを設定できます。詳細については、[生成されたポッド仕様を上書きする(実験的機)](#overwrite-generated-pod-specifications)を参照してください。 | | `retry_limit` | Kubernetes APIとの通信を試行する操作の最大回数。各試行の間の再試行間隔は、バックオフアルゴリズムに基づき、500ミリ秒から始まります。 | | `retry_backoff_max` | 各試行で到達する再試行間隔のカスタム最大バックオフ値(ミリ秒単位)。デフォルト値は2000ミリ秒で、500ミリ秒未満の値にすることはできません。各試行で到達するデフォルトの最大試行間隔は2秒です。これは`retry_backoff_max`を使用してカスタマイズできます。 | | `retry_limits` | 各リクエストエラーの再試行回数。 | | `logs_base_dir` | ビルドログを保存するために生成されたパスの前に付加されるベースディレクトリ。詳細については、[ビルドログとスクリプトのベースディレクトリを変更する](#change-the-base-directory-for-build-logs-and-scripts)を参照してください。 | | `scripts_base_dir` | ビルドスクリプトを保存するために生成されたパスの前に付加されるベースディレクトリ。詳細については、[ビルドログとスクリプトのベースディレクトリを変更する](#change-the-base-directory-for-build-logs-and-scripts)を参照してください。 | | `print_pod_warning_events` | 有効にすると、ジョブ失敗時に、ポッドに関連付けられているすべての警告イベントがこの機能により取得されます。この機能はデフォルトで有効になっており、少なくとも[`events: list`の権限](#configure-runner-api-permissions)を付与されたサービスアカウントが必要です。 | | `pod_disruption_budget` | 有効にすると、ジョブポッドごとに[`PodDisruptionBudget`](https://kubernetes.io/docs/tasks/run-application/configure-pdb/)が作成され、ノードのドレインやクラスターのアップグレードなどの自主的な中断中の退去を防ぎます。デフォルトでは無効になっています。[`poddisruptionbudgets`権限](#configure-runner-api-permissions)を持つサービスアカウントが必要です。 | ### 設定例 {#configuration-example} 次のサンプルは、Kubernetes 
executorの`config.toml`ファイルの設定例を示しています。 ```toml concurrent = 4 [[runners]] name = "myRunner" url = "https://gitlab.com/ci" token = "......" executor = "kubernetes" [runners.kubernetes] host = "https://45.67.34.123:4892" cert_file = "/etc/ssl/kubernetes/api.crt" key_file = "/etc/ssl/kubernetes/api.key" ca_file = "/etc/ssl/kubernetes/ca.crt" namespace = "gitlab" namespace_overwrite_allowed = "ci-.*" bearer_token_overwrite_allowed = true privileged = true cpu_limit = "1" memory_limit = "1Gi" service_cpu_limit = "1" service_memory_limit = "1Gi" helper_cpu_limit = "500m" helper_memory_limit = "100Mi" poll_interval = 5 poll_timeout = 3600 dns_policy = "cluster-first" priority_class_name = "priority-1" logs_base_dir = "/tmp" scripts_base_dir = "/tmp" [runners.kubernetes.node_selector] gitlab = "true" [runners.kubernetes.node_tolerations] "node-role.kubernetes.io/master" = "NoSchedule" "custom.toleration=value" = "NoSchedule" "empty.value=" = "PreferNoSchedule" "onlyKey" = "" ``` ## executorサービスアカウントを設定する {#configure-the-executor-service-account} executorサービスアカウントを設定するには、`KUBERNETES_SERVICE_ACCOUNT`環境変数を設定するか、`--kubernetes-service-account`フラグを使用します。 ## ポッドとコンテナ {#pods-and-containers} ジョブの実行方法を制御するようにポッドとコンテナを設定できます。 ### ジョブポッドのデフォルトのラベル {#default-labels-for-job-pods} > [!warning] > これらのラベルをRunnerの設定または`.gitlab-ci.yml`ファイルでオーバーライドすることはできません。`runner.gitlab.com`ネームスペースでラベルを設定または変更する操作は無視され、デバッグメッセージとして記録されます。 | キー | 説明 | |--------------------------------------------|-------------| | `project.runner.gitlab.com/id` | プロジェクトのID。GitLabインスタンスのすべてのプロジェクトで一意のIDです。 | | `project.runner.gitlab.com/name` | プロジェクトの名前。 | | `project.runner.gitlab.com/namespace-id` | プロジェクトのネームスペースのID。 | | `project.runner.gitlab.com/namespace` | プロジェクトのネームスペースの名前。 | | `project.runner.gitlab.com/root-namespace` | プロジェクトのルートネームスペースのID。たとえば`/gitlab-org/group-a/subgroup-a/project`の場合、ルートネームスペースは`gitlab-org`です。 | | `manager.runner.gitlab.com/name` | このジョブを起動したRunner設定の名前。 | | `manager.runner.gitlab.com/id-short` 
| ジョブを起動したRunner設定のID。 | | `job.runner.gitlab.com/pod` | Kubernetes executorによって使用される内部ラベル。 | ### ジョブポッドのデフォルトのアノテーション {#default-annotations-for-job-pods} ジョブを実行しているポッドには、デフォルトで次のアノテーションが追加されます。 | キー | 説明 | |------------------------------------|-------------| | `job.runner.gitlab.com/id` | ジョブのID。GitLabインスタンスのすべてのジョブにおいて一意のIDです。 | | `job.runner.gitlab.com/url` | ジョブの詳細のURL。 | | `job.runner.gitlab.com/sha` | プロジェクトがビルドされるコミットリビジョン。 | | `job.runner.gitlab.com/before_sha` | ブランチまたはタグに存在する、以前の最新コミット。 | | `job.runner.gitlab.com/ref` | プロジェクトのビルド対象のブランチまたはタグの名前。 | | `job.runner.gitlab.com/name` | ジョブの名前。 | | `job.runner.gitlab.com/timeout` | 時間の長さで指定する形式のジョブ実行タイムアウト。たとえば、`2h3m0.5s`などです。 | | `project.runner.gitlab.com/id` | ジョブのプロジェクトID。 | デフォルトのアノテーションを上書きするには、GitLab Runner設定で`pod_annotations`を使用します。各CI/CDジョブのアノテーションは、[`.gitlab-ci.yml`ファイル](#overwrite-pod-annotations)で上書きすることもできます。 ### ポッドのライフサイクル {#pod-lifecycle} [ポッドのライフサイクル](https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#lifecycle)は、次の影響を受ける可能性があります。 - `TOML`設定ファイルでの`pod_termination_grace_period_seconds`プロパティの設定。ポッドで実行されているプロセスは、`TERM`シグナルの送信後に指定された期間にわたって実行できます。この期間が経過してもポッドが正常に終了しない場合は、kill(強制終了)シグナルが送信されます。 - [`FF_USE_POD_ACTIVE_DEADLINE_SECONDS`機能フラグ](../../configuration/feature-flags.md)の有効化。有効にすると、ジョブがタイムアウトしたときに、CI/CDジョブを実行しているポッドは失敗としてマークされ、関連付けられているすべてのコンテナが強制終了されます。最初にGitLabでジョブをタイムアウトさせるには、`activeDeadlineSeconds`を`configured timeout + 1 second`に設定します。 > [!note] > `FF_USE_POD_ACTIVE_DEADLINE_SECONDS`機能フラグを有効にして`pod_termination_grace_period_seconds`をゼロ以外の値に設定した場合、CI/CDジョブポッドはすぐに終了しません。ポッドの`terminationGracePeriods`により、有効期限が切れた場合にのみポッドが終了するようになります。 ### ジョブポッドを退去から保護する {#protect-job-pods-from-eviction} {{< history >}} - [導入](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/6331)されました (GitLab Runner 18.10)。 {{< /history >}} 
ノードドレインやクラスターアップグレードなどの[自主的な中断](https://kubernetes.io/docs/concepts/workloads/pods/disruptions/#voluntary-and-involuntary-disruptions)からジョブポッドを保護するには、`pod_disruption_budget`オプションを有効にします。 これを有効にすると、ジョブポッドごとに`minAvailable: 1`の[`PodDisruptionBudget`](https://kubernetes.io/docs/tasks/run-application/configure-pdb/)が作成されます。このアクションは、自主的な中断中にKubernetesの退去APIがポッドを退去させるのを防ぎます。 ```toml [runners.kubernetes] pod_disruption_budget = true ``` `PodDisruptionBudget`: - Kubernetesオーナー参照によってジョブポッドが削除されると、自動的に削除されます。 - ノード障害やメモリ不足による強制終了などの偶発的な中断からは保護しません。 - 追加のRBAC権限が必要です。詳細については、[Runner API権限の設定](#configure-runner-api-permissions)を参照してください。 > [!warning] > `PodDisruptionBudget`を有効にすると、ジョブの実行中にノードのドレインがハングする可能性があります。クラスターのアップグレード戦略が、潜在的なノードドレインの遅延を考慮しているか、またはジョブタイムアウトを使用してジョブの実行時間を制限するようにしてください。 ### ポッドのtolerationを上書きする {#overwrite-pod-tolerations} Kubernetesポッドのtolerationを上書きするには、次のようにします。 1. `config.toml`ファイルまたはHelm `values.yaml`ファイルでCIジョブポッドのtolerationの上書きを有効にするには、`node_tolerations_overwrite_allowed`の正規表現を定義します。この正規表現は、名前が`KUBERNETES_NODE_TOLERATIONS_`で始まるCI変数の値を検証します。 ```toml runners: ... config: | [[runners]] [runners.kubernetes] node_tolerations_overwrite_allowed = ".*" ``` 1. CIジョブポッドtolerationを上書きするため、`.gitlab-ci.yml`ファイルで1つ以上のCI変数を定義します。 ```yaml variables: KUBERNETES_NODE_TOLERATIONS_1: 'node-role.kubernetes.io/master:NoSchedule' KUBERNETES_NODE_TOLERATIONS_2: 'custom.toleration=value:NoSchedule' KUBERNETES_NODE_TOLERATIONS_3: 'empty.value=:PreferNoSchedule' KUBERNETES_NODE_TOLERATIONS_4: 'onlyKey' KUBERNETES_NODE_TOLERATIONS_5: '' # tolerate all taints ``` ### ポッドラベルを上書きする {#overwrite-pod-labels} 各CI/CDジョブのKubernetesポッドラベルを上書きするには、次の手順に従います。 1. `.config.yaml`ファイルで`pod_labels_overwrite_allowed`の正規表現を定義します。 1. 
`.gitlab-ci.yml`ファイルで、値`key=value`を持つ`KUBERNETES_POD_LABELS_*`変数を設定します。ポッドラベルは`key=value`で上書きされます。複数の値を適用できます。 ```yaml variables: KUBERNETES_POD_LABELS_1: "Key1=Val1" KUBERNETES_POD_LABELS_2: "Key2=Val2" KUBERNETES_POD_LABELS_3: "Key3=Val3" ``` > [!warning] > `runner.gitlab.com`ネームスペースのラベルは読み取り専用です。GitLabは、これらのGitLab内部ラベルの追加、変更、または削除の試行操作をすべて無視します。 ### ポッドアノテーションを上書きする {#overwrite-pod-annotations} 各CI/CDジョブのKubernetesポッドアノテーションを上書きするには、次の手順に従います。 1. `.config.yaml`ファイルで`pod_annotations_overwrite_allowed`の正規表現を定義します。 1. `.gitlab-ci.yml`ファイルで`KUBERNETES_POD_ANNOTATIONS_*`変数を設定し、値として`key=value`を使用します。ポッドアノテーションは`key=value`で上書きされます。複数のアノテーションを指定できます。 ```yaml variables: KUBERNETES_POD_ANNOTATIONS_1: "Key1=Val1" KUBERNETES_POD_ANNOTATIONS_2: "Key2=Val2" KUBERNETES_POD_ANNOTATIONS_3: "Key3=Val3" ``` 以下の例では、`pod_annotations`と`pod_annotations_overwrite_allowed`が設定されています。この設定により、`config.toml`で設定されている`pod_annotations`の上書きが許可されます。 ```toml [[runners]] # usual configuration executor = "kubernetes" [runners.kubernetes] image = "alpine" pod_annotations_overwrite_allowed = ".*" [runners.kubernetes.pod_annotations] "Key1" = "Val1" "Key2" = "Val2" "Key3" = "Val3" "Key4" = "Val4" ``` ### 生成されたポッド仕様を上書きする {#overwrite-generated-pod-specifications} {{< details >}} - ステータス: ベータ版 {{< /details >}} この機能は[ベータ版](https://docs.gitlab.com/policy/development_stages_support/#beta)です。本番環境のクラスターで使用する前に、テストKubernetesクラスターでこの機能を使用することを強くお勧めします。この機能を使用するには、`FF_USE_ADVANCED_POD_SPEC_CONFIGURATION`[機能フラグ](../../configuration/feature-flags.md)を有効にする必要があります。 機能が一般提供される前にフィードバックを追加するには、[イシュー556286](https://gitlab.com/gitlab-org/gitlab/-/issues/556286)にコメントを残してください。 Runnerマネージャーによって生成された`PodSpec`を変更するには、`config.toml`ファイルで`pod_spec`設定を使用します。 Runnerオペレーター固有の設定については、[パッチ構造](../../configuration/configuring_runner_operator.md#patch-structure)を参照してください。 `pod_spec`設定により次のようになります。 - 生成されたポッド仕様のフィールドを上書きして補完します。 - `config.toml`の`[runners.kubernetes]`で設定された可能性のある設定値を上書きします。 複数の`pod_spec`設定を指定できます。 | 設定 | 説明 | 
|--------------|-------------| | `name` | カスタム`pod_spec`に付けられた名前。 | | `patch_path` | 最終的な`PodSpec`オブジェクトの生成前に、このオブジェクトに適用する変更を定義するファイルのパス。このファイルはJSONまたはYAMLファイルである必要があります。 | | `patch` | 最終的な`PodSpec`オブジェクトの生成前にこのオブジェクトに適用する必要がある変更を記述するJSONまたはYAML形式の文字列。 | | `patch_type` | GitLab Runnerによって生成された`PodSpec`オブジェクトに対して指定された変更を適用するためにRunnerが使用する戦略。指定できる値は、`merge`、`json`、`strategic`です。 | 同じ`pod_spec`設定で`patch_path`と`patch`を設定することはできません。このように設定するとエラーが発生します。 `config.toml`での複数の`pod_spec`設定の例を以下に示します。 ```toml [[runners]] [runners.kubernetes] [[runners.kubernetes.pod_spec]] name = "hostname" patch = ''' hostname: "custom-pod-hostname" ''' patch_type = "merge" [[runners.kubernetes.pod_spec]] name = "subdomain" patch = ''' subdomain: "subdomain" ''' patch_type = "strategic" [[runners.kubernetes.pod_spec]] name = "terminationGracePeriodSeconds" patch = ''' [{"op": "replace", "path": "/terminationGracePeriodSeconds", "value": 60}] ''' patch_type = "json" ``` #### マージパッチ戦略 {#merge-patch-strategy} `merge`パッチ戦略は、既存の`PodSpec`に[キー/値置換](https://datatracker.ietf.org/doc/html/rfc7386)を適用します。この戦略を使用する場合、`config.toml`の`pod_spec`設定により、最終的な`PodSpec`オブジェクトの生成前に、このオブジェクトの値が**上書き**されます。値が完全に上書きされるので、このパッチ戦略を使用する際には十分に注意してください。 `merge`パッチ戦略を使用する`pod_spec`設定の例を以下に示します。 ```toml concurrent = 1 check_interval = 1 log_level = "debug" shutdown_timeout = 0 [session_server] session_timeout = 1800 [[runners]] name = "" url = "https://gitlab.example.com" id = 0 token = "__REDACTED__" token_obtained_at = 0001-01-01T00:00:00Z token_expires_at = 0001-01-01T00:00:00Z executor = "kubernetes" shell = "bash" environment = ["FF_USE_ADVANCED_POD_SPEC_CONFIGURATION=true", "CUSTOM_VAR=value"] [runners.kubernetes] image = "alpine" ... 
[[runners.kubernetes.pod_spec]] name = "build envvars" patch = ''' containers: - env: - name: env1 value: "value1" - name: env2 value: "value2" name: build ''' patch_type = "merge" ``` この設定では、最終的な`PodSpec`には、2つの環境変数(`env1`と`env2`)を持つ1つのコンテナ(`build`)のみが含まれています。上記の例では、次のようになるために関連するCIジョブが失敗します。 - `helper`コンテナ仕様が削除されます。 - `build`コンテナ仕様は、GitLab Runnerによって設定された必要なすべての設定を失います。 ジョブの失敗を防ぐために、この例では、GitLab Runnerによって生成された未変更のプロパティが`pod_spec`に含まれている必要があります。 #### JSONパッチ戦略 {#json-patch-strategy} `json`パッチ戦略は、[JSONパッチ仕様](https://datatracker.ietf.org/doc/html/rfc6902)を使用して`PodSpec`のオブジェクトと配列の更新を制御します。`array`プロパティではこの戦略を使用できません。 `json`パッチ戦略を使用する`pod_spec`設定の例を以下に示します。この設定では、新しい`key: value pair`が既存の`nodeSelector`に追加されます。既存の値は上書きされません。 ```toml concurrent = 1 check_interval = 1 log_level = "debug" shutdown_timeout = 0 [session_server] session_timeout = 1800 [[runners]] name = "" url = "https://gitlab.example.com" id = 0 token = "__REDACTED__" token_obtained_at = 0001-01-01T00:00:00Z token_expires_at = 0001-01-01T00:00:00Z executor = "kubernetes" shell = "bash" environment = ["FF_USE_ADVANCED_POD_SPEC_CONFIGURATION=true", "CUSTOM_VAR=value"] [runners.kubernetes] image = "alpine" ... 
[[runners.kubernetes.pod_spec]] name = "val1 node" patch = ''' [{ "op": "add", "path": "/nodeSelector", "value": { key1: "val1" } }] ''' patch_type = "json" ``` #### strategicパッチ戦略 {#strategic-patch-strategy} この`strategic`パッチ戦略は、`PodSpec`オブジェクトの各フィールドに適用されている既存の`patchStrategy`を使用します。 `strategic`パッチ戦略を使用する`pod_spec`設定の例を以下に示します。この設定では、ビルドコンテナに`resource request`が設定されています。 ```toml concurrent = 1 check_interval = 1 log_level = "debug" shutdown_timeout = 0 [session_server] session_timeout = 1800 [[runners]] name = "" url = "https://gitlab.example.com" id = 0 token = "__REDACTED__" token_obtained_at = 0001-01-01T00:00:00Z token_expires_at = 0001-01-01T00:00:00Z executor = "kubernetes" shell = "bash" environment = ["FF_USE_ADVANCED_POD_SPEC_CONFIGURATION=true", "CUSTOM_VAR=value"] [runners.kubernetes] image = "alpine" ... [[runners.kubernetes.pod_spec]] name = "cpu request 500m" patch = ''' containers: - name: build resources: requests: cpu: "500m" ''' patch_type = "strategic" ``` この設定では、ビルドコンテナに`resource request`が設定されています。 #### ベストプラクティス {#best-practices} - 本番環境にデプロイする前に、テスト環境で追加された`pod_spec`をテストします。 - GitLab Runnerによって生成された仕様に対し、`pod_spec`設定が悪影響を与えないことを確認します。 - 複雑なポッド仕様の更新には、`merge`パッチ戦略を使用しないでください。 - `config.toml`が利用可能な場合は、可能な限りこの設定を使用してください。たとえば次の設定では、設定された環境変数を既存のリストに追加するのではなく、GitLab Runnerによって設定された最初の環境変数を、カスタム`pod_spec`で設定された環境変数に置き換えます。 ```toml concurrent = 1 check_interval = 1 log_level = "debug" shutdown_timeout = 0 [session_server] session_timeout = 1800 [[runners]] name = "" url = "https://gitlab.example.com" id = 0 token = "__REDACTED__" token_obtained_at = 0001-01-01T00:00:00Z token_expires_at = 0001-01-01T00:00:00Z executor = "kubernetes" shell = "bash" environment = ["FF_USE_ADVANCED_POD_SPEC_CONFIGURATION=true", "CUSTOM_VAR=value"] [runners.kubernetes] image = "alpine" ... 
[[runners.kubernetes.pod_spec]] name = "build envvars" patch = ''' containers: - env: - name: env1 value: "value1" name: build ''' patch_type = "strategic" ``` #### ポッド仕様を変更して各ビルドジョブの`PVC`を作成する {#create-a-pvc-for-each-build-job-by-modifying-the-pod-spec} 各ビルドジョブの[PersistentVolumeClaim](https://kubernetes.io/docs/concepts/storage/persistent-volumes/)を作成するには、[ポッド仕様機能](#overwrite-generated-pod-specifications)を有効にする方法を確認してください。 Kubernetesでは、ポッドのライフサイクルにアタッチされた一時的な[PersistentVolumeClaim](https://kubernetes.io/docs/concepts/storage/persistent-volumes/)を作成できます。このアプローチは、Kubernetesクラスターで[動的プロビジョニング](https://kubernetes.io/docs/concepts/storage/dynamic-provisioning/)が有効になっている場合に機能します。各`PVC`は、新しい[ボリューム](https://kubernetes.io/docs/concepts/storage/volumes/)をリクエストできます。ボリュームはポッドのライフサイクルにも関連付けられています。 [動的プロビジョニング](https://kubernetes.io/docs/concepts/storage/dynamic-provisioning/)を有効にした後で、一時的な`PVC`を作成するために`config.toml`を次のように変更できます。 ```toml [[runners.kubernetes.pod_spec]] name = "ephemeral-pvc" patch = ''' containers: - name: build volumeMounts: - name: builds mountPath: /builds - name: helper volumeMounts: - name: builds mountPath: /builds volumes: - name: builds ephemeral: volumeClaimTemplate: spec: storageClassName: accessModes: [ ReadWriteOnce ] resources: requests: storage: 1Gi ''' ``` ### ポッドのセキュリティポリシーを設定する {#set-a-security-policy-for-the-pod} ビルドポッドのセキュリティポリシーを設定するには、`config.toml`で[セキュリティコンテキスト](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/)を設定します。 次のオプションを使用します。 | オプション | 型 | 必須 | 説明 | |-----------------------|------------|----------|-------------| | `fs_group` | `int` | いいえ | ポッド内のすべてのコンテナに適用される特別な追加グループ。 | | `run_as_group` | `int` | いいえ | コンテナプロセスのエントリポイントを実行するGID。 | | `run_as_non_root` | ブール値 | いいえ | コンテナを非rootユーザーとして実行する必要があることを示します。 | | `run_as_user` | `int` | いいえ | コンテナプロセスのエントリポイントを実行するUID。 | | `supplemental_groups` | `int`リスト | いいえ | コンテナのプライマリGIDに加えて、各コンテナで最初に実行されるプロセスに適用されるグループのリスト。 | | `selinux_type` | `string` | いいえ | 
ポッド内のすべてのコンテナに適用されるSELinuxタイプラベル。 | `config.toml`のポッドセキュリティコンテキストの例を以下に示します。 ```toml concurrent = %(concurrent)s check_interval = 30 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] helper_image = "gitlab-registry.example.com/helper:latest" [runners.kubernetes.pod_security_context] run_as_non_root = true run_as_user = 59417 run_as_group = 59417 fs_group = 59417 ``` ### 古いRunnerポッドを削除する {#remove-old-runner-pods} 古いRunnerポッドがクリアされないことがあります。これは、Runnerマネージャーが誤ってシャットダウンされた場合に発生する可能性があります。 この状況に対処するには、GitLab Runner Pod Cleanupアプリケーションを使用して、古いポッドのクリーンアップをスケジュールできます。詳細については、以下を参照してください。 - GitLab Runner Pod Cleanupプロジェクトの[README](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-pod-cleanup/-/blob/main/readme.md)。 - GitLab Runner Pod Cleanupの[ドキュメント](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-pod-cleanup/-/blob/main/docs/README.md)。 ### コンテナのセキュリティポリシーを設定する {#set-a-security-policy-for-the-container} ビルドポッド、ヘルパーポッド、またはサービスポッドのコンテナセキュリティポリシーを設定するため、`config.toml` executorで[コンテナセキュリティコンテキスト](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/)を設定します。 次のオプションを使用します。 | オプション | 型 | 必須 | 説明 | |---------------------|-------------|----------|-------------| | `run_as_group` | int | いいえ | コンテナプロセスのエントリポイントを実行するGID。 | | `run_as_non_root` | ブール値 | いいえ | コンテナを非rootユーザーとして実行する必要があることを示します。 | | `run_as_user` | int | いいえ | コンテナプロセスのエントリポイントを実行するUID。 | | `capabilities.add` | 文字列リスト | いいえ | コンテナの実行時に追加する機能。 | | `capabilities.drop` | 文字列リスト | いいえ | コンテナの実行時に削除する機能。 | | `selinux_type` | 文字列 | いいえ | コンテナプロセスに関連付けられているSELinuxタイプラベル。 | 次の`config.toml`の例では、セキュリティコンテキスト設定により、次のようになります。 - ポッドセキュリティコンテキストが設定されます。 - ビルドコンテナとヘルパーコンテナの`run_as_user`と`run_as_group`が上書きされます。 - すべてのサービスコンテナがポッドセキュリティコンテキストから`run_as_user`と`run_as_group`を継承することが指定されます。 ```toml concurrent = 4 check_interval = 30 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] helper_image = 
"gitlab-registry.example.com/helper:latest" [runners.kubernetes.pod_security_context] run_as_non_root = true run_as_user = 59417 run_as_group = 59417 fs_group = 59417 [runners.kubernetes.init_permissions_container_security_context] run_as_user = 1000 run_as_group = 1000 [runners.kubernetes.build_container_security_context] run_as_user = 65534 run_as_group = 65534 [runners.kubernetes.build_container_security_context.capabilities] add = ["NET_ADMIN"] [runners.kubernetes.helper_container_security_context] run_as_user = 1000 run_as_group = 1000 [runners.kubernetes.service_container_security_context] run_as_user = 1000 run_as_group = 1000 ``` ### プルポリシーを設定する {#set-a-pull-policy} `config.toml`ファイルで`pull_policy`パラメータを使用して、1つまたは複数のプルポリシーを指定します。このポリシーは、イメージのフェッチと更新の方法を制御します。ビルドイメージ、ヘルパーイメージ、およびすべてのサービスに適用されます。 使用するポリシーを決定するには、[プルポリシーに関するKubernetesのドキュメント](https://kubernetes.io/docs/concepts/containers/images/#image-pull-policy)を参照してください。 1つのプルポリシーの場合は次のようになります。 ```toml [runners.kubernetes] pull_policy = "never" ``` 複数のプルポリシーの場合は次のようになります。 ```toml [runners.kubernetes] # use multiple pull policies pull_policy = ["always", "if-not-present"] ``` 複数のポリシーを定義すると、イメージが正常に取得されるまで各ポリシーが試行されます。たとえば`[ always, if-not-present ]`を使用する場合、一時的なレジストリの問題が原因で`always`ポリシーが失敗すると、ポリシー`if-not-present`が使用されます。 失敗したプルを再試行するには、次のようにします。 ```toml [runners.kubernetes] pull_policy = ["always", "always"] ``` GitLabの命名規則はKubernetesの命名規則とは異なります。 | Runnerのプルポリシー | Kubernetesのプルポリシー | 説明 | |--------------------|------------------------|-------------| | なし | なし | Kubernetesによって指定されたデフォルトポリシーを使用します。 | | `if-not-present` | `IfNotPresent` | ジョブを実行するノードにイメージがまだ存在しない場合にのみ、イメージがプルされます。このプルポリシーを使用する前に、[セキュリティに関する考慮事項](../../security/_index.md#usage-of-private-docker-images-with-if-not-present-pull-policy)を確認してください。 | | `always` | `Always` | ジョブが実行されるたびにイメージがプルされます。 | | `never` | `Never` | イメージはプルされません。イメージがノードにすでに存在している必要があります。 | ### コンテナ機能を指定する {#specify-container-capabilities} 
コンテナで使用する[Kubernetes機能](https://kubernetes.io/docs/tasks/configure-pod-container/security-context/#set-capabilities-for-a-container)を指定できます。 コンテナ機能を指定するには、`config.toml`で`cap_add`オプションと`cap_drop`オプションを使用します。コンテナランタイムは、[Docker](https://github.com/moby/moby/blob/19.03/oci/defaults.go#L14-L32)または[このコンテナ](https://github.com/containerd/containerd/blob/v1.4.0/oci/spec.go#L93-L110)のように、機能のデフォルトリストを定義することもできます。 Runnerがデフォルトで削除する[機能のリスト](#default-list-of-dropped-capabilities)があります。`cap_add`オプションに指定した機能は、削除対象から除外されます。 `config.toml`ファイルの設定例を次に示します。 ```toml concurrent = 1 check_interval = 30 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] # ... cap_add = ["SYS_TIME", "IPC_LOCK"] cap_drop = ["SYS_ADMIN"] # ... ``` 機能を指定するときには、次のようになります。 - ユーザー定義の`cap_drop`は、ユーザー定義の`cap_add`よりも優先されます。両方の設定で同じ機能を定義した場合、`cap_drop`の機能のみがコンテナに渡されます。 - コンテナ設定に渡される機能識別子から`CAP_`プレフィックスを削除します。たとえば、`CAP_SYS_TIME`機能を追加または削除する場合は、設定ファイルに文字列`SYS_TIME`を入力します。 - Kubernetesクラスターのオーナーが[PodSecurityPolicyを定義できます](https://kubernetes.io/docs/concepts/security/pod-security-policy/#capabilities)。このポリシーでは、特定の機能をデフォルトで許可、制限、または追加できます。これらのルールは、すべてのユーザー定義設定よりも優先されます。 ### コンテナユーザーとグループの設定 {#configure-container-user-and-group} {{< history >}} - セキュリティコンテキストベースのユーザー設定のサポートが[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/38894)されました (GitLab Runner 18.4)。 {{< /history >}} Kubernetesセキュリティコンテキスト設定を使用して、コンテナで実行されるユーザーとグループを設定します。管理者はコンテナのセキュリティを制御し、ジョブが特定のコンテナタイプにユーザーを指定できるようにします。 > [!note] > Windowsのジョブ定義で`runAsUser`、`runAsGroup`、または`image:user`を設定することはサポートされていません。代わりに[runAsUserName](https://kubernetes.io/docs/tasks/configure-pod-container/configure-runasusername/)を[FF_USE_ADVANCED_POD_SPEC_CONFIGURATION](#overwrite-generated-pod-specifications)を介して設定することをお勧めします。 #### 設定の優先順位 {#configuration-precedence} Runnerはユーザー設定を次の順序で適用します: ビルドおよびサービスコンテナの場合: 1. コンテナセキュリティコンテキスト (`run_as_user`/`run_as_group`): 管理者がこの設定を制御します。 1. 
ポッドセキュリティコンテキスト (`run_as_user`/`run_as_group`): 管理者はポッドレベルのデフォルトを制御します。 1. ジョブ設定 (`.gitlab-ci.yml`): ユーザーがこの設定を制御します。 ヘルパーコンテナの場合: 1. ヘルパーコンテナセキュリティコンテキスト (`run_as_user`/`run_as_group`): 管理者がこの設定を制御します。 1. ポッドセキュリティコンテキスト (`run_as_user`/`run_as_group`): 管理者はポッドレベルのデフォルトを制御します。 ジョブ設定は、セキュリティ分離のためのヘルパーコンテナには適用されません。 管理者は、セキュリティコンプライアンスのためにユーザーが指定した値をオーバーライドできます。ヘルパーコンテナはジョブ仕様から分離されたままです。 #### Kubernetesの要件 {#requirements-for-kubernetes} Kubernetesでは、ユーザーIDとグループIDに数値が必要です: - ユーザーIDとグループIDは整数である必要があります。 - `SecurityContext`は`run_as_user`と`run_as_group`を使用し、数値のみを受け入れます。 - ジョブ設定では、ユーザーのみの場合は"1000"を、ユーザーとグループの場合は"1000:1001"を使用します。 #### ユーザーおよびグループ設定のオーバーライド {#override-user-and-group-settings} ポッドおよびコンテナ固有のセキュリティコンテキストを使用して、ユーザーおよびグループ設定をオーバーライドします: ```toml [[runners]] name = "k8s-runner" url = "https://gitlab.example.com" executor = "kubernetes" [runners.kubernetes] allowed_users = ["1000", "1001", "65534"] allowed_groups = ["1001", "65534"] # Pod security context - provides defaults for all containers [runners.kubernetes.pod_security_context] run_as_user = 1500 run_as_group = 1500 # Build container security context - overrides pod context [runners.kubernetes.build_container_security_context] run_as_user = 2000 run_as_group = 2001 # Helper container security context - overrides pod context [runners.kubernetes.helper_container_security_context] run_as_user = 3000 run_as_group = 3001 # Service container security context - overrides pod context [runners.kubernetes.service_container_security_context] run_as_user = 4000 run_as_group = 4001 ``` この例では: - ポッドセキュリティコンテキストは、特定の設定がないコンテナにデフォルト (1500:1500) を設定します。 - コンテナセキュリティコンテキストは、ポッドのデフォルトをオーバーライドします。 - ユーザー1500、2000、3000、および4000は`allowed_users`リストに含まれていませんが、これらの値は許可リスト検証をバイパスするため、セキュリティコンテキストでそれらを使用できます。 - この機能により、管理者はポッドとコンテナの両方のレベルで無制限のオーバーライド制御が可能になります。 各コンテナタイプを個別に設定できます。セキュリティコンテキスト設定は、ジョブ設定におけるユーザーの仕様よりも優先されます。 #### ジョブ設定でユーザーを指定する {#specify-users-in-job-configuration} ジョブはイメージ設定でユーザーを指定できます: ```yaml # Job with custom user 
job: image: name: alpine:latest kubernetes: user: "1000" script: - whoami - id # Job with user and group job_with_group: image: name: alpine:latest kubernetes: user: "1000:1001" script: - whoami - id # Job using environment variable job_dynamic: image: name: alpine:latest kubernetes: user: "${CUSTOM_USER_ID}" variables: CUSTOM_USER_ID: "1000" script: - whoami ``` #### セキュリティ検証 {#security-validation} Runnerは、ジョブレベルの設定のみの許可リストに対してユーザーIDとグループIDを検証します: - ルートユーザー/グループ (固有識別子/GID 0): ジョブ設定には常に明示的な許可リスト権限が必要です。 - 空の`allowed_users`: 非ルートジョブユーザーはすべて許可されます。 - 指定された`allowed_users`: リストされたジョブユーザーのみが許可されます。 - 空の`allowed_groups`: 非ルートジョブグループはすべて許可されます。 - 指定された`allowed_groups`: リストされたジョブグループのみが許可されます。 - セキュリティコンテキスト設定: 許可リストに対して検証されません (管理者オーバーライド) ```toml [runners.kubernetes] allowed_users = ["1000", "65534"] allowed_groups = ["1001", "65534"] ``` #### コンテナの動作と優先順位 {#container-behavior-and-precedence} セキュリティコンテキスト設定は、次の優先順位 (最高から最低) に従います: 1. コンテナセキュリティコンテキスト 1. ポッドセキュリティコンテキスト 1. ジョブ設定 ```toml [runners.kubernetes] # Pod-level defaults [runners.kubernetes.pod_security_context] run_as_user = 1500 run_as_group = 1500 # Container-specific overrides [runners.kubernetes.build_container_security_context] run_as_user = 1000 run_as_group = 1001 [runners.kubernetes.helper_container_security_context] run_as_user = 1000 run_as_group = 1001 ``` ```yaml job: image: name: alpine:latest kubernetes: user: "2000:2001" # Ignored - container security context uses 1000:1001 ``` 各コンテナタイプは、ポッドレベルのフォールバックを持つセキュリティコンテキスト設定を使用します: - ビルドコンテナ: 最初に`build_container_security_context`を使用し、次に`pod_security_context`を使用し、その次に`.gitlab-ci.yml`からのジョブレベルのユーザー設定を使用します。 - ヘルパーコンテナ: 最初に`helper_container_security_context`を使用し、次に`pod_security_context`を使用します。ジョブレベルのユーザー設定は継承しません。 - サービスコンテナ: 最初に`service_container_security_context`を使用し、次に`pod_security_context`を使用し、その次にジョブレベルのユーザー設定を使用します。 このアプローチにより、各コンテナタイプのセキュリティ設定を詳細に制御できると同時に、ヘルパーコンテナをジョブ仕様から分離できます。 #### Docker executorとの比較 {#comparison-with-docker-executor} | 機能 | 
Docker executor | Kubernetes executor | |-------------------------------|------------------------------------|----------------------------------------------| | ユーザー形式 | ユーザー名または固有識別子 (`root`または`1000`) | 数値固有識別子のみ (`1000`) | | グループ形式 | ユーザーフィールドではサポートされていません。 | 数値GID (`1000:1001`) | | 管理者オーバーライド方法 | Runnerの`user`フィールド | コンテナおよびポッドセキュリティコンテキスト | | 優先順位 | Runner > ジョブ | コンテナコンテキスト > ポッドコンテキスト > ジョブ | | セキュリティ検証 | ユーザー名許可リスト | 数値固有識別子/GID許可リスト | | 管理者オーバーライド | サポート対象 | サポートされています (ポッドおよびコンテナレベル) | | ヘルパーコンテナユーザー | ビルドコンテナと同じ | 独自の`helper_container_security_context`を使用 | | ポッドレベルのデフォルト | 利用不可 | `pod_security_context` | #### ユーザーとグループ設定のトラブルシューティングを行う {#troubleshoot-user-and-group-configuration} ##### エラー: `failed to parse UID`または`failed to parse GID` {#error-failed-to-parse-uid-or-failed-to-parse-gid} - ユーザーIDが数値であることを確認します: `"1000"`であって`"user"`ではありません。 - 形式を確認します: ユーザーとグループに`"1000:1001"` - 負の値は許可されていません。 ##### エラー: `user "1000" is not in the allowed list` {#error-user-1000-is-not-in-the-allowed-list} このエラーは、ジョブレベルのユーザー設定 (`.gitlab-ci.yml`) の場合にのみ発生します。ユーザーをRunner設定の`allowed_users`に追加するか、`allowed_users`を削除して非ルートジョブユーザーを許可します。セキュリティコンテキストとポッドセキュリティコンテキストのユーザーは、許可リストに対して検証されません。 ##### エラー: `group "1001" is not in the allowed list` {#error-group-1001-is-not-in-the-allowed-list} このエラーは、ジョブレベルのグループ設定 (`.gitlab-ci.yml`) の場合にのみ発生します。グループをRunner設定の`allowed_groups`に追加するか、`allowed_groups`を削除して非ルートジョブグループを許可します。セキュリティコンテキストとポッドセキュリティコンテキストのグループは、許可リストに対して検証されません。 ##### エラー: `user "0" is not in the allowed list` (ルートユーザーがブロックされています) {#error-user-0-is-not-in-the-allowed-list-root-user-blocked} このエラーは、ジョブ設定 (`.gitlab-ci.yml`) でルートが指定されている場合にのみ発生します。ジョブ設定のルートユーザー (固有識別子0) には明示的な権限が必要です。`"0"`を`allowed_users`に追加します。または、セキュリティコンテキストまたはポッドセキュリティコンテキストを使用してルートユーザーを設定します: `run_as_user = 0` (許可リスト検証をバイパスする) ##### コンテナが予期しないユーザーとして実行される {#container-runs-as-different-user-than-expected} Runnerの設定がセキュリティコンテキストでジョブ設定をオーバーライドするかどうかを確認します 
(セキュリティコンテキストが常に優先されます)。ジョブ設定のみを使用している場合は、`allowed_users`に目的のユーザーIDが含まれているかどうかを検証します。セキュリティコンテキストの値は許可リストに対して検証されず、管理者オーバーライド機能を提供します。 ### コンテナリソースを上書きする {#overwrite-container-resources} 各CI/CDジョブのKubernetes CPU割り当てとメモリ割り当てを上書きできます。ビルドコンテナ、ヘルパーコンテナ、サービスコンテナのリクエストと制限の設定を適用できます。 コンテナリソースを上書きするには、`.gitlab-ci.yml`ファイルで次の変数を使用します。 変数の値は、そのリソースの[最大上書き](#configuration-settings)設定に制限されます。リソースに対して最大上書き設定が指定されていない場合、変数は使用されません。 ``` yaml variables: KUBERNETES_CPU_REQUEST: "3" KUBERNETES_CPU_LIMIT: "5" KUBERNETES_MEMORY_REQUEST: "2Gi" KUBERNETES_MEMORY_LIMIT: "4Gi" KUBERNETES_EPHEMERAL_STORAGE_REQUEST: "512Mi" KUBERNETES_EPHEMERAL_STORAGE_LIMIT: "1Gi" KUBERNETES_HELPER_CPU_REQUEST: "3" KUBERNETES_HELPER_CPU_LIMIT: "5" KUBERNETES_HELPER_MEMORY_REQUEST: "2Gi" KUBERNETES_HELPER_MEMORY_LIMIT: "4Gi" KUBERNETES_HELPER_EPHEMERAL_STORAGE_REQUEST: "512Mi" KUBERNETES_HELPER_EPHEMERAL_STORAGE_LIMIT: "1Gi" KUBERNETES_SERVICE_CPU_REQUEST: "3" KUBERNETES_SERVICE_CPU_LIMIT: "5" KUBERNETES_SERVICE_MEMORY_REQUEST: "2Gi" KUBERNETES_SERVICE_MEMORY_LIMIT: "4Gi" KUBERNETES_SERVICE_EPHEMERAL_STORAGE_REQUEST: "512Mi" KUBERNETES_SERVICE_EPHEMERAL_STORAGE_LIMIT: "1Gi" ``` ### サービスのリストの定義 {#define-a-list-of-services} {{< history >}} - GitLab Runner 16.9で[`HEALTCHECK_TCP_SERVICES`のサポートが導入されました](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27215)。 {{< /history >}} `config.toml`で[サービス](https://docs.gitlab.com/ci/services/) のリストを定義します。 ```toml concurrent = 1 check_interval = 30 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] helper_image = "gitlab-registy.example.com/helper:latest" [[runners.kubernetes.services]] name = "postgres:12-alpine" alias = "db1" [[runners.kubernetes.services]] name = "registry.example.com/svc1" alias = "svc1" entrypoint = ["entrypoint.sh"] command = ["executable","param1","param2"] environment = ["ENV=value1", "ENV2=value2"] ``` サービス環境に`HEALTHCHECK_TCP_PORT`が含まれている場合、GitLab 
Runnerは、ユーザーCIスクリプトを開始する前に、サービスがそのポートで応答するまで待ちます。`.gitlab-ci.yml`の`services`セクションで`HEALTHCHECK_TCP_PORT`環境変数を設定することもできます。 ### サービスコンテナのリソースを上書きする {#overwrite-service-containers-resources} ジョブに複数のサービスコンテナがある場合、各サービスコンテナに明示的なリソースリクエストと制限を設定できます。`.gitlab-ci.yml`で指定されているコンテナリソースを上書きするには、各サービスでvariables属性を使用します。 ```yaml services: - name: redis:5 alias: redis5 variables: KUBERNETES_SERVICE_CPU_REQUEST: "3" KUBERNETES_SERVICE_CPU_LIMIT: "6" KUBERNETES_SERVICE_MEMORY_REQUEST: "3Gi" KUBERNETES_SERVICE_MEMORY_LIMIT: "6Gi" KUBERNETES_EPHEMERAL_STORAGE_REQUEST: "2Gi" KUBERNETES_EPHEMERAL_STORAGE_LIMIT: "3Gi" - name: postgres:12 alias: MY_relational-database.12 variables: KUBERNETES_CPU_REQUEST: "2" KUBERNETES_CPU_LIMIT: "4" KUBERNETES_MEMORY_REQUEST: "1Gi" KUBERNETES_MEMORY_LIMIT: "2Gi" KUBERNETES_EPHEMERAL_STORAGE_REQUEST: "1Gi" KUBERNETES_EPHEMERAL_STORAGE_LIMIT: "2Gi" ``` これらの特定の設定は、ジョブの一般設定よりも優先されます。これらの値は引き続き、そのリソースの[最大上書き設定](#configuration-settings)に制限されます。 ### Kubernetesのデフォルトのサービスアカウントを上書きする {#overwrite-the-kubernetes-default-service-account} `.gitlab-ci.yml`ファイル内の各CI/CDジョブのKubernetesサービスアカウントを上書きするには、変数`KUBERNETES_SERVICE_ACCOUNT_OVERWRITE`を設定します。 この変数を使用して、ネームスペースにアタッチされたサービスアカウントを指定できます。これは、複雑なRBAC設定で必要になることがあります。 ``` yaml variables: KUBERNETES_SERVICE_ACCOUNT_OVERWRITE: ci-service-account ``` CIの実行中に指定されたサービスアカウントのみが使用されるようにするには、次のいずれかの正規表現を定義します。 - `service_account_overwrite_allowed`設定。 - `KUBERNETES_SERVICE_ACCOUNT_OVERWRITE_ALLOWED`環境変数。 どちらも設定しない場合、上書きは無効になります。 ### `RuntimeClass`を設定する {#set-the-runtimeclass} `runtime_class_name`を使用して、各ジョブコンテナの[`RuntimeClass`](https://kubernetes.io/docs/concepts/containers/runtime-class/)を設定します。 `RuntimeClass`名を指定したが、クラスターで設定しなかった場合、またはこの機能がサポートされていない場合、executorはジョブの作成に失敗します。 ```toml concurrent = 1 check_interval = 30 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] runtime_class_name = "myclass" ``` ### ビルドログとスクリプトのベースディレクトリを変更する 
{#change-the-base-directory-for-build-logs-and-scripts} {{< history >}} - GitLab Runner 17.2で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/37760)されました。 {{< /history >}} ビルドログとスクリプトのために`emptyDir`ボリュームがポッドにマウントされるディレクトリを変更できます。このディレクトリは次の操作に使用できます。 - 変更されたイメージでジョブポッドを実行する。 - 特権のないユーザーとして実行する。 - `SecurityContext`設定をカスタマイズする。 ディレクトリを変更するには、次のようにします。 - ビルドログの場合は`logs_base_dir`を設定します。 - ビルドスクリプトの場合は`scripts_base_dir`を設定します。 期待される値は、末尾のスラッシュがないベースディレクトリを表す文字列です(`/tmp`、`/mydir/example`など)。**ディレクトリはすでに存在している必要があります**。 この値は、ビルドログおよびスクリプトのために生成されたパスの先頭に追加されます。例: ```toml [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] logs_base_dir = "/tmp" scripts_base_dir = "/tmp" ``` この設定では、次の場所に`emptyDir`ボリュームがマウントされます。 - ビルドログの場合はデフォルトの`/logs-${CI_PROJECT_ID}-${CI_JOB_ID}`ではなく`/tmp/logs-${CI_PROJECT_ID}-${CI_JOB_ID}`。 - ビルドスクリプトの場合は`/tmp/scripts-${CI_PROJECT_ID}-${CI_JOB_ID}`。 ### ユーザーネームスペース {#user-namespaces} Kubernetes 1.30以降では、[ユーザーネームスペース](https://kubernetes.io/docs/concepts/workloads/pods/user-namespaces/)を使用して、コンテナ内で実行しているユーザーをホスト上のユーザーから隔離できます。コンテナ内でrootとして実行しているプロセスは、ホスト上の別の特権のないユーザーとして実行できます。 ユーザーネームスペースを使用すると、CI/CDジョブの実行に使用されるイメージをより細かく制御できます。追加の設定が必要な操作(rootとしての実行など)も、ホスト上で追加のアタックサーフェスを生じることなく機能できます。 この機能を使用するには、クラスターが[適切に設定されている](https://kubernetes.io/docs/concepts/workloads/pods/user-namespaces/#introduction)ことを確認してください。次の例では、`hostUsers`キーの`pod_spec`を追加し、特権ポッドと特権エスカレーションの両方を無効にします。 ```toml [[runners]] environment = ["FF_USE_ADVANCED_POD_SPEC_CONFIGURATION=true"] builds_dir = "/tmp/builds" [runners.kubernetes] logs_base_dir = "/tmp" scripts_base_dir = "/tmp" privileged = false allowPrivilegeEscalation = false [[runners.kubernetes.pod_spec]] name = "hostUsers" patch = ''' [{"op": "add", "path": "/hostUsers", "value": false}] ''' patch_type = "json" ``` 
ユーザーネームスペースでは、ビルドディレクトリのデフォルトパス(`builds_dir`)、ビルドログのデフォルトパス(`logs_base_dir`)、またはビルドスクリプトのデフォルトパス(`scripts_base_dir`)を使用できません。コンテナのrootユーザーであっても、ボリュームをマウントする権限がありません。また、コンテナのファイルシステムのルートにディレクトリを作成することもできません。 代わりに[ビルドログとスクリプトのベースディレクトリを変更](#change-the-base-directory-for-build-logs-and-scripts)できます。`[[runners]].builds_dir`を設定して、ビルドディレクトリを変更することもできます。 ## オペレーティングシステム、アーキテクチャ、およびWindowsカーネルバージョン {#operating-system-architecture-and-windows-kernel-version} 設定済みのクラスターで異なるオペレーティングシステムを実行しているノードがある場合、Kubernetes executorを使用するGitLab Runnerは、それらのオペレーティングシステムでビルドを実行できます。 システムはヘルパーイメージのオペレーティングシステム、アーキテクチャ、およびWindowsカーネルバージョン(該当する場合)を判別します。次にこれらのパラメータを、ビルドの他の側面(使用するコンテナやイメージなど)に利用します。 次の図は、システムがこれらの詳細を検出する仕組みを示しています。 ```mermaid %%|fig-align: center flowchart TB init[Initial defaults:
OS: linux
Arch: amd64] hasAutoset{Configuration
helper_image_autoset_arch_and_os == true?} setArch[Update:
Arch: same as runner] isWin{GitLab Runner runs on Windows?} setWin[Update:
OS: windows
KernelVersion: same as runner] hasNodeSel{node_selector configured
in runners.kubernetes section?} hasNodeSelOverride{node_selector configured
as overwrite?} updateNodeSel[Update from node_selector if set:
OS: from kubernetes.io/os
Arch: from kubernetes.io/arch
KernelVersion: from node.kubernetes.io/windows-build] updateNodeSelOverride[Update from node_selector overwrites if set:
OS: from kubernetes.io/os
Arch: from kubernetes.io/arch
KernelVersion: from node.kubernetes.io/windows-build] result[final OS, Arch, kernelVersion] init --> hasAutoset hasAutoset -->|false | hasNodeSel hasAutoset -->|true | setArch setArch --> isWin isWin -->|false | hasNodeSel isWin -->|true | setWin setWin --> hasNodeSel hasNodeSel -->|false | hasNodeSelOverride hasNodeSel -->|true | updateNodeSel updateNodeSel --> hasNodeSelOverride hasNodeSelOverride -->|false | result hasNodeSelOverride -->|true | updateNodeSelOverride updateNodeSelOverride --> result ``` 以下に、ビルドのオペレーティングシステム、アーキテクチャ、およびWindowsカーネルバージョンの選択に影響を与える唯一のパラメータを示します。 - `helper_image_autoset_arch_and_os`設定 - 次の`kubernetes.io/os`、`kubernetes.io/arch`、および`node.kubernetes.io/windows-build`ラベルセレクター: - `node_selector`設定 - `node_selector`上書き 他のパラメータは、上記で説明した選択プロセスに影響を与えません。ただし、`affinity`などのパラメータを使用して、ビルドがスケジュールされるノードを細かく制限できます。 ## ノード {#nodes} ### ビルドを実行するノードを指定する {#specify-the-node-to-execute-builds} Kubernetesクラスター内のどのノードをビルドの実行に使用できるかを指定するには、`node_selector`オプションを使用します。これは、`string=string`形式(環境変数の場合は`string:string`)の[`key=value`](https://toml.io/en/v1.0.0#keyvalue-pair)ペアです。 Runnerは提供された情報を使用して、ビルドのオペレーティングシステムとアーキテクチャを判別します。これにより、正しい[ヘルパーイメージ](../../configuration/advanced-configuration.md#helper-image)が使用されるようになります。デフォルトのオペレーティングシステムとアーキテクチャは`linux/amd64`です。 特定のラベルを使用して、異なるオペレーティングシステムとアーキテクチャを持つノードをスケジュールできます。 #### `linux/arm64`の例 {#example-for-linuxarm64} ```toml [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes.node_selector] "kubernetes.io/arch" = "arm64" "kubernetes.io/os" = "linux" ``` #### `windows/amd64`の例 {#example-for-windowsamd64} Kubernetes for Windowsには特定の[制限](https://kubernetes.io/docs/concepts/windows/intro/#windows-os-version-support)があります。プロセス分離を使用している場合は、[`node.kubernetes.io/windows-build`](https://kubernetes.io/docs/reference/labels-annotations-taints/#nodekubernetesiowindows-build)ラベルを使用して特定のWindowsビルドバージョンも指定する必要があります。 ```toml [[runners]] name = "myRunner" url = "gitlab.example.com" 
executor = "kubernetes" # The FF_USE_POWERSHELL_PATH_RESOLVER feature flag has to be enabled for PowerShell # to resolve paths for Windows correctly when Runner is operating in a Linux environment # but targeting Windows nodes. environment = ["FF_USE_POWERSHELL_PATH_RESOLVER=true"] [runners.kubernetes.node_selector] "kubernetes.io/arch" = "amd64" "kubernetes.io/os" = "windows" "node.kubernetes.io/windows-build" = "10.0.20348" ``` ### ノードセレクターの上書き {#overwrite-the-node-selector} ノードセレクターを上書きするには、次の手順に従います。 1. `config.toml`ファイルまたはHelm `values.yaml`ファイルで、ノードセレクターの上書きを有効にします。 ```toml runners: ... config: | [[runners]] [runners.kubernetes] node_selector_overwrite_allowed = ".*" ``` 1. `.gitlab-ci.yml`ファイルで、ノードセレクターを上書きするための変数を定義します。 ```yaml variables: KUBERNETES_NODE_SELECTOR_* = '' ``` 次の例では、Kubernetesノードアーキテクチャを上書きするために、設定が`config.toml`ファイルと`.gitlab-ci.yml`ファイルで指定されています。 {{< tabs >}} {{< tab title="`config.toml`" >}} ```toml concurrent = 1 check_interval = 1 log_level = "debug" shutdown_timeout = 0 listen_address = ':9252' [session_server] session_timeout = 1800 [[runners]] name = "" url = "https://gitlab.com/" id = 0 token = "__REDACTED__" token_obtained_at = "0001-01-01T00:00:00Z" token_expires_at = "0001-01-01T00:00:00Z" executor = "kubernetes" shell = "bash" [runners.kubernetes] host = "" bearer_token_overwrite_allowed = false image = "alpine" namespace = "" namespace_overwrite_allowed = "" pod_labels_overwrite_allowed = "" service_account_overwrite_allowed = "" pod_annotations_overwrite_allowed = "" node_selector_overwrite_allowed = "kubernetes.io/arch=.*" # <--- allows overwrite of the architecture ``` {{< /tab >}} {{< tab title="`.gitlab-ci.yml`" >}} ```yaml job: image: IMAGE_NAME variables: KUBERNETES_NODE_SELECTOR_ARCH: 'kubernetes.io/arch=amd64' # <--- select the architecture ``` {{< /tab >}} {{< /tabs >}} ### ノードの関連性のリストを定義する {#define-a-list-of-node-affinities} 
ビルド時にポッド仕様に追加する[ノードアフィニティ](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#node-affinity)のリストを定義します。 > [!note] > `node_affinities`はビルドの実行に使用するオペレーティングシステムを決定するものではなく、`node_selectors`のみが決定します。詳細については、[オペレーティングシステム、アーキテクチャ、およびWindowsカーネルバージョン](#operating-system-architecture-and-windows-kernel-version)を参照してください。`config.toml`の設定例を次に示します。 ```toml concurrent = 1 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] [runners.kubernetes.affinity] [runners.kubernetes.affinity.node_affinity] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution]] weight = 100 [runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference.match_expressions]] key = "cpu_speed" operator = "In" values = ["fast"] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference.match_expressions]] key = "mem_speed" operator = "In" values = ["fast"] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution]] weight = 50 [runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference.match_expressions]] key = "core_count" operator = "In" values = ["high", "32"] [[runners.kubernetes.affinity.node_affinity.preferred_during_scheduling_ignored_during_execution.preference.match_fields]] key = "cpu_type" operator = "In" values = ["arm64"] [runners.kubernetes.affinity.node_affinity.required_during_scheduling_ignored_during_execution] [[runners.kubernetes.affinity.node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms]] 
[[runners.kubernetes.affinity.node_affinity.required_during_scheduling_ignored_during_execution.node_selector_terms.match_expressions]] key = "kubernetes.io/e2e-az-name" operator = "In" values = [ "e2e-az1", "e2e-az2" ] ``` ### ポッドがスケジュールされるノードを定義する {#define-nodes-where-pods-are-scheduled} 他のポッドのラベルに基づいて[ポッドをスケジュールできる](https://kubernetes.io/docs/concepts/scheduling-eviction/assign-pod-node/#inter-pod-affinity-and-anti-affinity)ノードを制約するには、ポッドアフィニティとアンチアフィニティを使用します。 `config.toml`の設定例を次に示します。 ```toml concurrent = 1 [[runners]] name = "myRunner" url = "gitlab.example.com" executor = "kubernetes" [runners.kubernetes] [runners.kubernetes.affinity] [runners.kubernetes.affinity.pod_affinity] [[runners.kubernetes.affinity.pod_affinity.required_during_scheduling_ignored_during_execution]] topology_key = "failure-domain.beta.kubernetes.io/zone" namespaces = ["namespace_1", "namespace_2"] [runners.kubernetes.affinity.pod_affinity.required_during_scheduling_ignored_during_execution.label_selector] [[runners.kubernetes.affinity.pod_affinity.required_during_scheduling_ignored_during_execution.label_selector.match_expressions]] key = "security" operator = "In" values = ["S1"] [[runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution]] weight = 100 [runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term] topology_key = "failure-domain.beta.kubernetes.io/zone" [runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.label_selector] [[runners.kubernetes.affinity.pod_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.label_selector.match_expressions]] key = "security_2" operator = "In" values = ["S2"] [runners.kubernetes.affinity.pod_anti_affinity] [[runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution]] topology_key = "failure-domain.beta.kubernetes.io/zone" 
namespaces = ["namespace_1", "namespace_2"] [runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution.label_selector] [[runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution.label_selector.match_expressions]] key = "security" operator = "In" values = ["S1"] [runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution.namespace_selector] [[runners.kubernetes.affinity.pod_anti_affinity.required_during_scheduling_ignored_during_execution.namespace_selector.match_expressions]] key = "security" operator = "In" values = ["S1"] [[runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution]] weight = 100 [runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term] topology_key = "failure-domain.beta.kubernetes.io/zone" [runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.label_selector] [[runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.label_selector.match_expressions]] key = "security_2" operator = "In" values = ["S2"] [runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.namespace_selector] [[runners.kubernetes.affinity.pod_anti_affinity.preferred_during_scheduling_ignored_during_execution.pod_affinity_term.namespace_selector.match_expressions]] key = "security_2" operator = "In" values = ["S2"] ``` ## ネットワーキング {#networking} ### コンテナライフサイクルフックを設定する {#configure-a-container-lifecycle-hook} [コンテナライフサイクルフック](https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/)を使用して、対応するライフサイクルフックの実行時にハンドラーに設定されているコードを実行します。 `PreStop`と`PostStart`の2種類のフックを設定できます。それぞれのフックでは1つのハンドラータイプのみを設定できます。 `config.toml`ファイルの設定例を次に示します。 ```toml [[runners]] name = "kubernetes" 
url = "https://gitlab.example.com/" executor = "kubernetes" token = "yrnZW46BrtBFqM7xDzE7dddd" [runners.kubernetes] image = "alpine:3.11" privileged = true namespace = "default" [runners.kubernetes.container_lifecycle.post_start.exec] command = ["touch", "/builds/postStart.txt"] [runners.kubernetes.container_lifecycle.pre_stop.http_get] port = 8080 host = "localhost" path = "/test" [[runners.kubernetes.container_lifecycle.pre_stop.http_get.http_headers]] name = "header_name_1" value = "header_value_1" [[runners.kubernetes.container_lifecycle.pre_stop.http_get.http_headers]] name = "header_name_2" value = "header_value_2" ``` 次の設定を使用して、各ライフサイクルフックを設定します。 | オプション | 型 | 必須 | 説明 | |--------------|---------------------------------|----------|-------------| | `exec` | `KubernetesLifecycleExecAction` | いいえ | `Exec`は、実行するアクションを指定します。 | | `http_get` | `KubernetesLifecycleHTTPGet` | いいえ | `HTTPGet`は、実行するHTTPリクエストを指定します。 | | `tcp_socket` | `KubernetesLifecycleTcpSocket` | いいえ | `TCPsocket`は、TCPポートが関与するアクションを指定します。 | #### `KubernetesLifecycleExecAction` {#kuberneteslifecycleexecaction} | オプション | 型 | 必須 | 説明 | |-----------|---------------|----------|-------------| | `command` | `string`リスト | はい | コンテナ内で実行するコマンドライン。 | #### `KubernetesLifecycleHTTPGet` {#kuberneteslifecyclehttpget} | オプション | 型 | 必須 | 説明 | |----------------|-----------------------------------------|----------|-------------| | `port` | `int` | はい | コンテナでアクセスするポートの番号。 | | `host` | 文字列 | いいえ | 接続先のホスト名。デフォルトはポッドIPです(オプション)。 | | `path` | 文字列 | いいえ | HTTPサーバーでアクセスするパス(オプション)。 | | `scheme` | 文字列 | いいえ | ホストへの接続に使用されるスキーム。デフォルトはHTTPです(オプション)。 | | `http_headers` | `KubernetesLifecycleHTTPGetHeader`リスト | いいえ | リクエストで設定するカスタムヘッダー(オプション)。 | #### `KubernetesLifecycleHTTPGetHeader` {#kuberneteslifecyclehttpgetheader} | オプション | 型 | 必須 | 説明 | |---------|--------|----------|-------------| | `name` | 文字列 | はい | HTTPヘッダー名。 | | `value` | 文字列 | はい | HTTPヘッダー値。 | #### `KubernetesLifecycleTcpSocket` {#kuberneteslifecycletcpsocket} | 
オプション | 型 | 必須 | 説明 | |--------|--------|----------|-------------| | `port` | `int` | はい | コンテナでアクセスするポートの番号。 | | `host` | 文字列 | いいえ | 接続先のホスト名。デフォルトはポッドIPです(オプション)。 | ### ポッドのDNS設定をする {#configure-pod-dns-settings} ポッドの[DNS設定](https://kubernetes.io/docs/concepts/services-networking/dns-pod-service/#pod-dns-config)をするには、次のオプションを使用します。 | オプション | 型 | 必須 | 説明 | |---------------|-----------------------------|----------|-------------| | `nameservers` | `string`リスト | いいえ | ポッドのDNSサーバーとして使用されるIPアドレスのリスト。 | | `options` | `KubernetesDNSConfigOption` | いいえ | nameプロパティ(必須)とvalueプロパティ(オプション)を含めることができるオブジェクトのリスト(オプション)。 | | `searches` | `string`リスト | いいえ | ポッドでのホスト名検索に使用するDNS検索ドメインのリスト。 | `config.toml`ファイルの設定例を次に示します。 ```toml concurrent = 1 check_interval = 30 [[runners]] name = "myRunner" url = "https://gitlab.example.com" token = "__REDACTED__" executor = "kubernetes" [runners.kubernetes] image = "alpine:latest" [runners.kubernetes.dns_config] nameservers = [ "1.2.3.4", ] searches = [ "ns1.svc.cluster-domain.example", "my.dns.search.suffix", ] [[runners.kubernetes.dns_config.options]] name = "ndots" value = "2" [[runners.kubernetes.dns_config.options]] name = "edns0" ``` #### `KubernetesDNSConfigOption` {#kubernetesdnsconfigoption} | オプション | 型 | 必須 | 説明 | |---------|-----------|----------|-------------| | `name` | 文字列 | はい | 設定オプションの名前。 | | `value` | `*string` | いいえ | 設定オプションの値。 | #### 削除される機能のデフォルトリスト {#default-list-of-dropped-capabilities} GitLab Runnerは、デフォルトで次の機能を削除します。 ユーザー定義の`cap_add`は、削除される機能のデフォルトリストよりも優先されます。デフォルトで削除される機能を追加する場合は、`cap_add`に追加します。 - `NET_RAW` ### ホストエイリアスを追加する {#add-extra-host-aliases} この機能は、Kubernetes 1.7以降で使用できます。 [ホストエイリアス](https://kubernetes.io/docs/tasks/network/customize-hosts-file-for-pods/)を設定して、コンテナ内の`/etc/hosts`ファイルにエントリを追加するようにKubernetesに指示します。 次のオプションを使用します。 | オプション | 型 | 必須 | 説明 | |-------------|---------------|----------|-------------| | `IP` | 文字列 | はい | ホストをアタッチするIPアドレス。 | | `Hostnames` | `string`リスト | はい | IPにアタッチされているホスト名エイリアスのリスト。 | 
`config.toml`ファイルの設定例を次に示します。 ```toml concurrent = 4 [[runners]] # usual configuration executor = "kubernetes" [runners.kubernetes] [[runners.kubernetes.host_aliases]] ip = "127.0.0.1" hostnames = ["web1", "web2"] [[runners.kubernetes.host_aliases]] ip = "192.168.1.1" hostnames = ["web14", "web15"] ``` コマンドラインパラメータ`--kubernetes-host_aliases`とJSONインプットを使用して、ホストエイリアスを設定することもできます。例: ```shell gitlab-runner register --kubernetes-host_aliases '[{"ip":"192.168.1.100","hostnames":["myservice.local"]},{"ip":"192.168.1.101","hostnames":["otherservice.local"]}]' ``` ## ボリューム {#volumes} ### Kubernetes executorでキャッシュを使用する {#using-the-cache-with-the-kubernetes-executor} キャッシュがKubernetes executorで使用されている場合、`/cache`という名前のボリュームがポッドにマウントされます。ジョブの実行中にキャッシュされたデータが必要になると、Runnerはキャッシュされたデータが利用可能かどうかを確認します。キャッシュボリュームで圧縮ファイルが利用可能な場合、キャッシュされたデータが利用可能です。 キャッシュボリュームを設定するには、`config.toml`ファイルで[`cache_dir`](../../configuration/advanced-configuration.md#the-runners-section)設定を使用します。 - 圧縮ファイルが利用可能な場合、圧縮ファイルはビルドフォルダーに展開され、ジョブで使用できるようになります。 - 利用できない場合、キャッシュされたデータは設定されているストレージからダウンロードされ、圧縮ファイルとして`cache dir`に保存されます。次に、圧縮ファイルが`build`フォルダーに解凍されます。 ### ボリュームタイプを設定する {#configure-volume-types} 次のボリュームタイプをマウントできます。 - `hostPath` - `persistentVolumeClaim` - `configMap` - `secret` - `emptyDir` - `csi` 複数のボリュームタイプを使用した設定の例を以下に示します。 ```toml concurrent = 4 [[runners]] # usual configuration executor = "kubernetes" [runners.kubernetes] [[runners.kubernetes.volumes.host_path]] name = "hostpath-1" mount_path = "/path/to/mount/point" read_only = true host_path = "/path/on/host" [[runners.kubernetes.volumes.host_path]] name = "hostpath-2" mount_path = "/path/to/mount/point_2" read_only = true [[runners.kubernetes.volumes.pvc]] name = "pvc-1" mount_path = "/path/to/mount/point1" [[runners.kubernetes.volumes.config_map]] name = "config-map-1" mount_path = "/path/to/directory" [runners.kubernetes.volumes.config_map.items] "key_1" = "relative/path/to/key_1_file" "key_2" = "key_2" [[runners.kubernetes.volumes.secret]] name 
= "secrets" mount_path = "/path/to/directory1" read_only = true [runners.kubernetes.volumes.secret.items] "secret_1" = "relative/path/to/secret_1_file" [[runners.kubernetes.volumes.empty_dir]] name = "empty-dir" mount_path = "/path/to/empty_dir" medium = "Memory" [[runners.kubernetes.volumes.csi]] name = "csi-volume" mount_path = "/path/to/csi/volume" driver = "my-csi-driver" [runners.kubernetes.volumes.csi.volume_attributes] size = "2Gi" ``` #### `hostPath`ボリューム {#hostpath-volume} コンテナ内の指定されたホストパスをマウントするようにKubernetesに指示するには、[`hostPath`ボリューム](https://kubernetes.io/docs/concepts/storage/volumes/#hostpath)を設定します。 `config.toml`ファイルで次のオプションを使用します。 | オプション | 型 | 必須 | 説明 | |---------------------|---------|----------|-------------| | `name` | 文字列 | はい | ボリュームの名前。 | | `mount_path` | 文字列 | はい | コンテナ内でボリュームがマウントされるパス。 | | `sub_path` | 文字列 | いいえ | マウントされるボリューム内の[サブパス](https://kubernetes.io/docs/concepts/storage/volumes/#using-subpath)(ルートではありません)。 | | `host_path` | 文字列 | いいえ | ボリュームとしてマウントされるホスト上のパス。値を指定しない場合、デフォルトでは`mount_path`と同じパスになります。 | | `read_only` | ブール値 | いいえ | ボリュームを読み取り専用モードに設定します。`false`がデフォルトです。 | | `mount_propagation` | 文字列 | いいえ | コンテナ間でマウントされたボリュームを共有します。詳細については、[マウントの伝搬](https://kubernetes.io/docs/concepts/storage/volumes/#mount-propagation)を参照してください。 | #### `persistentVolumeClaim`ボリューム {#persistentvolumeclaim-volume} Kubernetesクラスターで定義されている`persistentVolumeClaim`を使用してコンテナにマウントすることをKubernetesに指示するには、[`persistentVolumeClaim`ボリューム](https://kubernetes.io/docs/concepts/storage/volumes/#persistentvolumeclaim)を設定します。 `config.toml`ファイルで次のオプションを使用します。 | オプション | 型 | 必須 | 説明 | |---------------------|---------|----------|-------------| | `name` | 文字列 | はい | ボリュームの名前であり、使用する`PersistentVolumeClaim`の名前。変数をサポートしています。詳細については、[並行処理ごとの永続的ビルドボリューム](#persistent-per-concurrency-build-volumes)を参照してください。 | | `mount_path` | 文字列 | はい | ボリュームがマウントされるコンテナ内のパス。 | | `read_only` | ブール値 | いいえ | ボリュームを読み取り専用モードに設定します(デフォルトではfalseに設定されます)。 | | `sub_path` | 文字列 | いいえ | 
ルートの代わりにボリューム内の[サブパス](https://kubernetes.io/docs/concepts/storage/volumes/#using-subpath)をマウントします。 | | `mount_propagation` | 文字列 | いいえ | ボリュームのマウント伝播モードを設定します。詳細については、[Kubernetesのマウント伝播](https://kubernetes.io/docs/concepts/storage/volumes/#mount-propagation)を参照してください。 | #### `configMap`ボリューム {#configmap-volume} Kubernetesクラスターで定義されている[`configMap`](https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/)を使用してコンテナにマウントすることをKubernetesに指示するには、`configMap`ボリュームを設定します。 `config.toml`で次のオプションを使用します。 | オプション | 型 | 必須 | 説明 | |--------------|---------------------|----------|-------------| | `name` | 文字列 | はい | ボリュームの名前であり、使用する`configMap`の名前。 | | `mount_path` | 文字列 | はい | ボリュームがマウントされるコンテナ内のパス。 | | `read_only` | ブール値 | いいえ | ボリュームを読み取り専用モードに設定します(デフォルトではfalseに設定されます)。 | | `sub_path` | 文字列 | いいえ | ルートの代わりにボリューム内の[サブパス](https://kubernetes.io/docs/concepts/storage/volumes/#using-subpath)をマウントします。 | | `items` | `map[string]string` | いいえ | 使用する`configMap`のキーのキーからパスへのマッピング。 | `configMap`の各キーはファイルに変更され、マウントパスに保存されます。デフォルトでは次のようになります。 - すべてのキーが含まれます。 - `configMap`キーはファイル名として使用されます。 - 値はファイルコンテンツに保存されます。 デフォルトのキーと値のストレージを変更するには、`items`オプションを使用します。`items`オプションを使用すると、**指定されたキーのみ**がボリュームに追加され、他のキーはすべてスキップされます。 > [!note] > 存在しないキーを使用すると、ポッド作成ステージでジョブが失敗します。 #### `secret`ボリューム {#secret-volume} Kubernetesクラスターで定義されている`secret`を使用してコンテナにマウントすることをKubernetesに指示するには、[`secret`ボリューム](https://kubernetes.io/docs/concepts/storage/volumes/#secret)を設定します。 `config.toml`ファイルで次のオプションを使用します。 | オプション | 型 | 必須 | 説明 | |--------------|---------------------|----------|-------------| | `name` | 文字列 | はい | ボリュームの名前であり、使用する_シークレット_の名前。 | | `mount_path` | 文字列 | はい | ボリュームをマウントするコンテナ内のパス。 | | `read_only` | ブール値 | いいえ | ボリュームを読み取り専用モードに設定します(デフォルトはfalseです)。 | | `sub_path` | 文字列 | いいえ | ルートの代わりにボリューム内の[サブパス](https://kubernetes.io/docs/concepts/storage/volumes/#using-subpath)をマウントします。 | | `items` | `map[string]string` | いいえ | 使用するconfigMapからのキーのキーからパスへのマッピング。 | 
選択した`secret`の各キーは、選択されているマウントパスに保存されているファイルに変更されます。デフォルトでは次のようになります。 - すべてのキーが含まれます。 - `configMap`キーはファイル名として使用されます。 - 値はファイルコンテンツに保存されます。 デフォルトのキーと値のストレージを変更するには、`items`オプションを使用します。`items`オプションを使用すると、**指定されたキーのみ**がボリュームに追加され、他のキーはすべてスキップされます。 > [!note] > 存在しないキーを使用すると、ポッド作成ステージでジョブが失敗します。 #### `emptyDir`ボリューム {#emptydir-volume} コンテナに空のディレクトリをマウントするようにKubernetesに指示するには、[`emptyDir`ボリューム](https://kubernetes.io/docs/concepts/storage/volumes/#emptydir)を設定します。 `config.toml`ファイルで次のオプションを使用します。 | オプション | 型 | 必須 | 説明 | |--------------|--------|----------|-------------| | `name` | 文字列 | はい | ボリュームの名前。 | | `mount_path` | 文字列 | はい | ボリュームをマウントするコンテナ内のパス。 | | `sub_path` | 文字列 | いいえ | ルートの代わりにボリューム内の[サブパス](https://kubernetes.io/docs/concepts/storage/volumes/#using-subpath)をマウントします。 | | `medium` | 文字列 | いいえ | "Memory"を指定すると`tmpfs`が提供されます。それ以外の場合は、デフォルトでノードディスクストレージにデフォルト設定されます(デフォルトは"")。 | | `size_limit` | 文字列 | いいえ | `emptyDir`ボリュームに必要なローカルストレージの合計量。 | #### `csi`ボリューム {#csi-volume} コンテナに任意のストレージシステムをマウントするために、カスタム`csi`ドライバーを使用するようにKubernetesに指示するには、[Container Storage Interface(`csi`)ボリューム](https://kubernetes.io/docs/concepts/storage/volumes/#csi)を設定します。 `config.toml`で次のオプションを使用します。 | オプション | 型 | 必須 | 説明 | |---------------------|---------------------|----------|-------------| | `name` | 文字列 | はい | ボリュームの名前。 | | `mount_path` | 文字列 | はい | ボリュームをマウントするコンテナ内のパス。 | | `driver` | 文字列 | はい | 使用するボリュームドライバーの名前を指定する文字列値。 | | `fs_type` | 文字列 | いいえ | ファイルシステムのタイプの名前を指定する文字列値(`ext4`、`xfs`、`ntfs`など)。 | | `volume_attributes` | `map[string]string` | いいえ | `csi`ボリュームの属性のキー値ペアマッピング。 | | `sub_path` | 文字列 | いいえ | ルートの代わりにボリューム内の[サブパス](https://kubernetes.io/docs/concepts/storage/volumes/#using-subpath)をマウントします。 | | `read_only` | ブール値 | いいえ | ボリュームを読み取り専用モードに設定します(デフォルトはfalseです)。 | ### サービスコンテナにボリュームをマウントする {#mount-volumes-on-service-containers} 
ビルドコンテナに対して定義されたボリュームは、すべてのサービスコンテナにも自動的にマウントされます。この機能は、テストにかかる時間を短縮する目的でデータベースストレージをRAMにマウントするために、[`services_tmpfs`](../docker.md#mount-a-directory-in-ram)(Docker executorでのみ使用可能)の代替として使用できます。 `config.toml`ファイルの設定例を次に示します。 ```toml [[runners]] # usual configuration executor = "kubernetes" [runners.kubernetes] [[runners.kubernetes.volumes.empty_dir]] name = "mysql-tmpfs" mount_path = "/var/lib/mysql" medium = "Memory" ``` ### カスタムボリュームマウント {#custom-volume-mount} ジョブのビルドディレクトリを保存するには、設定されている`builds_dir`(デフォルトでは`/builds`)へのカスタムボリュームマウントを定義します。[`pvc`ボリューム](https://kubernetes.io/docs/concepts/storage/persistent-volumes/)を使用する場合、[アクセスモード](https://kubernetes.io/docs/concepts/storage/persistent-volumes/#access-modes)に基づいて、ジョブを1つのノードで実行するように制限されることがあります。 `config.toml`ファイルの設定例を次に示します。 ```toml concurrent = 4 [[runners]] # usual configuration executor = "kubernetes" builds_dir = "/builds" [runners.kubernetes] [[runners.kubernetes.volumes.empty_dir]] name = "repo" mount_path = "/builds" medium = "Memory" ``` ### 並行処理ごとの永続ビルドボリューム {#persistent-per-concurrency-build-volumes} {{< history >}} - `pvc.name`への変数挿入のサポートがGitLab 16.3で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4256)されました。 {{< /history >}} Kubernetes CIジョブのビルドディレクトリは、デフォルトでは一時的です。(`GIT_STRATEGY=fetch`を機能させるために)ジョブ間でGitクローンを永続化する場合は、ビルドフォルダーに対する永続ボリュームクレームをマウントする必要があります。複数のジョブを同時実行できるため、`ReadWriteMany`ボリュームを使用するか、同じRunner上で発生する可能性がある同時実行ジョブごとに1つのボリュームを用意する必要があります。後者の方がパフォーマンの向上を見込めます。このような設定の例を以下に示します。 ```toml concurrent = 4 [[runners]] executor = "kubernetes" builds_dir = "/mnt/builds" [runners.kubernetes] [[runners.kubernetes.volumes.pvc]] # CI_CONCURRENT_ID identifies parallel jobs of the same runner. 
name = "build-pvc-$CI_CONCURRENT_ID" mount_path = "/mnt/builds" ``` この例では、`build-pvc-3`に対する`build-pvc-0`という名前の永続ボリュームクレームを自分自身で作成します。Runnerの`concurrent`設定で指定されている数だけ作成します。 ### ヘルパーイメージを使用する {#use-a-helper-image} セキュリティポリシーを設定したら、[ヘルパーイメージ](../../configuration/advanced-configuration.md#helper-image)がポリシーに準拠している必要があります。イメージはルートグループから特権を受け取らないため、ユーザーIDがルートグループの一部であることを確認する必要があります。 > [!note] > `nonroot`環境のみが必要な場合は、ヘルパーイメージの代わりに[GitLab Runner UBI](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/container_registry/1766421) OpenShiftコンテナプラットフォームイメージを使用できます。あるいは[GitLab Runner Helper UBI](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/container_registry/1766433) OpenShift Container Platformイメージを使用することもできます。 次の例では、`nonroot`というユーザーとグループを作成し、そのユーザーとして実行するようにヘルパーイメージを設定します。 ```Dockerfile ARG tag FROM registry.gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/gitlab-runner-helper-ocp:${tag} USER root RUN groupadd -g 59417 nonroot && \ useradd -u 59417 nonroot -g nonroot WORKDIR /home/nonroot USER 59417:59417 ``` ## ビルドでDockerを使用する {#using-docker-in-builds} ビルドでDockerを使用する場合は、注意すべき点がいくつかあります。 ### `/var/run/docker.sock`の公開 {#exposed-varrundockersock} `runners.kubernetes.volumes.host_path`オプションを使用してホストの`/var/run/docker.sock`をビルドコンテナに公開する場合には、リスクが伴います。本番環境のコンテナと同じクラスターでビルドを実行する場合は注意してください。ノードのコンテナは、ビルドコンテナからアクセスできます。 ### `docker:dind`を使用する {#using-dockerdind} `docker:dind`(`docker-in-docker`イメージとも呼ばれる)を実行する場合、コンテナを特権モードで実行する必要があります。これには潜在的なリスクが伴い、さらに問題が発生する可能性があります。 Dockerデーモンは、通常は`.gitlab-ci.yml`で`service`として起動されるため、ポッド内で個別のコンテナとして実行されます。ポッド内のコンテナは、割り当てられたボリュームとIPアドレスのみを共有します。このIPアドレスは、`localhost`と相互に通信するときに使用されます。`docker:dind`コンテナは`/var/run/docker.sock`を共有せず、`docker`バイナリはデフォルトでそれを使用しようとします。 クライアントがTCPを使用してDockerデーモンと通信するように設定するには、もう一方のコンテナで、ビルドコンテナの環境変数を含めます。 - 非TLS接続の場合は`DOCKER_HOST=tcp://docker:2375`。 - TLS接続の場合は`DOCKER_HOST=tcp://docker:2376`。 Docker 
19.03以降では、TLSはデフォルトで有効になっていますが、クライアントに証明書をマップする必要があります。Docker-in-Dockerの非TLS接続を有効にするか、証明書をマウントできます。詳細については、[Docker executorとDocker-in-Dockerの使用](https://docs.gitlab.com/ci/docker/using_docker_build/#use-the-docker-executor-with-docker-in-docker)を参照してください。 ### ホストカーネルの公開を防ぐ {#prevent-host-kernel-exposure} `docker:dind`または`/var/run/docker.sock`を使用する場合、Dockerデーモンはホストマシンの基盤となるカーネルにアクセスできます。つまり、ポッドで設定された`limits`は、Dockerイメージがビルドされるときには機能しません。Dockerデーモンは、Kubernetesによって起動されたDockerビルドコンテナに課せられた制限に関係なく、ノードの全容量をレポートします。 ビルドコンテナを特権モードで実行する場合、または`/var/run/docker.sock`が公開されている場合、ホストカーネルがビルドコンテナに公開される可能性があります。公開を最小限に抑えるには、`node_selector`オプションでラベルを指定します。これにより、ノードにコンテナをデプロイする前に、ノードがラベルと一致することが保証されます。たとえばラベル`role=ci`を指定すると、ビルドコンテナはラベル`role=ci`が付けられたノードでのみ実行され、他のすべての本番サービスは他のノードで実行されます。 ビルドコンテナをさらに分離するには、ノード[taint](https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/)を使用します。taintは、他のポッドに追加の設定を行うことなく、他のポッドがビルドポッドと同じノードでスケジュールされることを防ぎます。 ### Dockerイメージとサービスを制限する {#restrict-docker-images-and-services} ジョブの実行に使用されるDockerイメージを制限できます。これを行うには、ワイルドカードパターンを指定します。たとえば、プライベートDockerレジストリのイメージのみを許可するには、次のようにします。 ```toml [[runners]] (...) executor = "kubernetes" [runners.kubernetes] (...) allowed_images = ["my.registry.tld:5000/*:*"] allowed_services = ["my.registry.tld:5000/*:*"] ``` あるいはこのレジストリからのイメージの特定のリストに制限するには、次のようにします。 ```toml [[runners]] (...) executor = "kubernetes" [runners.kubernetes] (...) allowed_images = ["my.registry.tld:5000/ruby:*", "my.registry.tld:5000/node:*"] allowed_services = ["postgres:9.4", "postgres:latest"] ``` ### Dockerプルポリシーを制限する {#restrict-docker-pull-policies} `.gitlab-ci.yml`ファイルでプルポリシーを指定できます。このポリシーは、CI/CDジョブがイメージをフェッチする方法を決定します。 `.gitlab-ci.yml`ファイルで指定されているものの中から使用できるプルポリシーを制限するには、`allowed_pull_policies`を使用します。 たとえば、`always`プルポリシーと`if-not-present`プルポリシーのみを許可するには、次のようにします。 ```toml [[runners]] (...) executor = "kubernetes" [runners.kubernetes] (...) 
allowed_pull_policies = ["always", "if-not-present"] ``` - `allowed_pull_policies`を指定しない場合、デフォルトは`pull_policy`キーワードの値になります。 - `pull_policy`を指定しない場合、クラスターのイメージの[デフォルトのプルポリシー](https://kubernetes.io/docs/concepts/containers/images/#updating-images)が使用されます。 - `pull_policy`と`allowed_pull_policies`の両方に含まれているプルポリシーだけがジョブによって使用されます。有効なプルポリシーは、[`pull_policy`キーワード](../docker.md#configure-how-runners-pull-images)に含まれるポリシーを`allowed_pull_policies`と比較することによって決定されます。GitLabでは、これら2つのポリシーリストの[共通部分](https://en.wikipedia.org/wiki/Intersection_(set_theory))が使用されます。たとえば、`pull_policy`が`["always", "if-not-present"]`、`allowed_pull_policies`が`["if-not-present"]`の場合、ジョブでは、両方のリストで定義されている唯一のプルポリシーである`if-not-present`だけが使用されます。 - 既存の`pull_policy`キーワードには、`allowed_pull_policies`で指定されているプルポリシーが少なくとも1つ含まれている必要があります。`pull_policy`の値の中に`allowed_pull_policies`と一致するものがない場合、ジョブは失敗します。 ## ジョブの実行 {#job-execution} GitLab Runnerは、デフォルトで`kube exec`の代わりに`kube attach`を使用します。これにより、不安定なネットワーク環境で[ジョブが途中で成功とマークされる](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4119)などの問題を回避できます。 レガシー実行戦略の削除の進捗については、[イシュー#27976](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27976)を参照してください。 ### Kubernetes APIへのリクエスト試行回数を設定する {#configure-the-number-of-request-attempts-to-the-kubernetes-api} デフォルトでは、Kubernetes executorは、試行が5回失敗すると、Kubernetes APIへの特定のリクエストを再試行します。遅延は、500ミリ秒のフロアと、デフォルト値が2秒のカスタマイズ可能な上限が設定されたバックオフアルゴリズムによって制御されます。再試行回数を設定するには、`config.toml`ファイルで`retry_limit`オプションを使用します。同様に、バックオフ上限には`retry_backoff_max`オプションを使用します。次のエラーは自動的に再試行されます。 - `error dialing backend` - `TLS handshake timeout` - `read: connection timed out` - `connect: connection timed out` - `Timeout occurred` - `http2: client connection lost` - `connection refused` - `tls: internal error` - [`io.unexpected EOF`](https://pkg.go.dev/io#ErrUnexpectedEOF) - [`syscall.ECONNRESET`](https://pkg.go.dev/syscall#pkg-constants) - [`syscall.ECONNREFUSED`](https://pkg.go.dev/syscall#pkg-constants) - [`syscall.ECONNABORTED`](https://pkg.go.dev/syscall#pkg-constants) 
- [`syscall.EPIPE`](https://pkg.go.dev/syscall#pkg-constants) 各エラーの再試行回数を制御するには、`retry_limits`オプションを使用します。`rety_limits`は、各エラーの再試行回数を個別に指定するものであり、エラーメッセージと再試行回数のマップです。エラーメッセージは、Kubernetes APIから返されるエラーメッセージのサブ文字列であることがあります。`retry_limits`オプションは`retry_limit`オプションよりも優先されます。 たとえば、環境内のTLS関連のエラーの再試行回数を、デフォルトの5回ではなく10回にするには、`retry_limits`オプションを設定します。 ```toml [[runners]] name = "myRunner" url = "https://gitlab.example.com/" executor = "kubernetes" [runners.kubernetes] retry_limit = 5 [runners.kubernetes.retry_limits] "TLS handshake timeout" = 10 "tls: internal error" = 10 ``` `exceeded quota`などのまったく異なるエラーを20回再試行するには、次のようにします。 ```toml [[runners]] name = "myRunner" url = "https://gitlab.example.com/" executor = "kubernetes" [runners.kubernetes] retry_limit = 5 [runners.kubernetes.retry_limits] "exceeded quota" = 20 ``` ### コンテナのエントリポイントに関する既知の問題 {#container-entrypoint-known-issues} > [!note] > GitLab 15.1以降では、`FF_KUBERNETES_HONOR_ENTRYPOINT`が設定されている場合、Dockerイメージで定義されたエントリポイントがKubernetes executorとともに使用されます。 コンテナのエントリポイントには、次の既知の問題があります。 - イメージのDockerfileにエントリポイントが定義されている場合、有効なShellを開く必要があります。このようにしないとジョブがハングします。 - Shellを開くために、システムはコマンドをビルドコンテナの[`args`](https://kubernetes.io/docs/reference/kubernetes-api/workload-resources/pod-v1/#entrypoint)として渡します。 - [ファイルタイプのCI/CD変数](https://docs.gitlab.com/ci/variables/#use-file-type-cicd-variables)は、エントリポイントの実行時にディスクに書き込まれません。ファイルは、スクリプト実行中にジョブでのみアクセス可能です。 - 次のCI/CD変数は、エントリポイントではアクセスできません。[`before_script`](https://docs.gitlab.com/ci/yaml/#beforescript)を使用して、スクリプトコマンドを実行する前にセットアップに変更を加えることができます。 - [設定で定義されているCI/CD変数](https://docs.gitlab.com/ci/variables/#define-a-cicd-variable-in-the-ui)。 - [マスクされたCI/CD変数](https://docs.gitlab.com/ci/variables/#mask-a-cicd-variable)。 GitLab Runner 17.4より前では次のような状況でした。 - エントリポイントログは、ビルドのログに転送されませんでした。 - Kubernetes executorと`kube exec`を使用した場合、GitLab RunnerはエントリポイントがShellを開くのを待機しませんでした (このセクションの以前の項目を参照)。 GitLab Runner 17.4以降では、エントリポイントログが転送されるようになりました。システムは、エントリポイントが実行され、Shellが起動するまで待ちます。これにより次のような影響があります。 - 
`FF_KUBERNETES_HONOR_ENTRYPOINT`が設定されていて、イメージのエントリポイントが`poll_timeout`(デフォルトは180秒)より長くかかる場合、ビルドは失敗します。エントリポイントの実行時間が長いことが予想される場合は、`poll_timeout`の値(および場合によっては`poll_interval`の値)を調整する必要があります。 - `FF_KUBERNETES_HONOR_ENTRYPOINT`と`FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY`が設定されている場合、システムはビルドコンテナに[起動プローブ](https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/#define-startup-probes)を追加し、エントリポイントがShellを起動するタイミングを認識します。カスタムエントリポイントが、指定された`args`を使用して想定されるShellを起動する場合、スタートアッププローブは自動的に解決されます。ただし`args`で渡されたコマンドを使用せずにコンテナイメージがShellを起動する場合、エントリポイントは、ビルドディレクトリのルート内に`.gitlab-startup-marker`という名前のファイルを作成して、スタートアッププローブ自体を解決する必要があります。スタートアッププローブは、`poll_interval`ごとに`.gitlab-startup-marker`ファイルを確認します。`poll_timeout`の間にファイルが存在しない場合、ポッドは異常とみなされ、システムはビルドを中断します。 ### ジョブ変数へのアクセスを制限する {#restrict-access-to-job-variables} Kubernetes executorを使用する場合、Kubernetesクラスターへのアクセス権を持つユーザーは、ジョブで使用される変数を読み取ることができます。デフォルトでは、ジョブ変数は以下に保存されます。 - ポッドの環境セクション ジョブ変数データへのアクセスを制限するには、ロールベースのアクセス制御(RBAC)を使用する必要があります。RBACを使用する場合、GitLab Runnerによって使用されるネームスペースにアクセスできるのはGitLab管理者のみです。 他のユーザーがGitLab Runnerネームスペースにアクセスする必要がある場合は、以下の`verbs`を設定して、GitLab Runnerネームスペースのユーザーアクセスを制限します。 - `pods`と`configmaps`の場合 - `get` - `watch` - `list` - `pods/exec`と`pods/attach`の場合は`create`を使用してください。 認可されたユーザーのRBAC定義の例: ```yaml kind: Role apiVersion: rbac.authorization.k8s.io/v1 metadata: name: gitlab-runner-authorized-users rules: - apiGroups: [""] resources: ["configmaps", "pods"] verbs: ["get", "watch", "list"] - apiGroups: [""] resources: ["pods/exec", "pods/attach"] verbs: ["create"] ``` ## 準備ステップでのリソースチェック {#resources-check-during-prepare-step} 前提条件: - `image_pull_secrets`または`service_account`が設定されていること。 - `resource_availability_check_max_attempts`がゼロより大きい数値に設定されていること。 - `get`および`list`権限が付与されているKubernetes `serviceAccount`が使用されていること。 GitLab Runnerは、新しいサービスアカウントまたはシークレットが使用可能かどうかを確認します。この確認操作は5秒間隔で試行されます。 - 
この機能はデフォルトで無効になっています。この機能を有効にするには、`resource_availability_check_max_attempts`を`0`以外の任意の値に設定します。設定した値によって、Runnerがサービスアカウントまたはシークレットを確認する回数が定義されます。 ### Kubernetesネームスペースを上書きする {#overwrite-the-kubernetes-namespace} 前提条件: - GitLab Runner Helmチャートの`values.yml`ファイルで、`rbac.clusterWideAccess`が`true`に設定されていること。 - Runnerに、コアAPIグループで設定された[権限](#configure-runner-api-permissions)が付与されていること。 Kubernetesネームスペースを上書きして、CIの目的でネームスペースを指定し、ポッドのカスタムセットをこのネームスペースにデプロイできます。CIのステージでコンテナ間のアクセスを有効にするために、Runnerによって起動されたポッドは、上書きされたネームスペース内にあります。 各CI/CDジョブのKubernetesネームスペースを上書きするには、`.gitlab-ci.yml`ファイルで`KUBERNETES_NAMESPACE_OVERWRITE`変数を設定します。 ``` yaml variables: KUBERNETES_NAMESPACE_OVERWRITE: ci-${CI_COMMIT_REF_SLUG} ``` > [!note] > この変数はクラスター上にネームスペースを作成しません。ジョブを実行する前に、ネームスペースが存在することを確認してください。 CI実行中に指定されたネームスペースのみを使用するには、`config.toml`ファイルで`namespace_overwrite_allowed`の正規表現を定義します。 ```toml [runners.kubernetes] ... namespace_overwrite_allowed = "ci-.*" ``` ================================================ FILE: docs-locale/ja-jp/executors/kubernetes/troubleshooting.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Kubernetes executorのトラブルシューティング --- Kubernetes executorの使用時に発生する一般的なエラーを以下に示します。 ## `Job failed (system failure): timed out waiting for pod to start` {#job-failed-system-failure-timed-out-waiting-for-pod-to-start} クラスターが`poll_timeout`で定義されたタイムアウトになる前にビルドポッドをスケジュールできない場合、ビルドポッドはエラーを返します。[Kubernetesスケジューラ](https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#pod-lifetime)は、それを削除できる必要があります。 このイシューを修正するには、`config.toml`ファイルの`poll_timeout`値を大きくします。 ## `context deadline exceeded` {#context-deadline-exceeded} ジョブログの`context deadline exceeded`エラーは通常、Kubernetes APIクライアントが特定のクラスターAPIリクエストでタイムアウトになったことを示しています。 
兆候がないか、[`kube-apiserver`クラスターコンポーネントのメトリクス](https://kubernetes.io/docs/concepts/cluster-administration/system-metrics/)をチェックします: - 応答レイテンシーの増加。 - ポッド、シークレット、ConfigMap、その他のコア(v1)リソースに対する一般的な作成または削除操作のエラー率。 `kube-apiserver`操作からのタイムアウト駆動型エラーのログは、次のように表示される場合があります: ```plaintext Job failed (system failure): prepare environment: context deadline exceeded Job failed (system failure): prepare environment: setting up build pod: context deadline exceeded ``` 場合によっては、`kube-apiserver`エラー応答は、そのサブコンポーネントの障害(Kubernetesクラスターの`etcdserver`など)に関する追加の詳細を提供する場合があります: ```plaintext Job failed (system failure): prepare environment: etcdserver: request timed out Job failed (system failure): prepare environment: etcdserver: leader changed Job failed (system failure): prepare environment: Internal error occurred: resource quota evaluates timeout ``` これらの`kube-apiserver`サービス障害は、ビルドポッドの作成中、および完了後のクリーンアップ試行中に発生する可能性があります: ```plaintext Error cleaning up secrets: etcdserver: request timed out Error cleaning up secrets: etcdserver: leader changed Error cleaning up pod: etcdserver: request timed out, possibly due to previous leader failure Error cleaning up pod: etcdserver: request timed out Error cleaning up pod: context deadline exceeded ``` ## `Dial tcp xxx.xx.x.x:xxx: i/o timeout` {#dial-tcp-xxxxxxxxxx-io-timeout} これはKubernetesのエラーで、通常、RunnerマネージャーからKubernetes APIサーバーに到達できないことを示します。この問題を解決するには: - ネットワークセキュリティポリシーを使用する場合は、通常、ポート443またはポート6443、あるいはその両方で、Kubernetes APIへのアクセスを許可してください。 - Kubernetes APIが実行されていることを確認してください。 ## Kubernetes APIとの通信を試みるときに接続が拒否されました {#connection-refused-when-attempting-to-communicate-with-the-kubernetes-api} GitLab RunnerがKubernetes APIにリクエストを送信して失敗した場合、[`kube-apiserver`](https://kubernetes.io/docs/concepts/overview/components/#kube-apiserver)が過負荷状態で、APIリクエストを受け付けられない、または処理できないことが原因である可能性があります。 ## `Error cleaning up pod`と`Job failed (system failure): prepare environment: waiting for pod running` 
{#error-cleaning-up-pod-and-job-failed-system-failure-prepare-environment-waiting-for-pod-running} Kubernetesがジョブポッドをタイムリーにスケジュールできない場合、次のエラーが発生します。GitLab Runnerはポッドの準備ができるのを待ちますが、失敗するとポッドのクリーンアップを試みますが、これも失敗する可能性があります。 ```plaintext Error: Error cleaning up pod: Delete "https://xx.xx.xx.x:443/api/v1/namespaces/gitlab-runner/runner-0001": dial tcp xx.xx.xx.x:443 connect: connection refused Error: Job failed (system failure): prepare environment: waiting for pod running: Get "https://xx.xx.xx.x:443/api/v1/namespaces/gitlab-runner/runner-0001": dial tcp xx.xx.xx.x:443 connect: connection refused ``` トラブルシューティングを行うには、Kubernetesのプライマリノードと、[`kube-apiserver`](https://kubernetes.io/docs/concepts/overview/components/#kube-apiserver)インスタンスを実行するすべてのノードを確認してください。クラスター上でスケールアップしたいターゲットポッド数を管理するために必要なすべてのリソースがそれらに備わっていることを確認してください。 GitLab Runnerがポッドが`Ready`ステータスに達するまで待機する時間を変更するには、[`poll_timeout`](_index.md#other-configtoml-settings)設定を使用します。 ポッドがどのようにスケジュールされるか、または時間どおりにスケジュールされない理由をよりよく理解するには、[Kubernetesスケジューラについてお読みください](https://kubernetes.io/docs/concepts/scheduling-eviction/kube-scheduler/)。 ## `request did not complete within requested timeout` {#request-did-not-complete-within-requested-timeout} ビルドポッドの作成中に観測されたメッセージ`request did not complete within requested timeout`は、Kubernetesクラスターで構成された[アドミッションコントロールWebhook](https://kubernetes.io/docs/reference/access-authn-authz/extensible-admission-controllers/)がタイムアウトしていることを示します。 アドミッションコントロールWebhookは、スコープが設定されているすべてのAPIリクエストに対するクラスターレベルの管理制御インターセプトであり、時間内に実行されない場合、障害を引き起こす可能性があります。 アドミッションコントロールWebhookは、傍受するAPIリクエストとネームスペースネームスペースソースをきめ細かく制御できるフィルターをサポートしています。GitLab RunnerからのKubernetes API呼び出しがアドミッションコントロールWebhookを通過する必要がない場合は、GitLab Runnerネームスペースを無視するように[Webhookのセレクター/フィルターの構成](https://kubernetes.io/docs/reference/access-authn-authz/extensible-admission-controllers/#matching-requests-objectselector)を変更するか、[GitLab Runner 
Helmチャート`values.yaml`](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/57e026d7f43f63adc32cdd2b21e6d450abcf0686/values.yaml#L490-500)で`podAnnotations`または`podLabels`を構成して、GitLab Runnerポッドに除外ラベル/注釈を適用できます。 たとえば、[DataDogアドミッションコントロールWebhook](https://docs.datadoghq.com/containers/cluster_agent/admission_controller/?tab=operator)がGitLab Runnerマネージャーポッドによって行われたAPIリクエストを傍受しないようにするには、次を追加できます: ```yaml podLabels: admission.datadoghq.com/enabled: false ``` KubernetesクラスターのアドミッションコントロールWebhookを一覧表示するには、次を実行します: ```shell kubectl get validatingwebhookconfiguration -o yaml kubectl get mutatingwebhookconfiguration -o yaml ``` アドミッションコントロールWebhookがタイムアウトすると、次の形式のログが確認できます: ```plaintext Job failed (system failure): prepare environment: Timeout: request did not complete within requested timeout Job failed (system failure): prepare environment: setting up credentials: Timeout: request did not complete within requested timeout ``` アドミッションコントロールWebhookからの障害は、代わりに次のように表示される場合があります: ```plaintext Job failed (system failure): prepare environment: setting up credentials: Internal error occurred: failed calling webhook "example.webhook.service" ``` ## エラー`Could not resolve host: example.com` {#error-could-not-resolve-host-examplecom} [ヘルパーイメージ](../../configuration/advanced-configuration.md#helper-image)の`alpine`フレーバーを使用している場合、Alpineの`musl`のDNSリゾルバーに関連する[DNSイシュー](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4129)が発生する可能性があります。エラーは次のように表示される場合があります: - `fatal: unable to access 'https://gitlab-ci-token:token@example.com/repo/proj.git/': Could not resolve host: example.com` このイシューを解決するには、`helper_image_flavor = "ubuntu"`オプションを使用します。 ## `docker: Cannot connect to the Docker daemon at tcp://docker:2375. 
Is the docker daemon running?` {#docker-cannot-connect-to-the-docker-daemon-at-tcpdocker2375-is-the-docker-daemon-running} このエラーは、[Docker-in-Docker](_index.md#using-dockerdind)を使用している場合に、DINDサービスが完全に起動する前にアクセスしようとすると発生する可能性があります。詳細については、[このイシュー](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/27215)を参照してください。 ## `curl: (35) OpenSSL SSL_connect: SSL_ERROR_SYSCALL in connection to github.com:443` {#curl-35-openssl-ssl_connect-ssl_error_syscall-in-connection-to-githubcom443} このエラーは、[Docker-in-Docker](_index.md#using-dockerdind)を使用している場合に、DINDの最大転送ユニット(MTU)がKubernetesオーバーレイネットワークよりも大きい場合に発生する可能性があります。DINDはデフォルトのMTU 1500を使用しますが、これはデフォルトのオーバーレイネットワーク全体をルーティングするには大きすぎます。DIND MTUは、サービス定義内で変更できます: ```yaml services: - name: docker:dind command: ["--mtu=1450"] ``` ## `MountVolume.SetUp failed for volume "kube-api-access-xxxxx" : chown is not supported by windows` {#mountvolumesetup-failed-for-volume-kube-api-access-xxxxx--chown-is-not-supported-by-windows} CI/CDジョブを実行すると、次のようなエラーが発生する可能性があります: ```plaintext MountVolume.SetUp failed for volume "kube-api-access-xxxxx" : chown c:\var\lib\kubelet\pods\xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx\volumes\kubernetes.io~projected\kube-api-access-xxxxx\..2022_07_07_20_52_19.102630072\token: not supported by windows ``` このイシューは、[ノードセレクターを使用](_index.md#specify-the-node-to-execute-builds)して、異なるオペレーティングシステムとアーキテクチャを持つノードでビルドを実行する場合に発生します。 このイシューを修正するには、Runnerマネージャーポッドが常にLinuxノードでスケジュールされるように`nodeSelector`を構成します。たとえば、[`values.yaml`ファイル](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/main/values.yaml)には、次のものが含まれている必要があります: ```yaml nodeSelector: kubernetes.io/os: linux ``` ## ビルドポッドにRunner IAMロールではなく、ワーカーノードのIAMロールが割り当てられています {#build-pods-are-assigned-the-worker-nodes-iam-role-instead-of-runner-iam-role} このイシューは、ワーカーノードのIAMロールに正しいロールを引き受ける権限がない場合に発生します。これを修正するには、`sts:AssumeRole`権限をワーカーノードのIAMロールの信頼関係に追加します: ```json { "Effect": "Allow", "Principal": { "AWS": "arn:aws:iam:::role/" }, "Action": "sts:AssumeRole" } ``` ## エラー: `pull_policy 
([Always]) defined in GitLab pipeline config is not one of the allowed_pull_policies` {#error-pull_policy-always-defined-in-gitlab-pipeline-config-is-not-one-of-the-allowed_pull_policies} このイシューは、`.gitlab-ci.yml`で`pull_policy`を指定したが、Runnerの構成ファイルに構成されたポリシーがない場合に発生します。エラーは次のように表示される場合があります: - `Preparation failed: invalid pull policy for image 'image-name:latest': pull_policy ([Always]) defined in GitLab pipeline config is not one of the allowed_pull_policies ([])` このイシューを修正するには、[Dockerプルポリシーの制限](_index.md#restrict-docker-pull-policies)に従って、構成に`allowed_pull_policies`を追加します。 ## バックグラウンドプロセスによりジョブがハングアップし、タイムアウトになります {#background-processes-cause-jobs-to-hang-and-timeout} ジョブの実行中に開始されたバックグラウンドプロセスは、[ビルドジョブが終了するのを防ぐ](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/2880)ことができます。これを回避するには、次のことができます: - プロセスをダブルフォークします。例: `command_to_run < /dev/null &> /dev/null &`。 - ジョブスクリプトを終了する前にプロセスを強制終了します。 ## キャッシュ関連の`permission denied`エラー {#cache-related-permission-denied-errors} ジョブで生成されるファイルとフォルダーには、特定のUNIX所有権と権限があります。ファイルとフォルダーがアーカイブまたは抽出されると、UNIXの詳細が保持されます。ただし、ファイルとフォルダーは、[ヘルパーイメージ](../../configuration/advanced-configuration.md#helper-image)の`USER`構成と一致しない場合があります。 `Creating cache ...`ステップで権限関連のエラーが発生した場合は、次のことができます: - 解決策として、ソースデータが変更されているかどうかを調査します。たとえば、キャッシュされたファイルを作成するジョブスクリプトなどです。 - 回避策として、一致する[chown](https://linux.die.net/man/1/chown)コマンドと[chmod](https://linux.die.net/man/1/chmod)コマンドを追加します。 [(`before_`/`after_`)`script:`ディレクティブ](https://docs.gitlab.com/ci/yaml/#default)へ。 ## 初期化システムを備えたビルドコンテナ内の明らかに冗長なシェルプロセス {#apparently-redundant-shell-process-in-build-container-with-init-system} プロセスツリーには、次のいずれかの場合にシェルプロセスが含まれる場合があります: - `FF_USE_LEGACY_KUBERNETES_EXECUTION_STRATEGY`が`false`で、`FF_USE_DUMB_INIT_WITH_KUBERNETES_EXECUTOR`が`true`の場合。 - ビルドイメージの`ENTRYPOINT`は、初期化システム(`tini-init`や`dumb-init`など)です。 ```shell UID PID PPID C STIME TTY TIME CMD root 1 0 0 21:58 ? 
00:00:00 /scripts-37474587-5556589047/dumb-init -- sh -c if [ -x /usr/local/bin/bash ]; then .exec /usr/local/bin/bash elif [ -x /usr/bin/bash ]; then .exec /usr/bin/bash elif [ -x /bin/bash ]; then .exec /bin/bash elif [ -x /usr/local/bin/sh ]; then .exec /usr/local/bin/sh elif [ -x /usr/bin/sh ]; then .exec /usr/bin/sh elif [ -x /bin/sh ]; then .exec /bin/sh elif [ -x /busybox/sh ]; then .exec /busybox/sh else .echo shell not found .exit 1 fi root 7 1 0 21:58 ? 00:00:00 /usr/bin/bash <---------------- WHAT IS THIS??? root 26 1 0 21:58 ? 00:00:00 sh -c (/scripts-37474587-5556589047/detect_shell_script /scripts-37474587-5556589047/step_script 2>&1 | tee -a /logs-37474587-5556589047/output.log) & root 27 26 0 21:58 ? 00:00:00 \_ /usr/bin/bash /scripts-37474587-5556589047/step_script root 32 27 0 21:58 ? 00:00:00 | \_ /usr/bin/bash /scripts-37474587-5556589047/step_script root 37 32 0 21:58 ? 00:00:00 | \_ ps -ef --forest root 28 26 0 21:58 ? 00:00:00 \_ tee -a /logs-37474587-5556589047/output.log ``` このシェルプロセスは、`sh`、`bash`、または`busybox`の可能性があり、`PPID`が1、`PID`が6または7の場合、初期化システムによって実行されるシェル検出スクリプトによって開始されるシェルです(上記の`PID` 1)。このプロセスは冗長ではなく、ビルドコンテナが初期化システムで実行されている場合の典型的な操作です。 ## Runnerポッドは、登録が成功したにもかかわらず、ジョブの結果を実行できず、タイムアウトになります {#runner-pod-fails-to-run-job-results-and-times-out-despite-successful-registration} RunnerポッドはGitLabに登録すると、ジョブの実行を試みますが、実行されず、最終的にジョブはタイムアウトになります。次のエラーが報告されます: ```plaintext There has been a timeout failure or the job got stuck. Check your timeout limits or try again. This job does not have a trace. ``` この場合、Runnerは次のエラーを受け取る可能性があります。 ```plaintext HTTP 204 No content response code when connecting to the `jobs/request` API. 
``` このイシューのトラブルシューティングを行うには、APIにPOSTリクエストを手動で送信して、TCP接続がハングしているかどうかを検証します。TCP接続がハングしている場合、RunnerはCIジョブペイロードをリクエストできない可能性があります。 ## `failed to reserve container name` (`gcs-fuse-csi-driver`が使用されている場合) {#failed-to-reserve-container-name-for-init-permissions-container-when-gcs-fuse-csi-driver-is-used} `gcs-fuse-csi-driver` `csi`ドライバーは、[initコンテナのボリュームのマウントをサポートしていません](https://github.com/GoogleCloudPlatform/gcs-fuse-csi-driver/issues/38)。これにより、このドライバーを使用するときにinitコンテナの起動が失敗する可能性があります。[Kubernetes 1.28で導入された](https://kubernetes.io/blog/2023/08/25/native-sidecar-containers/)機能は、このバグを解決するために、ドライバーのプロジェクトでサポートされている必要があります。 ## エラー: `only read-only root filesystem container is allowed` {#error-only-read-only-root-filesystem-container-is-allowed} 読み取り専用でマウントされたルートファイルシステム上でコンテナを実行するように強制するアドミッションコントロールポリシーを持つクラスターでは、このエラーは次の場合に表示される可能性があります: - GitLab Runnerをインストールします。 - GitLab Runnerがビルドポッドをスケジュールしようとします。 これらのアドミッションコントロールポリシーは通常、[Gatekeeper](https://open-policy-agent.github.io/gatekeeper/website/)や[Kyverno](https://kyverno.io/)などのアドミッションコントロールコントローラーによって適用されます。たとえば、読み取り専用のルートファイルシステム上でコンテナを実行するように強制するポリシーは、[`readOnlyRootFilesystem`](https://open-policy-agent.github.io/gatekeeper-library/website/validation/read-only-root-filesystem/) Gatekeeperポリシーです。 この問題を解決するには: - クラスターにデプロイされたすべてのポッドは、アドミッションコントロールコントローラーがポッドをブロックしないように、`securityContext.readOnlyRootFilesystem`をコンテナの`true`に設定して、アドミッションコントロールポリシーに準拠する必要があります。 - ルートファイルシステムが読み取り専用でマウントされていても、コンテナは正常に実行され、ファイルシステムに書き込むことができる必要があります。 ### GitLab Runnerの場合 {#for-gitlab-runner} [GitLab Runner Helmチャート](../../install/kubernetes.md)でGitLab Runnerがデプロイされている場合、次のものを持つようにGitLabチャートの構成を更新する必要があります: - 適切な`securityContext`値: ```yaml <...> securityContext: readOnlyRootFilesystem: true <...> ``` - ポッドが書き込める場所にマウントされた書き込み可能なファイルシステム: ```yaml <...> volumeMounts: - name: tmp-dir mountPath: /tmp volumes: - name: tmp-dir emptyDir: medium: "Memory" <...> ``` ### ビルドポッドの場合 {#for-the-build-pod} 
[runners.kubernetes.helper_container_security_context] read_only_root_filesystem = true # This section is only needed if jobs with services are used [runners.kubernetes.service_container_security_context] read_only_root_filesystem = true
Windowsノードで[サービス](https://docs.gitlab.com/ci/services/)を使用しようとすると、次のエラーで失敗する可能性があります: - `ERROR: Job failed (system failure): prepare environment: admission webhook "windows.common-webhooks.networking.gke.io" denied the request: spec.hostAliases: Invalid value: []v1.HostAlias{v1.HostAlias{IP:"127.0.0.1", Hostnames:[]string{""}}}: Windows does not support this field.` Kubernetesランタイムによっては、エラーが報告されるか、黙って無視される可能性があります。たとえば、GKEはエラーを報告します。 サービスは、Kubernetes executorの`hostAlias`を使用して実装されます。これは、Windowsコンテナではサポートされていません。 ================================================ FILE: docs-locale/ja-jp/executors/kubernetes/use_podman_with_kubernetes.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Kubernetes上のGitLab RunnerでPodmanを使用する --- Podmanは、オープンソースの[Open Container Initiative](https://opencontainers.org/)(OCI)ツールで、コンテナを開発、管理、実行するために使用されます。 Podmanは、ルートユーザーやホストでの[特権](../../security/_index.md#usage-of-docker-executor)エスカレーションなしに、CIジョブでコンテナイメージをビルドできる設定を提供します。 このドキュメントでは、OpenShiftおよび非OpenShift KubernetesクラスターでGitLab Runnerで使用するためにPodmanを設定する方法について説明します。この設定は、ルートおよび非ルートユーザーとして設定されたコンテナイメージに適用されます。 ## 非OpenShift KubernetesクラスターでのPodmanの実行 {#run-podman-on-non-openshift-kubernetes-cluster} ### `--privileged`フラグを`true`に設定した状態で、非ルートユーザーとしてPodmanを実行します {#run-podman-as-a-non-root-user-with-the---privileged-flag-set-to-true} {{< alert type="warning" >}} `--privileged`フラグを`true`に設定してPodmanを実行すると、コンテナエンジンは追加のセキュリティ制御の有無にかかわらずコンテナを起動します。 {{< /alert >}} 非ルートコンテナプロセスを持つ非ルートユーザーとしてPodmanを実行するには: 1. 
`.gitlab-ci.yml`ファイルで次のサンプルコードを使用して、Podmanでコンテナイメージを作成します: ```yaml variables: FF_USE_POWERSHELL_PATH_RESOLVER: "true" FF_RETRIEVE_POD_WARNING_EVENTS: "true" FF_PRINT_POD_EVENTS: "true" FF_SCRIPT_SECTIONS: "true" CI_DEBUG_SERVICES: "true" GIT_DEPTH: 5 HOME: /my_custom_dir DOCKER_HOST: tcp://docker:2375 podman-privileged-test: image: quay.io/podman/stable before_script: - podman info - id script: - podman build . -t playground-bis:testing ``` 1. 次の設定を`config.toml`ファイルに追加して、デフォルトの`user_id`を`1000`に設定します: ```ini [runners.kubernetes.pod_security_context] run_as_user = 1000 [runners.kubernetes.build_container_security_context] run_as_user = 1000 ``` 1. 次のRunnerの設定を`config.toml`ファイルに追加します: ```toml listen_address = ":9252" concurrent = 3 check_interval = 1 log_level = "debug" log_format = "runner" connection_max_age = "15m0s" shutdown_timeout = 0 [session_server] session_timeout = 1800 [[runners]] name = "investigation" limit = 50 url = "https://gitlab.com/" id = 0 token = "glrt-REDACTED" token_obtained_at = 2024-09-30T14:38:04.623237Z executor = "kubernetes" builds_dir = "/my_custom_dir" shell = "bash" [runners.kubernetes] host = "" bearer_token_overwrite_allowed = false image = "" namespace = "" namespace_overwrite_allowed = "" namespace_per_job = false privileged = true node_selector_overwrite_allowed = ".*" node_tolerations_overwrite_allowed = "" pod_labels_overwrite_allowed = "" service_account_overwrite_allowed = "" pod_annotations_overwrite_allowed = "" [runners.kubernetes.pod_labels] user = "ratchade" [runners.kubernetes.volumes] [[runners.kubernetes.volumes.empty_dir]] name = "repo" mount_path = "/my_custom_dir" [runners.kubernetes.pod_security_context] run_as_user = 1000 [runners.kubernetes.build_container_security_context] run_as_user = 1000 [[runners.kubernetes.services]] name = "" [runners.kubernetes.dns_config] ``` ジョブが期待どおりに合格した場合、ジョブログは次の例のようになります: ```shell ... $ podman build . 
-t playground-bis:testing STEP 1/6: FROM docker.io/library/golang:1.24.4 AS builder Trying to pull docker.io/library/golang:1.24.4... Getting image source signatures Copying blob sha256:6564e0d9b89ebe3e93013c7d7fbf4d560c5831ed61448167899654bf22c6dc59 Copying blob sha256:2b238499ec52e0d6be479f948c76ba0bc3cc282f612d5a6a4b5ef52ff45f6b2c Copying blob sha256:6d11c181ebb38ef30f2681a42f02030bc6fdcfbe9d5248270ee065eb7302b500 Copying blob sha256:600c2555aee6a6bed84df8b8e456b2d705602757d42f5009a41b03abceff02f8 Copying blob sha256:41b754d079e82fafdf15447cfc188868092eaf1cf4a3f96c9d90ab1b7db91230 Copying blob sha256:a355a3cac949bed5cda9c62103ceb0f004727cedcd2a17d7c9836aea1a452fda Copying blob sha256:4f4fb700ef54461cfa02571ae0db9a0dc1e0cdb5577484a6d75e68dc38e8acc1 Copying config sha256:723e5b94e776fd1a0d4e9bb860400f02acbe62cdac487f114f5bd6303d76fbd9 Writing manifest to image destination STEP 2/6: WORKDIR "/workspace" --> 32b9a99335a7 STEP 3/6: COPY . . --> 3de77f571048 STEP 4/6: RUN go build -v main.go internal/unsafeheader internal/goarch internal/cpu internal/abi internal/bytealg internal/byteorder internal/chacha8rand internal/coverage/rtcov internal/godebugs internal/goexperiment internal/goos internal/profilerecord internal/runtime/atomic internal/runtime/syscall internal/stringslite internal/runtime/exithook runtime/internal/math runtime/internal/sys cmp internal/itoa internal/race runtime math/bits math unicode/utf8 sync/atomic unicode internal/asan internal/msan internal/reflectlite iter sync slices errors internal/bisect strconv io internal/oserror path internal/godebug syscall reflect time io/fs internal/filepathlite internal/syscall/unix internal/poll internal/fmtsort internal/syscall/execenv internal/testlog os fmt command-line-arguments --> 6340b6cccaa9 STEP 5/6: RUN ls -halF total 2.2M drwxr-xr-x 1 root root 4.0K Oct 3 15:14 ./ dr-xr-xr-x 1 root root 4.0K Oct 3 15:14 ../ drwxrwxrwx 6 root root 4.0K Oct 3 15:14 .git/ -rw-rw-rw- 1 root root 690 Oct 3 15:14 
.gitlab-ci.yml -rw-rw-rw- 1 root root 1.8K Oct 3 15:14 Dockerfile -rw-rw-rw- 1 root root 74 Oct 3 15:14 Dockerfile_multistage -rw-rw-rw- 1 root root 18 Oct 3 15:14 README.md -rw-rw-rw- 1 root root 51 Oct 3 15:14 go.mod -rw-rw-rw- 1 root root 258 Oct 3 15:14 long-script-with-cleanup.sh -rwxr-xr-x 1 root root 2.1M Oct 3 15:14 main* -rw-rw-rw- 1 root root 157 Oct 3 15:14 main.go -rw-rw-rw- 1 root root 333 Oct 3 15:14 string_output.sh drwxrwxrwx 2 root root 4.0K Oct 3 15:14 test/ --> e3cce3e2b16a STEP 6/6: CMD ["exec", "main"] COMMIT playground-bis:testing --> 2bf7283ee21d Successfully tagged localhost/playground-bis:testing 2bf7283ee21dd86134fbda06a5835af4b68fe3dc6a3525b96587e14c40d7f1a3 Cleaning up project directory and file based variables 00:01 Job succeeded ``` ### `--privileged`フラグを`false`に設定した状態で、ルートユーザーとしてPodmanを実行します {#run-podman-as-a-root-user-with-the---privileged-flag-set-to-false} 前提要件: - コンテナ内で`fuse-overlayfs`を使用する権限。 以下の手順は、[Kubernetes内でのPodmanの使用方法](https://www.redhat.com/en/blog/podman-inside-kubernetes)の「特権フラグなしのルートレスPodman」セクションから引用したものです。 ルートレスPodmanを実行する場合、システムの設定をいくつか調整することで、特権フラグを削除できます。コンテナは、コンテナ内で`fuse-overlayfs`を使用するために`/dev/fuse`へのアクセスが必要です。 Kubernetesクラスターを実行しているホストでSELinuxも無効にする必要があります。SELinuxは、コンテナ化されたプロセスが、コンテナ内の必要なファイルシステムをマウントできないようにします。 これを実現するには、次のようにします: 1. たとえば、ジョブポッドで使用できるデバイスプラグインを作成します: ```yaml apiVersion: apps/v1 kind: DaemonSet metadata: name: fuse-device-plugin-daemonset namespace: kube-system spec: selector: matchLabels: name: fuse-device-plugin-ds template: metadata: labels: name: fuse-device-plugin-ds spec: hostNetwork: true containers: - image: soolaugust/fuse-device-plugin:v1.0 name: fuse-device-plugin-ctr securityContext: allowPrivilegeEscalation: false capabilities: drop: ["ALL"] volumeMounts: - name: device-plugin mountPath: /var/lib/kubelet/device-plugins volumes: - name: device-plugin hostPath: path: /var/lib/kubelet/device-plugins ``` 1. 
クラスターにGitLab Runnerをインストールするように`config.toml`を設定します。 - `--privileged`フラグを`false`に設定した状態で、`root`ユーザーとして実行するようにジョブポッドを設定します: ```toml allow_privilege_escalation = false [runners.kubernetes.pod_security_context] run_as_non_root = false [runners.kubernetes.build_container_security_context] run_as_user = 0 run_as_group = 0 ``` - [`pod_spec`機能](_index.md#overwrite-generated-pod-specifications)を使用して、ジョブポッドにリソース制限を設定します。`pod_spec`を使用するには、`FF_USE_ADVANCED_POD_SPEC_CONFIGURATION`機能フラグを`true`に設定します。 ```toml [[runners.kubernetes.pod_spec]] name = "device-fuse" patch_type = "strategic" patch = ''' containers: - name: build resources: limits: github.com/fuse: 1 ''' ``` `config.toml`は次のようになります: ```toml [[runners]] [runners.kubernetes] host = "" bearer_token_overwrite_allowed = false pod_termination_grace_period_seconds = 0 namespace = "" namespace_overwrite_allowed = "" pod_labels_overwrite_allowed = "" service_account_overwrite_allowed = "" pod_annotations_overwrite_allowed = "" node_selector_overwrite_allowed = ".*" allow_privilege_escalation = false [runners.kubernetes.pod_security_context] run_as_non_root = false [runners.kubernetes.build_container_security_context] run_as_user = 0 run_as_group = 0 [[runners.kubernetes.services]] [runners.kubernetes.dns_config] [runners.kubernetes.pod_labels] [[runners.kubernetes.pod_spec]] name = "device-fuse" patch_type = "strategic" patch = ''' containers: - name: build resources: limits: github.com/fuse: 1 ''' ``` 1. ジョブを実行して、Podmanでイメージをビルドします。 ```yaml variables: FF_USE_POWERSHELL_PATH_RESOLVER: "true" FF_RETRIEVE_POD_WARNING_EVENTS: "true" FF_PRINT_POD_EVENTS: "true" FF_SCRIPT_SECTIONS: "true" CI_DEBUG_SERVICES: "true" GIT_DEPTH: 5 FF_USE_ADVANCED_POD_SPEC_CONFIGURATION: "true" podman-privileged-test: image: quay.io/podman/stable before_script: - podman info - id script: - podman build . -t playground-bis:testing ``` このジョブは`podman build`を実行し、正常に完了するはずです。 ```shell ... $ podman build . 
-t playground-bis:testing time="2024-11-06T16:57:41Z" level=warning msg="Using cgroups-v1 which is deprecated in favor of cgroups-v2 with Podman v5 and will be removed in a future version. Set environment variable `PODMAN_IGNORE_CGROUPSV1_WARNING` to hide this warning." time="2024-11-06T16:57:41Z" level=warning msg="Using cgroups-v1 which is deprecated in favor of cgroups-v2 with Podman v5 and will be removed in a future version. Set environment variable `PODMAN_IGNORE_CGROUPSV1_WARNING` to hide this warning." STEP 1/6: FROM docker.io/library/golang:1.24.4 AS builder Trying to pull docker.io/library/golang:1.24.4... Getting image source signatures Copying blob sha256:32d3574b34bd65a6cf89a80e5bd939574c7a9bd3efbaa4881292aaca16d3d0dc Copying blob sha256:a47cff7f31e941e78bf63ca19f0811b675283e2c00ddea10c57f78d93b2bc343 Copying blob sha256:cdd62bf39133c498a16f7a7b1b6555ba43d02b2511c508fa4c0a9b1975ffe20e Copying blob sha256:1eb015951d08f558e9805d427f6d30728b0cd94d5c9b9538cd4f7df57598664a Copying blob sha256:a173f2aee8e962ea19db1e418ae84a0c9f71480b51f768a19332dfa83d7722a5 Copying blob sha256:e7bff916ab0c126c9d943f0c481a905f402e00f206a89248f257ef90beaabbd8 Copying blob sha256:4f4fb700ef54461cfa02571ae0db9a0dc1e0cdb5577484a6d75e68dc38e8acc1 Copying config sha256:8027d6b1a7f0702ed8a4174fd022be03f87e35c7a7fa00afb2bf4178b22080d4 Writing manifest to image destination STEP 2/6: WORKDIR "/workspace" --> 94b34d00b2cb STEP 3/6: COPY . . 
--> b807785fe549 STEP 4/6: RUN go build -v main.go internal/goarch internal/unsafeheader internal/cpu internal/abi internal/bytealg internal/byteorder internal/chacha8rand internal/coverage/rtcov internal/godebugs internal/goexperiment internal/goos internal/profilerecord internal/runtime/atomic internal/runtime/syscall internal/runtime/exithook internal/stringslite runtime/internal/math runtime/internal/sys cmp internal/itoa internal/race runtime math/bits math unicode/utf8 sync/atomic unicode internal/asan internal/msan iter internal/reflectlite sync slices internal/bisect errors strconv io internal/oserror path internal/godebug reflect syscall time io/fs internal/fmtsort internal/filepathlite internal/syscall/unix internal/syscall/execenv internal/testlog internal/poll os fmt command-line-arguments --> 5c4fa8b22a3e STEP 5/6: RUN ls -halF total 2.1M drwxr-xr-x 4 root root 18 Nov 6 16:58 ./ dr-xr-xr-x 19 root root 6 Nov 6 16:58 ../ drwxrwxrwx 6 root root 128 Nov 6 16:57 .git/ -rw-rw-rw- 1 root root 743 Nov 6 16:57 .gitlab-ci.yml -rw-rw-rw- 1 root root 1.8K Nov 6 16:57 Dockerfile -rw-rw-rw- 1 root root 74 Nov 6 16:57 Dockerfile_multistage -rw-rw-rw- 1 root root 18 Nov 6 16:57 README.md -rw-rw-rw- 1 root root 51 Nov 6 16:57 go.mod -rw-rw-rw- 1 root root 258 Nov 6 16:57 long-script-with-cleanup.sh -rwxr-xr-x 1 root root 2.1M Nov 6 16:58 main* -rw-rw-rw- 1 root root 157 Nov 6 16:57 main.go -rw-rw-rw- 1 root root 333 Nov 6 16:57 string_output.sh drwxrwxrwx 2 root root 87 Nov 6 16:57 test/ --> 57bb3eb7e929 STEP 6/6: CMD ["exec", "main"] COMMIT playground-bis:testing --> 2cc55d032ba8 Successfully tagged localhost/playground-bis:testing 2cc55d032ba852e05c513e4067b55c10fd697c65e07ffe2aae104e8531702274 Cleaning up project directory and file based variables 00:00 Job succeeded ``` ## OpenShiftで非ルートユーザーとしてPodmanを実行する {#run-podman-as-a-non-root-user-on-openshift} 特権コンテナなしでルートレスPodmanを実行するには、RedHatの記事[GitLab 
RunnerとしてPodmanを使用してOpenShiftでコンテナイメージをビルドする](https://developers.redhat.com/articles/2024/10/01/build-container-images-openshift-using-podman-gitlab-runner)の手順に従ってください。 ## トラブルシューティング {#troubleshooting} ### 非ルートユーザーとしてジョブを実行すると、`git`が`/.gitconfig`に設定を保存できません {#git-cannot-save-the-configuration-in-gitconfig-when-you-run-the-job-as-a-non-root-user} ジョブをルートとして実行していないため、`git`は`/.gitconfig`に設定を保存できません。その結果、次のエラーが発生する可能性があります: ```shell Getting source from Git repository 00:00 error: could not lock config file //.gitconfig: Permission denied ``` このエラーを防ぐには: 1. `emptyDir`ボリュームを`/my_custom_dir`にマウントします。 1. `HOME`環境変数を`/my_custom_dir`パスに設定します。 ================================================ FILE: docs-locale/ja-jp/executors/parallels.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Parallels --- Parallels executorは、macOS上の仮想マシン(VM)でCI/CDジョブを実行するために、[Parallels Desktop](https://www.parallels.com/)仮想化ソフトウェアを使用します。Parallels Desktopは、macOSと並行してWindows、Linux、およびその他のオペレーティングシステムを実行できます。 Parallels executorは、VirtualBox executorと同様に動作します。仮想マシンを作成および管理し、GitLab CI/CDジョブを実行します。各ジョブはクリーンなVM環境で実行され、ビルド間の分離を提供します。設定情報については、[VirtualBox executor](virtualbox.md)を参照してください。 {{< alert type="note" >}} Parallels executorはローカルキャッシュをサポートしていません。[分散キャッシュ](../configuration/speed_up_job_execution.md)がサポートされています。 {{< /alert >}} ================================================ FILE: docs-locale/ja-jp/executors/shell.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Shell executor --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab 
ソースプロジェクトは`<working-directory>/builds/<short-token>/<concurrent-id>/<namespace>/<project-name>`にチェックアウトされます。 プロジェクトのキャッシュは`<working-directory>/cache/<namespace>/<project-name>`に保存されます。 各要素の内容は次のとおりです: - `<working-directory>`は、`gitlab-runner run`コマンドに渡された`--working-directory`の値、またはRunnerが実行されている現在のディレクトリです。 - `<short-token>`は、Runnerのトークンの短縮バージョンです(最初の8文字)。 - `<concurrent-id>`は、プロジェクトのコンテキストで特定のRunnerでローカルジョブIDを識別する一意の番号です([定義済み変数](https://docs.gitlab.com/ci/variables/predefined_variables/)`CI_CONCURRENT_PROJECT_ID`を使用してアクセスできます)。 - `<namespace>`は、GitLabでプロジェクトが保存されているネームスペースです。 - `<project-name>`は、GitLabに保存されているプロジェクトの名前です。 `/builds`と`/cache`ディレクトリを上書きするには、[`config.toml`](../configuration/advanced-configuration.md)の`[[runners]]`セクションで`builds_dir`オプションと`cache_dir`オプションを指定します。 ================================================ FILE: docs-locale/ja-jp/executors/ssh.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: SSH --- {{< details >}}
プロジェクトのソースは`~/builds/<short-token>/<concurrent-id>/<namespace>/<project-name>`にチェックアウトされます。 各要素の内容は次のとおりです: - `<short-token>`は、Runnerのトークンの短縮バージョンです(最初の8文字)。 - `<concurrent-id>`は、プロジェクトのコンテキストで特定のrunner上のローカルジョブIDを識別する一意の番号です。 - `<namespace>`は、GitLabでプロジェクトが保存されているネームスペースです。 - `<project-name>`は、GitLabに保存されているプロジェクトの名前です。
1. 仮想マシンの一意の名前が生成されます:`runner-<short-token>-concurrent-<concurrent-id>`
GitLab Runnerは、仮想マシンを停止またはシャットダウンします ## Windows VMのチェックリスト {#checklist-for-windows-vms} WindowsでVirtualBoxを使用するには、CygwinまたはPowerShellをインストールできます。 ### Cygwinの使用 {#use-cygwin} - [Cygwin](https://cygwin.com/)をインストールします - `sshd`とGitをCygwinからインストールします(*Git for Windows*は使用しないでください。 パスの問題が発生します!) - Git LFSをインストールします - `sshd`を構成し、サービスとしてセットアップします([Cygwin Wiki](https://cygwin.fandom.com/wiki/Sshd)を参照)。 - ポート22で受信TCPトラフィックを許可するように、Windowsファイアウォールのルールを作成します - GitLabサーバーを`~/.ssh/known_hosts`に追加します - CygwinとWindows間でパスを変換するには、[`cygpath`ユーティリティ](https://cygwin.fandom.com/wiki/Cygpath_utility)を使用します ### ネイティブOpenSSHとPowerShellの使用 {#use-native-openssh-and-powershell} - [PowerShell](https://learn.microsoft.com/en-us/powershell/scripting/install/install-powershell-on-windows?view=powershell-7.4)をインストールします - [OpenSSH](https://learn.microsoft.com/en-us/windows-server/administration/openssh/openssh_install_firstuse?tabs=powershell#install-openssh-for-windows)をインストールして構成します - [Git for Windows](https://git-scm.com/)をインストールします - [のデフォルトシェルを`pwsh`として設定](https://learn.microsoft.com/en-us/windows-server/administration/OpenSSH/openssh-server-configuration#configuring-the-default-shell-for-openssh-in-windows)します。正しいフルパスで例を更新します: ```powershell New-ItemProperty -Path "HKLM:\SOFTWARE\OpenSSH" -Name DefaultShell -Value "$PSHOME\pwsh.exe" -PropertyType String -Force ``` - [`config.toml`](../configuration/advanced-configuration.md)にシェル`pwsh`を追加します ================================================ FILE: docs-locale/ja-jp/faq/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runnerのトラブルシューティング --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} このセクションは、GitLab Runnerの問題を解決する際に役立ちます。 ## 
一般的なトラブルシューティングのヒント {#general-troubleshooting-tips} ### ログを表示する {#view-the-logs} GitLab Runnerサービスはログをsyslogに送信します。ログを表示するには、ディストリビューションのドキュメントを参照してください。ディストリビューションに`journalctl`コマンドが含まれている場合は、そのコマンドを使用してログを表示できます: ```shell journalctl --unit=gitlab-runner.service -n 100 --no-pager docker logs gitlab-runner-container # Docker kubectl logs gitlab-runner-pod # Kubernetes ``` ### サービスを再起動する {#restart-the-service} ```shell systemctl restart gitlab-runner.service ``` ### Docker Machineを表示する {#view-the-docker-machines} ```shell sudo docker-machine ls sudo su - && docker-machine ls ``` ### すべてのDocker Machineを削除する {#delete-all-docker-machines} ```shell docker-machine rm $(docker-machine ls -q) ``` ### `config.toml`に変更を適用する {#apply-changes-to-configtoml} ```shell systemctl restart gitlab-runner.service docker-machine rm $(docker-machine ls -q) # Docker machine journalctl --unit=gitlab-runner.service -f # Tail the logs to check for potential errors ``` ## GitLabおよびGitLab Runnerのバージョンを確認する {#confirm-your-gitlab-and-gitlab-runner-versions} GitLabは[下位互換性を保証](../_index.md#gitlab-runner-versions)することを目標としています。ただし、最初のトラブルシューティング手順として、GitLab RunnerのバージョンがGitLabのバージョンと同じであることを確認する必要があります。 ## `coordinator`について {#what-does-coordinator-mean} `coordinator`は、ジョブのリクエスト元であるGitLabインストールのことです。 つまりRunnerは、`coordinator`(GitLab APIを介したGitLabインストール)からジョブをリクエストする、分離されたエージェントです。 ## Windowsでサービスとして実行する場合にログはどこに保存されますか? 
{#where-are-logs-stored-when-run-as-a-service-on-windows} - GitLab RunnerがWindowsでサービスとして実行されている場合、システムイベントログが作成されます。これらを表示するには、イベントビューアーを開きます(「ファイル名を指定して実行」メニューで`eventvwr.msc`と入力するか、「イベントビューアー」を検索します)。次に、**Windows Logs > Application**に移動します。Runnerログの**ソース**は`gitlab-runner`です。Windows Server Coreを使用している場合は、PowerShellコマンド`get-eventlog Application -Source gitlab-runner -Newest 20 | format-table -wrap -auto`を実行して、最後の20件のログエントリを取得します。 ## デバッグログ生成モードを有効にする {#enable-debug-logging-mode} {{< alert type="warning" >}} デバッグログ生成は、重大なセキュリティリスクとなる可能性があります。出力には、ジョブで使用可能なすべての変数およびその他のシークレットの内容が含まれます。サードパーティにシークレットを送信する可能性のあるログ集計はすべて無効にする必要があります。マスクされた変数を使用すると、ジョブログ出力ではシークレットを保護できますが、コンテナログでは保護できません。 {{< /alert >}} ### コマンドライン {#in-the-command-line} rootとしてログインしたターミナルから、以下を実行します。 {{< alert type="warning" >}} このコマンドは`systemd`サービスを再定義し、すべてのジョブをrootとして実行するため、[Shell executor](../executors/shell.md)を使用するRunnerでは実行しないでください。これはセキュリティ上のリスクをもたらし、特権なしのアカウントに戻すことが困難になるファイル所有権の変更につながります。 {{< /alert >}} ```shell gitlab-runner stop gitlab-runner --debug run ``` ### GitLab Runner `config.toml`内 {#in-the-gitlab-runner-configtoml} デバッグログ生成を有効にするには、[`config.toml`のグローバルセクション](../configuration/advanced-configuration.md#the-global-section)で`log_level`を`debug`に設定します。`config.toml`の最上部で、concurrent行の前または後に次の行を追加します: ```toml log_level = "debug" ``` ### Helmチャート内 {#in-the-helm-chart} [GitLab Runner Helmチャート](../install/kubernetes.md)を使用してKubernetesクラスターにGitLab Runnerがインストールされている場合、デバッグログ生成を有効にするには、[`values.yaml`のカスタマイズ](../install/kubernetes.md#configure-gitlab-runner-with-the-helm-chart)で`logLevel`オプションを設定します: ```yaml ## Configure the GitLab Runner logging level. 
Available values are: debug, info, warn, error, fatal, panic ## ref: https://docs.gitlab.com/runner/configuration/advanced-configuration/#the-global-section ## logLevel: debug ``` ## Docker executor RunnerのDNSを設定する {#configure-dns-for-a-docker-executor-runner} Docker executorでGitLab Runnerを設定すると、ホストRunnerデーモンがGitLabにアクセスできてもDockerコンテナがアクセスできない場合があります。これは、ホストでDNSが設定されていても、その設定がコンテナに渡されない場合に発生する可能性があります。 **例**: GitLabサービスとGitLab Runnerが、2種類の方法(インターネット経由とVPN経由など)でブリッジされる2つの異なるネットワークに存在しています。Runnerのルーティングメカニズムでは、VPN経由のDNSサービスではなく、デフォルトのインターネットサービスを介してDNSをクエリする可能性があります。この設定を使用すると、次のメッセージが表示されます: ```shell Created fresh repository. ++ echo 'Created fresh repository.' ++ git -c 'http.userAgent=gitlab-runner 16.5.0 linux/amd64' fetch origin +da39a3ee5e6b4b0d3255bfef95601890afd80709:refs/pipelines/435345 +refs/heads/master:refs/remotes/origin/master --depth 50 --prune --quiet fatal: Authentication failed for 'https://gitlab.example.com/group/example-project.git/' ``` この場合の認証の失敗の原因は、インターネットとGitLabサービスの間にあるサービスにあります。このサービスは個別の認証情報を使用しており、RunnerがVPN経由でDNSサービスを使用した場合は、Runnerがそれらの認証情報を回避している可能性があります。 使用するDNSサーバーをDockerに指示するには、[Runnerの`config.toml`ファイル](../configuration/advanced-configuration.md#the-runnersdocker-section)の`[runners.docker]`セクションで`dns`設定を使用します。 ```toml dns = ["192.168.xxx.xxx","192.168.xxx.xxx"] ``` ## `x509: certificate signed by unknown authority`が表示される {#im-seeing-x509-certificate-signed-by-unknown-authority} 詳細については、[自己署名証明書](../configuration/tls-self-signed.md)を参照してください。 ## `/var/run/docker.sock`へアクセスするときに`Permission Denied`が表示される {#i-get-permission-denied-when-accessing-the-varrundockersock} Docker executorを使用する場合に、サーバーにインストールされているDocker Engineに接続しているとします。この場合には`Permission Denied`エラーが表示されることがあります。最も可能性が高い原因は、システムがSELinuxを使用していることです(CentOS、Fedora、RHELではデフォルトで有効になっています)。システムでSELinuxポリシーを調べて、拒否がないか確認してください。 ## Docker-machineエラー: `Unable to query docker version: Cannot connect to the docker engine endpoint.` 
{#docker-machine-error-unable-to-query-docker-version-cannot-connect-to-the-docker-engine-endpoint} このエラーはマシンのプロビジョニングに関連しており、次の原因が考えられます: - TLSエラーが発生している。`docker-machine`がインストールされている場合、一部の証明書が無効になっている可能性があります。このイシューを解決するには、証明書を削除してRunnerを再起動します: ```shell sudo su - rm -r /root/.docker/machine/certs/* service gitlab-runner restart ``` 再起動したRunnerは、証明書が空であると認識し、証明書を再作成します。 - ホスト名が、プロビジョニングされたマシンでサポートされている長さを超えている。たとえば、Ubuntuマシンでの`HOST_NAME_MAX`の文字数制限は64文字です。ホスト名は`docker-machine ls`によって報告されます。Runner設定で`MachineName`を確認し、必要に応じてホスト名を短くします。 {{< alert type="note" >}} このエラーは、Dockerがマシンにインストールされる前に発生していた可能性があります。 {{< /alert >}} ## `dialing environment connection: ssh: rejected: connect failed (open failed)` {#dialing-environment-connection-ssh-rejected-connect-failed-open-failed} このエラーは、SSH経由で接続をトンネルしているときに、Docker autoscalerがターゲットシステムのDockerデーモンに到達できない場合に発生します。ターゲットシステムにSSHで接続し、`docker info`などのDockerコマンドを正常に実行できることを確認します。 ## オートスケールされたRunnerにAWSインスタンスプロファイルを追加する {#adding-an-aws-instance-profile-to-your-autoscaled-runners} AWS IAMロールを作成した後、IAMコンソールではそのロールに**Role ARN**(ロールARN)と**Instance Profile ARNs**(インスタンスプロファイルARN)があります。**Role Name**(ロール名)ではなく、**Instance Profile**(インスタンスプロファイル)の名前を使用する必要があります。 `[runners.machine]`セクションに値`"amazonec2-iam-instance-profile=<instance-profile-name>",`を追加します。 ## Javaプロジェクトのビルド時にDocker executorがタイムアウトになる {#the-docker-executor-gets-timeout-when-building-java-project} 最も可能性が高い原因は、破損した`aufs`ストレージドライバーです: [Javaプロセスがコンテナ内でハングアップ](https://github.com/moby/moby/issues/18502)します。最適な解決策は、[ストレージドライバー](https://docs.docker.com/engine/storage/drivers/select-storage-driver/)をOverlayFSまたはDeviceMapper(低速)のいずれかに変更することです。 [Dockerの設定と実行に関する記事](https://docs.docker.com/engine/daemon/) 、または[systemdによる制御と設定に関する記事](https://docs.docker.com/engine/daemon/proxy/#systemd-unit-file)を確認してください。 ## アーティファクトのアップロード時に411が表示される {#i-get-411-when-uploading-artifacts} GitLab Runnerが`Transfer-Encoding: chunked`を使用していることが原因で発生します。これは、以前のバージョンのNGINXで破損しています。
NGINXを新しいバージョンにアップグレードしてください。詳細については、イシューを参照してください。 ## 他のアーティファクトのアップロードエラーが発生しています。このエラーを詳しくデバッグするにはどうすればよいですか? {#i-am-seeing-other-artifact-upload-errors-how-can-i-further-debug-this} アーティファクトは、GitLab Runnerプロセスを回避して、ビルド環境からGitLabインスタンスに直接アップロードされます。次に例を示します: - Docker executorの場合、アップロードはDockerコンテナから行われます - Kubernetes executorの場合、アップロードはビルドポッドのビルドコンテナから行われます ビルド環境からGitLabインスタンスへのネットワークルートは、GitLab RunnerからGitLabインスタンスへのルートとは異なる場合があります。 アーティファクトのアップロードを有効にするには、アップロードパス内のすべてのコンポーネントが、ビルド環境からGitLabインスタンスへのPOSTリクエストを許可していることを確認します。 デフォルトでは、アーティファクトアップローダーはアップロードURLとアップロード応答のHTTPステータスコードをログに記録します。この情報だけでは、どのシステムがエラーを引き起こしたか、またはアーティファクトのアップロードをブロックしたかを理解するには不十分です。アーティファクトのアップロードの問題を解決するには、アップロード応答のヘッダーと本文を確認するために、アップロード試行で[デバッグログ生成を有効にします](https://docs.gitlab.com/ci/variables/#enable-debug-logging)。 {{< alert type="note" >}} アーティファクトのアップロードのデバッグログの応答本文の長さは、512バイトに制限されています。機密データがログに公開される可能性があるため、ログ生成はデバッグ目的でのみ有効にしてください。 {{< /alert >}} アップロードがGitLabに到達してもエラー状態コードで失敗する場合(たとえば、成功以外の応答ステータスコードが生成される場合)は、GitLabインスタンス自体を調べます。一般的なアーティファクトのアップロードのイシューについては、[GitLabドキュメント](https://docs.gitlab.com/administration/cicd/job_artifacts_troubleshooting/#job-artifact-upload-fails-with-error-500)を参照してください。 ## `No URL provided, cache will not be download`/`uploaded` {#no-url-provided-cache-will-not-be-downloaduploaded} このエラーは、GitLab Runnerヘルパーが無効なURLを受信するか、リモートキャッシュにアクセスするための事前署名付きURLがない場合に発生します。[`config.toml`のキャッシュ関連のエントリ](../configuration/advanced-configuration.md#the-runnerscache-section)と、プロバイダー固有のキーと値を確認します。URL構文の要件に従っていないアイテムから無効なURLが作成される可能性があります。 また、ヘルパー`image`と`helper_image_flavor`が一致し、最新であることを確認してください。 認証情報の設定に問題がある場合は、診断エラーメッセージがGitLab Runnerプロセスログに追加されます。 ## エラー: `warning: You appear to have cloned an empty repository.` {#error-warning-you-appear-to-have-cloned-an-empty-repository} HTTP(S)を使用して`git clone`を実行すると(GitLab Runnerを使用するか、テスト用に手動で実行)、次の出力が表示されます: ```shell $ git clone https://git.example.com/user/repo.git Cloning into 'repo'... 
warning: You appear to have cloned an empty repository. ``` GitLabサーバーのインストールでHTTPプロキシ設定が正しく行われていることを確認してください。独自の設定でHTTPプロキシを使用する場合は、リクエストが**GitLab Workhorse socket**(GitLab Workhorseソケット)ではなく**GitLab Unicorn socket**(GitLab Unicornソケット)にプロキシされることを確認してください。 HTTP(S)を介したGitプロトコルはGitLab Workhorseによって解決されるため、これはGitLabの**main entrypoint**(メインエントリポイント)です。 Linuxパッケージのインストールを使用しているが、バンドルされているNGINXサーバーを使用したくない場合は、[バンドルされていないWebサーバーを使用する](https://docs.gitlab.com/omnibus/settings/nginx/#use-a-non-bundled-web-server)を参照してください。 GitLabレシピリポジトリには、ApacheとNGINXの[Webサーバー設定の例](https://gitlab.com/gitlab-org/gitlab-recipes/tree/master/web-server)があります。 ソースからインストールされたGitLabを使用している場合は、上記のドキュメントと例を参照してください。すべてのHTTP(S)トラフィックが**GitLab Workhorse**を経由していることを確認してください。 [ユーザーイシューの例](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/1105)を参照してください。 ## エラー: `Timezone`または`OffPeakTimezone`の使用時に`zoneinfo.zip: no such file or directory`エラーが発生する {#error-zoneinfozip-no-such-file-or-directory-error-when-using-timezone-or-offpeaktimezone} `[[docker.machine.autoscaling]]`の期間が記述されているタイムゾーンを設定できます。この機能は、ほとんどのUnixシステムですぐに動作するはずです。ただし、一部のUnixシステムとほとんどの非Unixシステム(GitLab Runnerバイナリが利用可能なWindowsなど)では、Runnerが起動時に次のエラーでクラッシュする可能性があります: ```plaintext Failed to load config Invalid OffPeakPeriods value: open /usr/local/go/lib/time/zoneinfo.zip: no such file or directory ``` このエラーは、Goの`time`パッケージが原因で発生します。GoはIANA Time Zoneデータベースを使用して、指定されたタイムゾーンの設定を読み込みます。ほとんどのUnixシステムでは、このデータベースは、既知のパス(`/usr/share/zoneinfo`、`/usr/share/lib/zoneinfo`、`/usr/lib/locale/TZ/`)のいずれかにすでに存在しています。Goの`time`パッケージは、これら3つのパスすべてでTime Zoneデータベースを検索します。いずれも見つからないが、マシンに設定済みのGo開発環境がある場合は、`$GOROOT/lib/time/zoneinfo.zip`ファイルにフォールバックします。 これらのパスがいずれも存在しない場合(本番環境のWindowsホスト上など)は、上記のエラーがスローされます。 システムがIANA Time Zoneデータベースをサポートしているが、デフォルトでは利用できない場合は、このデータベースをインストールしてみることができます。Linuxシステムでは、次のような方法でこのインストールを実行できます: ```shell # on Debian/Ubuntu based systems sudo apt-get install tzdata # on RPM based systems sudo yum install tzdata # on Linux Alpine sudo apk add 
-U tzdata ``` システムがこのデータベースを_ネイティブ_な方法で提供していない場合は、次の手順に従って`OffPeakTimezone`を動作させることができます: 1. [`zoneinfo.zip`](https://gitlab-runner-downloads.s3.amazonaws.com/latest/zoneinfo.zip)をダウンロードします。バージョンv9.1.0以降では、タグ付けされたパスからファイルをダウンロードできます。この場合は、`zoneinfo.zip`ダウンロードURLで`latest`をタグ名(`v9.1.0`など)に置き換える必要があります。 1. このファイルを既知のディレクトリに保存します。`config.toml`ファイルが存在するディレクトリを使用することをお勧めします。たとえば、WindowsマシンでRunnerをホスティングしていて、設定ファイルが`C:\gitlab-runner\config.toml`に保存されている場合は、`zoneinfo.zip`を`C:\gitlab-runner\zoneinfo.zip`に保存します。 1. `zoneinfo.zip`ファイルのフルパスを含む`ZONEINFO`環境変数を設定します。`run`コマンドを使用してRunnerを起動する場合は、次のようにします: ```shell ZONEINFO=/etc/gitlab-runner/zoneinfo.zip gitlab-runner run ``` Windowsを使用している場合は次のようにします: ```powershell C:\gitlab-runner> set ZONEINFO=C:\gitlab-runner\zoneinfo.zip C:\gitlab-runner> gitlab-runner run ``` GitLab Runnerをシステムサービスとして起動する場合は、サービス設定を更新または上書きする必要があります: - Unixシステムでは、サービスマネージャーソフトウェアで設定を変更します。 - Windowsでは、システム設定でGitLab Runnerユーザーが利用できる環境変数のリストに`ZONEINFO`変数を追加します。 ## 複数のGitLab Runnerインスタンスを実行できないのはなぜですか? 
{#why-cant-i-run-more-than-one-instance-of-gitlab-runner} 同じ`config.toml`ファイルを共有していなければ実行できます。 同じ設定ファイルを使用する複数のGitLab Runnerインスタンスを実行すると、デバッグが難しい予期しない動作が発生する可能性があります。一度に1つのGitLab Runnerインスタンスのみが特定の`config.toml`ファイルを使用できます。 ## ジョブの開始前に遅延が発生する {#jobs-experience-delays-before-starting} 一部のプロジェクトのジョブで開始前に大幅な遅延が発生し、他のプロジェクトのジョブがすぐに実行される場合、longポーリングの問題が発生している可能性があります。 **Symptoms:**(症状:) - ジョブはキューに入れられていますが、実行の開始に異常に長い時間がかかります(通常、GitLabインスタンスのlongポーリングタイムアウトに一致します)。 - 一部のRunnerは停止しているように見えますが、他のRunnerはジョブを正常に処理します。 - GitLab Runnerのログに`CONFIGURATION: Long polling issues detected`と表示されます。 **Cause:**(原因:) このイシューは、GitLab RunnerワーカーがGitLabへのlongポーリングリクエストで停止し、他のジョブが迅速に処理されるのを妨げる場合に発生します。これらのイシューは、設定に応じて、パフォーマンスのボトルネックから完全なデッドロックまで多岐にわたります。このイシューは、GitLab Workhorse `apiCiLongPollingDuration`設定(デフォルト: 50秒)によって制御されるGitLab CI/CD longポーリング機能に関連しています。 **Solution:**(解決策:) これらのイシューは、いくつかの設定シナリオで発生する可能性があります。原因、設定例、および解決策に関する包括的な情報については、高度な設定ドキュメントの[Longポーリングのイシュー](../configuration/advanced-configuration.md#long-polling-issues)セクションを参照してください。 ## `Job failed (system failure): preparing environment:` {#job-failed-system-failure-preparing-environment} このエラーは多くの場合、Shellによる[プロファイルの読み込み](../shells/_index.md#shell-profile-loading)が原因で発生します。スクリプトの1つが失敗の原因となっています。 失敗の原因となることが判明している`dotfiles`の例: - `.bash_logout` - `.condarc` - `.rvmrc` SELinuxもこのエラーの原因となる可能性があります。これは、SELinux監査ログを調べることで確認できます: ```shell sealert -a /var/log/audit/audit.log ``` ## Runnerが`Cleaning up`ステージの後に突然終了する {#runner-abruptly-terminates-after-cleaning-up-stage} 「コンテナドリフト検出」設定が有効になっている場合に、ジョブの`Cleaning up files`ステージの後でCrowdStrike Falcon Sensorがポッドを強制終了することが報告されています。ジョブを確実に完了できるようにするには、この設定を無効にする必要があります。 ## ジョブが`remote error: tls: bad certificate (exec.go:71:0s)`で失敗する {#job-fails-with-remote-error-tls-bad-certificate-execgo710s} このエラーは、アーティファクトを作成するジョブの実行中にシステム時刻が大幅に変更された場合に発生する可能性があります。システム時刻が変更されたため、SSL証明書の有効期限が切れ、Runnerがアーティファクトをアップロードしようとするとエラーが発生します。
アーティファクトのアップロード中にSSL検証が確実に成功するようにするには、ジョブの終わりにシステム時刻を有効な日付と時刻に変更します。アーティファクトファイルの作成時刻も変更されているため、アーティファクトファイルは自動的にアーカイブされます。 ## Helmチャート: `ERROR .. Unauthorized` {#helm-chart-error--unauthorized} HelmでデプロイされたRunnerをアンインストールまたはアップグレードする前に、GitLabでRunnerを一時停止し、ジョブが完了するまで待ちます。 ジョブの実行中に`helm uninstall`または`helm upgrade`を使用してRunnerポッドを削除すると、ジョブが完了したときに、次のような`Unauthorized`エラーが発生する可能性があります: ```plaintext ERROR: Error cleaning up pod: Unauthorized ERROR: Error cleaning up secrets: Unauthorized ERROR: Job failed (system failure): Unauthorized ``` これはおそらく、Runnerが削除されるとロールバインドが削除されることが原因で発生します。Runnerポッドはジョブが完了するまで継続し、その後、RunnerがRunnerポッドを削除しようとします。ロールバインドがないと、Runnerポッドはアクセスできなくなります。 詳細については、[このイシュー](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/issues/225)を参照してください。 ## Elasticsearchサービスコンテナの起動エラー`max virtual memory areas vm.max_map_count [65530] is too low, increase to at least [262144]` {#elasticsearch-service-container-startup-error-max-virtual-memory-areas-vmmax_map_count-65530-is-too-low-increase-to-at-least-262144} Elasticsearchには、Elasticsearchが実行されるインスタンスで設定する必要がある`vm.max_map_count`要求事項があります。 プラットフォームに応じてこの値を正しく設定する方法については、[Elasticsearchドキュメント](https://www.elastic.co/docs/deploy-manage/deploy/self-managed/install-elasticsearch-docker-prod)を参照してください。 ## エラー: `Preparing the "docker+machine" executor ERROR: Preparation failed: exit status 1 Will be retried in 3s` {#error-preparing-the-dockermachine-executor-error-preparation-failed-exit-status-1-will-be-retried-in-3s} このエラーは、Docker Machineがexecutor仮想マシンを正常に作成できない場合に発生する可能性があります。このエラーに関する詳細情報を取得するには、`config.toml`で定義した`MachineOptions`を使用して、仮想マシンを手動で作成します。 例: `docker-machine create --driver=google --google-project=GOOGLE-PROJECT-ID --google-zone=GOOGLE-ZONE ...`。 ## エラー: `No unique index found for name` {#error-no-unique-index-found-for-name} このエラーは、Runnerを作成または更新するときに、データベースに`tags`テーブルの一意のインデックスがない場合に発生する可能性があります。GitLab UIで`Response not successful: Received status code 500`エラーが発生する場合があります。 
このイシューは、長期間にわたって複数のメジャーアップグレードが行われたインスタンスに影響を与える可能性があります。このイシューを解決するには、[`gitlab:db:deduplicate_tags` Rakeタスク](https://docs.gitlab.com/administration/raketasks/maintenance/#check-the-database-for-deduplicate-cicd-tags)を使用して、テーブル内の重複するタグを統合します。詳細については、[Rakeタスク](https://docs.gitlab.com/administration/raketasks/)を参照してください。 ================================================ FILE: docs-locale/ja-jp/fleet_scaling/_index.md ================================================ --- stage: Verify group: CI Functions Platform info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: インスタンスRunnerまたはグループRunnerのRunnerフリートを計画および運用する --- 共有サービスモデルでRunnerフリートをスケールする際に、これらのベストプラクティスと推奨事項を適用します。 インスタンスRunnerフリートをホストする場合は、以下を考慮して十分に計画されたインフラストラクチャが必要です: - コンピューティングキャパシティ。 - ストレージキャパシティ。 - ネットワークの帯域幅とスループット。 - ジョブの種類(プログラミング言語、OSプラットフォーム、依存関係ライブラリなど)。 これらの推奨事項を参考に、組織の要件に基づいたGitLab Runnerのデプロイ戦略を策定してください。 ## ワークロードと環境を検討する {#consider-your-workload-and-environment} Runnerをデプロイする前に、ワークロードと環境の要件を検討してください。 - GitLabにオンボードする予定のチームのリストを作成します。 - 組織で使用しているプログラミング言語、Webフレームワーク、およびライブラリをカタログ化します。たとえば、Go、C++、PHP、Java、Python、JavaScript、React、Node.jsなどです。 - 各チームが1日あたり、1時間ごとに実行するCI/CDジョブの数を推定します。 - いずれかのチームに、コンテナを使用しても対処できないビルド環境要件があるかどうかを検証します。 - いずれかのチームに、チーム専用のRunnerを用意することで最適に対応できるビルド環境要件があるかどうかを検証します。 - 予想される需要に対応するために必要なコンピューティングキャパシティを見積もります。 さまざまなRunnerフリートをホストするために、異なるインフラストラクチャスタックを選択できます。たとえば、パブリッククラウドにデプロイすることの必要なRunnerと、オンプレミスにデプロイすることの必要なRunnerがあるかもしれません。 RunnerフリートでのCI/CDジョブのパフォーマンスは、フリートの環境に直接関係しています。大量のリソースを消費するCI/CDジョブを多数実行している場合、共有コンピューティングプラットフォームでRunnerフリートをホスティングすることはお勧めできません。 ## Runner、executor、およびオートスケール機能 {#runners-executors-and-autoscaling-capabilities} `gitlab-runner`実行可能ファイルはCI/CDジョブを実行します。各Runnerは、ジョブ実行のリクエストを取得し、事前定義された設定に従って処理する分離プロセスです。各Runnerは分離プロセスとして、ジョブを実行するための「サブプロセス」(「ワーカー」とも呼ばれる)を作成できます。 ### 並行処理数と制限 {#concurrency-and-limit} - 
[並行処理数](../configuration/advanced-configuration.md#the-global-section): ホストシステムで設定済みのすべてのRunnerを使用している場合に、同時実行できるジョブの数を設定します。 - [制限](../configuration/advanced-configuration.md#the-runners-section): Runnerがジョブの同時実行のために作成できるサブプロセスの数を設定します。 この制限は、(Docker MachineやKubernetesのような)オートスケールRunnerと、オートスケールしないRunnerでは異なります。 - オートスケールしないRunnerの場合、`limit`はホストシステムのRunnerのキャパシティを定義します。 - オートスケールRunnerの場合、`limit`は実行するRunnerの合計数です。 `concurrency`、`limit`、および`request_concurrency`がどのように連携してジョブフローを制御するかについて詳しくは、[GitLab Runnerの並行処理チューニングに関するKB記事](https://support.gitlab.com/hc/en-us/articles/21324350882076-GitLab-Runner-Concurrency-Tuning-Understanding-request-concurrency)をご覧ください。 ### 基本設定: 1つのRunnerマネージャー、1つのRunner {#basic-configuration-one-runner-manager-one-runner} 最も基本的な設定では、サポートされているコンピューティングアーキテクチャとオペレーティングシステムにGitLab Runnerソフトウェアをインストールします。たとえば、Ubuntu Linuxを実行しているx86-64仮想マシン(VM)があるとします。 インストールが完了したら、Runnerの登録コマンドを1回だけ実行し、`shell` executorを選択します。次にRunnerの`config.toml`ファイルを編集して、並行処理数を`1`に設定します。 ```toml concurrent = 1 [[runners]] name = "instance-level-runner-001" url = "" token = "" executor = "shell" ``` このRunnerが処理できるGitLab CI/CDジョブは、Runnerをインストールしたホストシステム上で直接実行されます。これは、ターミナルでCI/CDジョブコマンドを自分で実行する場合と同様です。この場合、登録コマンドを実行したのは1回だけなので、`config.toml`ファイルに含まれる`[[runners]]`セクションは1つだけです。並行処理数の値を`1`に設定した場合、1つのRunner「ワーカー」のみがこのシステムのRunnerプロセスでCI/CDジョブを実行できます。 ### 中程度の設定: 1つのRunnerマネージャー、複数のRunner {#intermediate-configuration-one-runner-manager-multiple-runners} 同じマシンに複数のRunnerを登録することもできます。このように登録すると、Runnerの`config.toml`ファイルに複数の`[[runners]]`セクションが含まれます。追加のすべてのRunnerワーカーがShell executorを使用している場合に、グローバルの`concurrent`設定の値を`3`に更新すると、ホストは一度に最大3つのジョブを実行できます。 ```toml concurrent = 3 [[runners]] name = "instance_level_shell_001" url = "" token = "" executor = "shell" [[runners]] name = "instance_level_shell_002" url = "" token = "" executor = "shell" [[runners]] name = "instance_level_shell_003" url = "" token = "" executor = "shell" ``` 
同じマシンに複数のRunnerワーカーを登録でき、各ワーカーは分離プロセスになります。各ワーカーのCI/CDジョブのパフォーマンスは、ホストシステムのコンピューティングキャパシティに依存します。 ### オートスケール設定: 1つ以上のRunnerマネージャー、複数のワーカー {#autoscaling-configuration-one-or-more-runner-managers-multiple-workers} オートスケール用にGitLab Runnerがセットアップされている場合、1つのRunnerが他のRunnerのマネージャーとして機能するように設定できます。これは、`docker-machine` executorまたは`kubernetes` executorで行うことができます。このようなマネージャーのみの設定では、Runnerエージェント自体はCI/CDジョブを実行しません。 #### Docker Machine executor {#docker-machine-executor} [Docker Machine Executor](../executors/docker_machine.md)を使用する場合、次のようになります: - Runnerマネージャーは、Dockerを使用してオンデマンドの仮想マシンインスタンスをプロビジョニングします。 - これらのVMで、GitLab Runnerは、`.gitlab-ci.yml`ファイルに指定されているコンテナイメージを使用して、CI/CDジョブを実行します。 - さまざまなマシンタイプでCI/CDジョブのパフォーマンスをテストする必要があります。 - スピードまたはコストに基づいてコンピューティングホストを最適化することを検討する必要があります。 #### Kubernetes executor {#kubernetes-executor} [Kubernetes executor](../executors/kubernetes/_index.md)を使用する場合、次のようになります: - Runnerマネージャーが、ターゲットのKubernetesクラスターでポッドをプロビジョニングします。 - CI/CDジョブは、複数のコンテナで構成される各ポッドで実行されます。 - ジョブの実行に使用されるポッドは通常、Runnerマネージャーをホストするポッドよりも多くのコンピューティングとメモリリソースを必要とします。 #### Runner設定を再利用する {#reusing-a-runner-configuration} 同じRunner認証トークンに関連付けられている各Runnerマネージャーには、`system_id`識別子が割り当てられます。`system_id`は、Runnerが使用されているマシンを識別します。同じ認証トークンで登録されたRunnerは、一意の`system_id.`によって1つのRunnerエントリにグループ化されます。 類似するRunnerを1つの設定にグループ化すると、Runnerフリートのオペレーションが簡素化されます。 類似するRunnerを1つの設定にグループ化できるシナリオの例を次に示します: プラットフォーム管理者は、タグ`docker-builds-2vCPU-8GB`を使用して、基盤となる仮想マシンインスタンスサイズ(2 vCPU、8 GB RAM)が同じである複数のRunnerを指定する必要があります。高可用性またはスケーリングのために、このようなRunnerが少なくとも2つ必要です。UIで2つの個別のRunnerエントリを作成する代わりに、管理者は、同じコンピューティングインスタンスサイズを持つすべてのRunnerに対して1つのRunner設定を作成できます。複数のRunnerを登録するために、Runner設定に認証トークンを再利用できます。登録された各Runnerは`docker-builds-2vCPU-8GB`タグを継承します。1つのRunner設定のすべての子Runnerに対して、`system_id`は固有識別子として機能します。 グループにまとめられたRunnerは、複数のRunnerマネージャーによってさまざまなジョブを実行するために再利用できます。 GitLab 
Runnerは、起動時、または設定の保存時に`system_id`を生成します。`system_id`は、[`config.toml`](../configuration/advanced-configuration.md)と同じディレクトリ内の`.runner_system_id`ファイルに保存され、ジョブログとRunner管理ページに表示されます。 ##### `system_id`識別子を生成する {#generating-system_id-identifiers} GitLab Runnerは`system_id`を生成するために、ハードウェア識別子(一部のLinuxディストリビューションの`/etc/machine-id`など)から一意のシステム識別子を派生しようと試みます。この操作が成功しなかった場合、GitLab Runnerはランダムな識別子を使用して`system_id`を生成します。 `system_id`には、次のいずれかのプレフィックスが付いています: - `r_`: GitLab Runnerがランダムな識別子を割り当てました。 - `s_`: GitLab Runnerがハードウェア識別子から一意のシステム識別子を割り当てました。 たとえば、`system_id`がイメージにハードコードされないように、コンテナイメージを作成する際にこの点を考慮することが重要です。`system_id`がハードコーディングされている場合、特定のジョブを実行しているホストを区別できません。 ##### RunnerとRunnerマネージャーを削除する {#delete-runners-and-runner-managers} Runner登録トークン(非推奨)を使用して登録されたRunnerとRunnerマネージャーを削除するには、`gitlab-runner unregister`コマンドを使用します。 Runner認証トークンを使用して作成されたRunnerとRunnerマネージャーを削除するには、[UI](https://docs.gitlab.com/ci/runners/runners_scope/#delete-instance-runners)または[API](https://docs.gitlab.com/api/runners/#delete-a-runner)を使用します。Runner認証トークンを使用して作成されたRunnerは再利用可能な設定であり、複数のマシンで再利用できます。[`gitlab-runner unregister`](../commands/_index.md#gitlab-runner-unregister)コマンドを使用すると、Runnerマネージャーのみが削除され、Runnerは削除されません。 ## インスタンスRunnerを設定する {#configure-instance-runners} 効率的かつ効果的な開始方法は、オートスケール設定(Runnerが「Runnerマネージャー」として機能する設定)でインスタンスRunnerを使用することです。 VMまたはポッドをホストするインフラストラクチャスタックのコンピューティングキャパシティは、以下の条件によって異なります: - ワークロードと環境を検討する際に特定した要件。 - Runnerフリートをホストするために使用するテクノロジースタック。 CI/CDワークロードの実行と、経時的なパフォーマンスの分析を開始した後で、場合によってはコンピューティングキャパシティを調整する必要があります。 インスタンスRunnerとオートスケールexecutorを使用する設定では、最小限の2つのRunnerマネージャーで開始する必要があります。 時間の経過とともに必要になるRunnerマネージャーの合計数は、以下の条件によって異なります: - Runnerマネージャーをホストするスタックのコンピューティングリソース。 - 各Runnerマネージャーに設定する並行処理数。 - 各マネージャーが毎時、毎日、毎月実行するCI/CDジョブによって生成される負荷。 たとえばGitLab.comでは、Docker Machine Executorで7つのRunnerマネージャーを実行します。各CI/CDジョブは、Google Cloud Platform(GCP)`n1-standard-1` VMで実行されます。この設定では、毎月数百万件のジョブを処理します。 ## Runnerのモニタリング {#monitoring-runners} 
大規模なRunnerフリートを運用する上で不可欠なステップは、GitLabに含まれている[Runnerモニタリング](../monitoring/_index.md)機能をセットアップして使用することです。 次の表に、GitLab Runnerメトリクスの概要を示します。このリストには、Go固有のプロセスメトリクスは含まれていません。Runnerでこれらのメトリクスを表示するには、[利用可能なメトリクス](../monitoring/_index.md#available-metrics)に示されているようにコマンドを実行します。 | メトリクス名 | 説明 | |----------------------------------------------------------------|-------------| | `gitlab_runner_api_request_statuses_total` | Runner、エンドポイント、状態に基づいてパーティショニングされたAPIリクエストの総数。 | | `gitlab_runner_autoscaling_machine_creation_duration_seconds` | マシン作成時間のヒストグラム。 | | `gitlab_runner_autoscaling_machine_states` | このプロバイダーの状態別のマシンの数。 | | `gitlab_runner_concurrent` | 同時実行設定の値。 | | `gitlab_runner_errors_total` | キャッチされたエラーの数。このメトリクスは、ログの行を追跡するカウンターです。このメトリクスには`level`というラベルが含まれています。使用可能な値は`warning`と`error`です。このメトリクスを含める場合は、監視時に`rate()`または`increase()`を使用してください。つまり、警告またはエラーの発生率が上昇していることが判明した場合には、詳しい調査が必要な問題を示唆している可能性があります。 | | `gitlab_runner_jobs` | これにより、(ラベル内のさまざまなスコープで)実行されているジョブの数が表示されます。 | | `gitlab_runner_job_duration_seconds` | ジョブ期間のヒストグラム。 | | `gitlab_runner_job_queue_duration_seconds` | ジョブキュー期間を表すヒストグラム。 | | `gitlab_runner_acceptable_job_queuing_duration_exceeded_total` | 設定されたキューイング時間のしきい値をジョブが超過する頻度をカウントします。 | | `gitlab_runner_job_stage_duration_seconds` | 各ステージのジョブ期間を表すヒストグラム。このメトリクスは**高カーディナリティメトリクス**です。詳細については、[高カーディナリティメトリクスのセクション](#high-cardinality-metrics)を参照してください。 | | `gitlab_runner_jobs_total` | 実行されたジョブの合計数を表示します。 | | `gitlab_runner_limit` | 制限設定の現在の値。 | | `gitlab_runner_request_concurrency` | 新しいジョブに対する現在の同時リクエストの数。 | | `gitlab_runner_request_concurrency_exceeded_total` | 設定されている`request_concurrency`制限を超える過剰なリクエストの数。 | | `gitlab_runner_version_info` | さまざまなビルド統計フィールドでラベル付けされている、定数値`1`を持つメトリクス。 | | `process_cpu_seconds_total` | 消費されたユーザーCPU時間とシステムCPU時間の合計(秒単位)。 | | `process_max_fds` | オープンファイル記述子の最大数。 | | `process_open_fds` | オープンファイル記述子の数。 | | `process_resident_memory_bytes` | 常駐メモリのサイズ(バイト単位)。 | | `process_start_time_seconds` | Unixエポックからの秒数で測定された、プロセスの開始時間。 | | 
`process_virtual_memory_bytes` | 仮想メモリのサイズ(バイト単位)。 | | `process_virtual_memory_max_bytes` | 利用可能な仮想メモリの最大量(バイト単位)。 | ### Grafanaダッシュボードの設定に関するヒント {#grafana-dashboard-configuration-tips} この[公開リポジトリ](https://gitlab.com/gitlab-com/runbooks/-/tree/master/dashboards/ci-runners)には、GitLab.comでRunnerフリートを運用するために使用するGrafanaダッシュボードのソースコードがあります。 GitLab.comの多数のメトリクスを追跡しています。クラウドベースのCI/CDの大規模プロバイダーとして、イシューをデバッグできるように、システムをさまざまな観点から把握する必要があります。ほとんどの場合、Self-Managed Runnerフリートは、GitLab.comで追跡している大量のメトリクスを追跡する必要はありません。 Runnerフリートのモニタリングに使用する必要がある重要なダッシュボードの一部を以下に示します。 **Jobs started on runners**: - 選択した時間間隔にわたってRunnerフリートで実行されたジョブの合計の概要を表示します。 - 使用状況の傾向を表示します。このダッシュボードは、少なくとも毎週分析する必要があります。 このデータをジョブ期間などのメトリクスに関連付けて、CI/CDジョブのパフォーマンスSLOを満たすために、設定の変更が必要かどうか、またはキャパシティのアップグレードが必要かどうかを判断できるようにします。 **Job duration**: - Runnerフリートのパフォーマンスとスケーリングを分析します。 **Runner capacity**: - 実行中のジョブの数を、limitまたはconcurrentの値で割った値を表示します。 - 追加のジョブを実行できるキャパシティがまだあるかどうかを判断します。 ### KubernetesでのRunnerのモニタリングに関する考慮事項 {#considerations-for-monitoring-runners-on-kubernetes} OpenShift、Amazon EKS、GKEなどのKubernetesプラットフォームでホストされているRunnerフリートの場合は、別の方法でGrafanaダッシュボードをセットアップします。 Kubernetesでは、Runner CI/CDジョブ実行ポッドを頻繁に作成および削除することがあります。このような場合は、Runnerマネージャーポッドをモニタリングし、次の機能を実装する予定を立てておく必要があります: - ゲージ: 異なるソースからの同一メトリクスの集計を表示します。 - カウンター: `rate`または`increase`関数を適用するときにカウンターをリセットします。 ## 高カーディナリティメトリクス {#high-cardinality-metrics} 一部のメトリクスは、高カーディナリティであるために、インジェストおよび保存の際にリソースを大量に消費する可能性があります。高カーディナリティとなるのは、多数の使用可能な値があるラベルがメトリクスに含まれており、これによって大量の一意の時系列データポイントが作成される場合です。 パフォーマンスを最適化するために、このようなメトリクスはデフォルトでは有効になっていません。[FF_EXPORT_HIGH_CARDINALITY_METRICS機能フラグ](../configuration/feature-flags.md)を使用して切り替えることができます。 ### 高カーディナリティメトリクスのリスト {#list-of-high-cardinality-metrics} - `gitlab_runner_job_stage_duration_seconds`: 個々のジョブステージの期間(秒単位)を測定します。このメトリクスには`stage`ラベルが含まれており、定義済みの値として次のものがあります: - `resolve_secrets` - `prepare_executor` - `prepare_script` - `get_sources` - `clear_worktree` - `restore_cache` - `download_artifacts` - `after_script` - 
`step_script` - `archive_cache` - `archive_cache_on_failure` - `upload_artifacts_on_success` - `upload_artifacts_on_failure` - `cleanup_file_variables` さらに、このリストに`step_run`などのカスタムユーザー定義のステップが含まれる場合があります。 ### 高カーディナリティメトリクスを管理する {#managing-high-cardinality-metrics} [Prometheusのrelabel設定](https://prometheus.io/docs/prometheus/latest/configuration/configuration/#relabel_config)を使用して不要なラベル値またはメトリクス全体を削除することで、カーディナリティを制御および削減できます。 #### 特定のステージを削除する設定の例 {#example-configuration-to-remove-specific-stages} 次の設定は、`stage`ラベルに`prepare_executor`値が設定されているすべてのメトリクスを削除します: ```yaml scrape_configs: - job_name: 'gitlab_runner_metrics' static_configs: - targets: ['localhost:9252'] metric_relabel_configs: - source_labels: [__name__, "stage"] regex: "gitlab_runner_job_stage_duration_seconds;prepare_executor" action: drop ``` #### 関連するステージのみを保持する例 {#example-to-keep-only-relevant-stages} 次の設定は、`step_script`ステージのメトリクスのみを保持し、他のメトリクスを完全に破棄します: ```yaml scrape_configs: - job_name: 'gitlab_runner_metrics' static_configs: - targets: ['localhost:9252'] metric_relabel_configs: - source_labels: [__name__, "stage"] regex: "gitlab_runner_job_stage_duration_seconds;step_script" action: keep ``` ================================================ FILE: docs-locale/ja-jp/fleet_scaling/fleeting.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Fleeting --- [Fleeting](https://gitlab.com/gitlab-org/fleeting/fleeting)は、クラウドプロバイダーのインスタンスグループに対して、プラグインベースの抽象化を提供する目的でRunnerが使用するライブラリです。 以下のexecutorは、RunnerをスケールするためにFleetingを使用します: - [Docker Autoscaler](../executors/docker_autoscaler.md) - [インスタンス](../executors/instance.md) ## Fleetingプラグインを検索 {#find-a-fleeting-plugin} GitLabは、以下の公式プラグインを管理しています: | クラウドプロバイダー | 備考 | 
|----------------------------------------------------------------------------|-------| | [Google Cloud](https://gitlab.com/gitlab-org/fleeting/plugins/googlecloud) | [Google Cloudインスタンスグループ](https://docs.cloud.google.com/compute/docs/instance-groups)を使用 | | [AWS](https://gitlab.com/gitlab-org/fleeting/plugins/aws) | [AWS Auto Scaling groups](https://docs.aws.amazon.com/autoscaling/ec2/userguide/auto-scaling-groups.html)を使用 | | [Azure](https://gitlab.com/gitlab-org/fleeting/plugins/azure) | Azure [Virtual Machine Scale Sets](https://learn.microsoft.com/en-us/azure/virtual-machine-scale-sets/overview)を使用します。[Uniform orchestration](https://learn.microsoft.com/en-us/azure/virtual-machine-scale-sets/virtual-machine-scale-sets-orchestration-modes#scale-sets-with-uniform-orchestration)モードのみがサポートされています。 | 以下のプラグインは、コミュニティによって管理されています: | クラウドプロバイダー | OCI参照 | 備考 | |----------------|---------------|-------| | [VMware vSphere](https://gitlab.com/santhanuv/fleeting-plugin-vmware-vsphere) | `registry.gitlab.com/santhanuv/fleeting-plugin-vmware-vsphere:latest` | VMware vSphereを使用して、既存のテンプレートからクローンを作成して仮想マシンを作成および管理します。[`govmomi vcsim`](https://github.com/vmware/govmomi/tree/main/vcsim)シミュレーターでテストされ、基本的なユースケースに対してコミュニティメンバーによって検証されています。制限されたvSphere権限では制限がある場合があります。[Fleeting Plugin VMware vSphere project](https://gitlab.com/santhanuv/fleeting-plugin-vmware-vsphere/-/issues)で関連するイシューを作成できます。| コミュニティで管理されているプラグインは、GitLab(コミュニティ)外のコントリビューターが所有、構築、ホスト、および管理しています。GitLabは、FleetingライブラリとAPIを所有および管理して、静的なコードレビューを提供します。GitLabは、必要なコンピューティング環境すべてにアクセスできないため、コミュニティのプラグインをテストできません。コミュニティメンバーは、OCIリポジトリにプラグインをビルド、テスト、および公開し、このページでマージリクエストを介して参照を提供する必要があります。OCI参照には、イシューのレポート先、プラグインのサポートと安定性のレベル、およびドキュメントの場所に関する注記を添付する必要があります。 ## Fleetingプラグインを構成 {#configure-a-fleeting-plugin} Fleetingを構成するには、`config.toml`で、[`[runners.autoscaler]`](../configuration/advanced-configuration.md#the-runnersautoscaler-section)構成セクションを使用します。 {{< alert type="note" >}} 各プラグインのREADME.mdファイルには、インストールと設定に関する重要な情報が含まれています。 {{< 
/alert >}} ## フリートプラグインをインストールする {#install-a-fleeting-plugin} Fleetingプラグインをインストールするには、次のいずれかを使用します: - OCIレジストリ配信(推奨) - 手動バイナリインストール ## OCIレジストリ配信でインストール {#install-with-the-oci-registry-distribution} {{< history >}} - [導入](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4690)OCIレジストリの配信(GitLab Runner 16.11内) {{< /history >}} プラグインは、UNIXシステムでは`~/.config/fleeting/plugins`に、Windowsでは`%APPDATA%/fleeting/plugins`にインストールされます。プラグインのインストール場所をオーバーライドするには、環境変数`FLEETING_PLUGIN_PATH`を更新します。 fleetingプラグインをインストールするには: 1. `config.toml`の`[runners.autoscaler]`セクションで、fleetingプラグインを追加します: {{< tabs >}} {{< tab title="AWS" >}} ```toml [[runners]] name = "my runner" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" [runners.autoscaler] plugin = "aws:latest" ``` {{< /tab >}} {{< tab title="Google Cloud" >}} ```toml [[runners]] name = "my runner" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" [runners.autoscaler] plugin = "googlecloud:latest" ``` {{< /tab >}} {{< tab title="Azure" >}} ```toml [[runners]] name = "my runner" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" [runners.autoscaler] plugin = "azure:latest" ``` {{< /tab >}} {{< /tabs >}} 1. 
`gitlab-runner fleeting install`を実行します。 ### `plugin`形式 {#plugin-formats} `plugin`パラメータは、次の形式をサポートします: - `<name>` - `<name>:<version>` - `<repository>/<name>` - `<repository>/<name>:<version>` - `<registry>/<repository>/<name>` - `<registry>/<repository>/<name>:<version>` 各項目の説明: - `registry.gitlab.com`はデフォルトレジストリです。 - `gitlab-org/fleeting/plugins`はデフォルトリポジトリです。 - `latest`はデフォルトバージョンです。 ### バージョン制約の形式 {#version-constraint-formats} `gitlab-runner fleeting install`コマンドは、リモートリポジトリで最新の一致するバージョンを見つけるために、バージョン制約を使用します。 Runnerを実行すると、バージョン制約を使用して、ローカルにインストールされている最新の一致するバージョンが検索されます。 次のバージョン制約形式を使用します: | 形式 | 説明 | |---------------------------|-------------| | `latest` | 最新バージョン。 | | `<major>` | メジャーバージョンを選択します。たとえば、`1`は、`1.*.*`と一致するバージョンを選択します。 | | `<major>.<minor>` | メジャーおよびマイナーバージョンを選択します。たとえば、`1.5`は、`1.5.*`と一致する最新バージョンを選択します。 | | `<major>.<minor>.<patch>` | メジャー、マイナーバージョン、およびパッチを選択します。たとえば、`1.5.1`は、バージョン`1.5.1`を選択します。 | ## バイナリを手動でインストール {#install-binary-manually} fleetingプラグインを手動でインストールするには: 1. システム用のfleetingプラグインバイナリをダウンロードします: - [AWS](https://gitlab.com/gitlab-org/fleeting/plugins/aws/-/releases)。 - [Google Cloud](https://gitlab.com/gitlab-org/fleeting/plugins/googlecloud/-/releases) - [Azure](https://gitlab.com/gitlab-org/fleeting/plugins/azure/-/releases) 1. バイナリの名前が`fleeting-plugin-<name>`の形式であることを確認します。たとえば、`fleeting-plugin-aws`などです。 1. バイナリが`$PATH`から検出できることを確認します。たとえば、`/usr/local/bin`に移動します。 1. 
`config.toml`の`[runners.autoscaler]`セクションで、fleetingプラグインを追加します。例: {{< tabs >}} {{< tab title="AWS" >}} ```toml [[runners]] name = "my runner" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" [runners.autoscaler] plugin = "fleeting-plugin-aws" ``` {{< /tab >}} {{< tab title="Google Cloud" >}} ```toml [[runners]] name = "my runner" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" [runners.autoscaler] plugin = "fleeting-plugin-googlecloud" ``` {{< /tab >}} {{< tab title="Azure" >}} ```toml [[runners]] name = "my runner" url = "https://gitlab.com" token = "" shell = "sh" executor = "instance" [runners.autoscaler] plugin = "fleeting-plugin-azure" ``` {{< /tab >}} {{< /tabs >}} ## Fleetingプラグインの管理 {#fleeting-plugin-management} 次の`fleeting`サブコマンドを使用して、fleetingプラグインを管理します: | コマンド | 説明 | |----------------------------------|-------------| | `gitlab-runner fleeting install` | OCIレジストリ配信からfleetingプラグインをインストールします。 | | `gitlab-runner fleeting list` | 参照されているプラグインと使用されているバージョンを一覧表示します。 | | `gitlab-runner fleeting login` | プライベートレジストリにサインインします。 | ================================================ FILE: docs-locale/ja-jp/grit/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runnerインフラストラクチャツールキット --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated - ステータス: 実験的機能 {{< /details >}} [GitLab Runner Infrastructure Toolkit (GRIT)](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit)は、パブリッククラウドプロバイダー上で多くの一般的なランナーの設定を作成および管理するために使用できる、Terraformモジュールのライブラリです。 {{< alert type="note" >}} 
これは[実験的機能](https://docs.gitlab.com/policy/development_stages_support/#experiment)です。GRIT開発の状況について詳しくは、[エピック1](https://gitlab.com/groups/gitlab-org/ci-cd/runner-tools/-/epics/1)をご覧ください。この機能に関するフィードバックを提供するには、[イシュー84](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit/-/issues/84)にコメントを残してください。 {{< /alert >}} ## GRITでランナーを作成する {#create-a-runner-with-grit} GRITを使用して、AWSでオートスケールLinux Dockerをデプロイするには、次の手順を実行します: 1. GitLabおよびAWSへのアクセスを提供するには、次の変数を設定します: - `GITLAB_TOKEN` - `AWS_REGION` - `AWS_SECRET_ACCESS_KEY` - `AWS_ACCESS_KEY_ID` 1. 最新の[GRITリリース](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit/-/releases)をダウンロードし、`.local/grit`に展開します。 1. `main.tf`Terraformモジュールを作成します: ```hcl module "runner" { source = ".local/grit/scenarios/aws/linux/docker-autoscaler-default" name = "grit-runner" gitlab_project_id = "39258790" # gitlab.com/josephburnett/hello-runner runner_description = "Autoscaling Linux Docker runner on AWS deployed with GRIT. " runner_tags = ["aws", "linux"] max_instances = 5 min_support = "experimental" } ``` 1. 
モジュールを初期化して適用します: ```plaintext terraform init terraform apply ``` これらの手順では、GitLabプロジェクトに新しいランナーを作成します。ランナーマネージャーは、`docker-autoscaler` executorを使用して、`aws`および`linux`としてタグ付けされたジョブを実行します。ランナーは、ワークロードに基づいて、新しいオートスケールグループ(ASG)を介して1 ~ 5個のVMをプロビジョニングします。ASGは、ランナーチームが所有するパブリックAMIを使用します。ランナーマネージャーとASGはどちらも、新しいVPCで動作します。すべてのリソースは、指定された値(`grit-runner`)に基づいて命名されます。これにより、単一のAWSプロジェクト内で、異なる名前を持つこのモジュールの複数のインスタンスを作成できます。 ## サポートレベルと`min_support`パラメータ {#support-levels-and-the-min_support-parameter} すべてのGRITモジュールに`min_support`値を指定する必要があります。このパラメータは、オペレーターがデプロイに必要な最小サポートレベルを指定します。GRITモジュールは、`none`、`experimental`、`beta`、または`GA`のサポート指定に関連付けられています。目標は、すべてのモジュールが`GA`ステータスに到達することです。 `none`は特殊なケースです。主にテストおよび開発を目的とした、サポート保証のないモジュール。 `experimental`、`beta`、および`ga`のモジュールは、[GitLabの開発ステージの定義](https://docs.gitlab.com/policy/development_stages_support/)に準拠しています。 ### 責任共有モデル {#shared-responsibility-model} GRITは、作成者(モジュールの開発者)とオペレーター(GRITでデプロイするユーザー)間の責任共有モデルに基づいて動作します。各ロールの具体的な責任とサポートレベルの決定方法について詳しくは、GORPドキュメントの[「責任の共有」セクション](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit/-/blob/main/GORP.md#shared-responsibility)をご覧ください。 ## ランナーの状態を管理する {#manage-runner-state} ランナーを維持するには、次の手順を実行します: 1. GitLabプロジェクトにモジュールをチェックインします。 1. Terraformの状態をGitLab Terraformの`backend.tf`に保存します: ```hcl terraform { backend "http" {} } ``` 1. 
`.gitlab-ci.yml`を使用して変更を適用します: ```yaml terraform-apply: variables: TF_HTTP_LOCK_ADDRESS: "https://gitlab.com/api/v4/projects/${CI_PROJECT_ID}/terraform/state/${NAME}/lock" TF_HTTP_UNLOCK_ADDRESS: ${TF_HTTP_LOCK_ADDRESS} TF_HTTP_USERNAME: ${GITLAB_USER_LOGIN} TF_HTTP_PASSWORD: ${GITLAB_TOKEN} TF_HTTP_LOCK_METHOD: POST TF_HTTP_UNLOCK_METHOD: DELETE script: - terraform init - terraform apply -auto-approve ``` ### ランナーを削除する {#delete-a-runner} ランナーとそのインフラストラクチャを削除するには、次の手順を実行します: ```plaintext terraform destroy ``` ## サポートされている設定 {#supported-configurations} | プロバイダー | サービス | アーキテクチャ | OS | executor | 機能サポート | |--------------|---------|--------|-------|-------------------|-----------------| | AWS | EC2 | x86-64 | Linux | Docker Autoscaler | 実験的 | | AWS | EC2 | Arm64 | Linux | Docker Autoscaler | 実験的 | | Google Cloud | GCE | x86-64 | Linux | Docker Autoscaler | 実験的 | | Google Cloud | GKE | x86-64 | Linux | Kubernetes | 実験的 | ## 高度な設定 {#advanced-configuration} ### トップレベルモジュール {#top-level-modules} プロバイダーのトップレベルモジュールは、高度に分離されているか、ランナーのオプションの設定の側面を表します。たとえば、`fleeting`と`runner`は、アクセス認証情報とインスタンスグループ名のみを共有するため、別個のモジュールです。`vpc`は、一部のユーザーが独自のVPCを提供するため、別個のモジュールです。既存のVPCを持つユーザーは、他のGRITモジュールと接続するために、一致する入力構造を作成するだけで済みます。 たとえば、トップレベルのVPCモジュールを使用して、VPCを必要とするモジュールのVPCを作成できます: ```hcl module "runner" { source = ".local/grit/modules/aws/runner" vpc = { id = module.vpc.id subnet_ids = module.vpc.subnet_ids } # ...additional config omitted } module "vpc" { source = ".local/grit/modules/aws/vpc" zone = "us-east-1b" cidr = "10.0.0.0/16" subnet_cidr = "10.0.0.0/24" } ``` ユーザーは独自のVPCを提供でき、GRITのVPCモジュールを使用する必要はありません: ```hcl module "runner" { source = ".local/grit/modules/aws/runner" vpc = { id = PREEXISTING_VPC_ID subnet_ids = [PREEXISTING_SUBNET_ID] } # ...additional config omitted } ``` ## GRITへのコントリビュート {#contributing-to-grit} GRITは、コミュニティからのコントリビューションを歓迎します。コントリビュートする前に、次のリソースを確認してください: ### デベロッパーCertificate of Originおよびライセンス {#developer-certificate-of-origin-and-license} 
GRITへのすべてのコントリビューションは、[デベロッパーCertificate of Originおよびライセンス](https://docs.gitlab.com/legal/developer_certificate_of_origin/)に従うものとします。コントリビュートすることにより、現在および将来のGitLab, Inc. に提出されたコントリビューションに対するこれらの利用規約に同意したものとみなされます。 ### 行動規範 {#code-of-conduct} GRITは、[コントリビューター規約](https://www.contributor-covenant.org)から採用されたGitLabの行動規範に従います。このプロジェクトは、バックグラウンドやアイデンティティに関係なく、誰もがハラスメントのない体験ができるようにすることに取り組んでいます。 ### コントリビューションのガイドライン {#contribution-guidelines} GRITにコントリビュートする場合は、次のガイドラインに従ってください: - 全体的なアーキテクチャ設計については、[GORPガイドライン](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit/-/blob/main/GORP.md)を確認してください。 - [Terraformを使用するためのGoogleのベストプラクティス](https://docs.cloud.google.com/docs/terraform/best-practices/general-style-structure)に従ってください。 - 複雑さと反復を軽減するために、再利用可能なモジュールアプローチに従ってください。 - コントリビューションに適切なGoテストを含めます。 ### テストとLint {#testing-and-linting} GRITは、品質を確保するために、いくつかのテストツールとLintツールを使用しています: - 統合テスト: Terraformプランを検証するために、[Terratest](https://terratest.gruntwork.io/)を使用します。 - エンドツーエンドテスト: [e2eディレクトリ](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit/-/blob/main/e2e/README.md)で利用できます。 - Terraform Lint: `tflint`、`terraform fmt`、および`terraform validate`を使用します。 - Go Lint: Goコード(主にテスト)には、[golangci-lint](https://golangci-lint.run/)を使用します。 - ドキュメント: [GitLabドキュメントのスタイルガイドライン](https://docs.gitlab.com/development/documentation/styleguide/)に従い、`vale`と`markdownlint`を使用します。 開発環境のセットアップ、テストの実行、Lintの詳細な手順については、[CONTRIBUTING.md](https://gitlab.com/gitlab-org/ci-cd/runner-tools/grit/-/blob/main/CONTRIBUTING.md)を参照してください。 ## GRITのユーザー {#who-uses-grit} GRITは、GitLabエコシステム内のさまざまなチームやサービスで採用されています: - **[GitLab Dedicated](https://about.gitlab.com/dedicated/)**: [GitLab Dedicatedのホストされたランナー](https://docs.gitlab.com/administration/dedicated/hosted_runners/)は、GRITを使用してランナーインフラストラクチャをプロビジョニングおよび管理します。 - **GitLab Self-Managed**: GRITは、多くのGitLab Self-Managedのお客様から非常に要望されています。一部の組織では、標準化された方法でランナーのデプロイを管理するために、GRITの採用を開始しています。 組織でGRITを使用していて、このセクションで紹介したい場合は、マージリクエストを開いてください。 
================================================ FILE: docs-locale/ja-jp/install/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments description: CI/CDジョブ用ソフトウェア title: GitLab Runnerをインストールする --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-runner)は、GitLabで定義されたCI/CDジョブを実行します。GitLab Runnerは、単一のバイナリとして実行でき、言語固有の要件はありません。 セキュリティとパフォーマンス上の理由から、GitLab Runnerは、GitLabインスタンスをホストするマシンとは別のマシンにインストールしてください。 ## サポート対象のオペレーティングシステム {#supported-operating-systems} GitLab Runnerは以下にインストールできます: - [GitLabリポジトリ](linux-repository.md)または[手動](linux-manually.md)によるLinux - [FreeBSD](freebsd.md) - [macOS](osx.md) - [Windows](windows.md) - [z/OS](z-os.md) [Bleeding Edgeバイナリ](bleeding-edge.md)も利用できます。 別のオペレーティングシステムを使用するには、そのオペレーティングシステムがGoバイナリをビルドできることを確認してください。 ## サポートされているコンテナ {#supported-containers} GitLab Runnerは以下とともにインストールできます: - [Docker](docker.md) - [GitLab Helmチャート](kubernetes.md) - [Kubernetes向けGitLabエージェント](kubernetes-agent.md) - [GitLab Operator](operator.md)を使用する ## サポートされているアーキテクチャ {#supported-architectures} GitLab Runnerは、次のアーキテクチャで使用できます: - x86 - AMD64 - ARM64 - ARM - s390x - ppc64le - riscv64 ## システム要件 {#system-requirements} GitLab Runnerのシステム要件は、以下によって異なります: - CI/CDジョブの予想されるCPU負荷 - CI/CDジョブの予想されるメモリ使用量 - 同時CI/CDジョブの数 - アクティブな開発中のプロジェクト数 - 並行して作業することが予想されるデベロッパーの数 GitLab.comで利用可能なマシンの種類について詳しくは、[GitLabホストされたランナー](https://docs.gitlab.com/ci/runners/)を参照してください。 ## FIPS準拠GitLab Runner {#fips-compliant-gitlab-runner} FIPS 140-2に準拠したGitLab Runnerバイナリは、Red Hat Enterprise Linux(RHEL)ディストリビューションおよびAMD64アーキテクチャで利用できます。他のディストリビューションとアーキテクチャのサポートは、[イシュー28814](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28814)で提案されています。 このバイナリは[Red Hat 
Goコンパイラ](https://developers.redhat.com/blog/2019/06/24/go-and-fips-140-2-on-red-hat-enterprise-linux)でビルドされ、FIPS 140-2で検証された暗号学的ライブラリを呼び出します。[UBI-8ミニマルイメージ](https://docs.redhat.com/en/documentation/red_hat_enterprise_linux/8/html-single/building_running_and_managing_containers/index#con_understanding-the-ubi-minimal-images_assembly_types-of-container-images)は、GitLab Runner FIPSイメージを作成するためのベースとして使用されます。 RHELでFIPS準拠のGitLab Runnerを使用する方法について詳しくは、[FIPSモードへのRHELのスイッチ](https://docs.redhat.com/en/documentation/red_hat_enterprise_linux/8/html/security_hardening/switching-rhel-to-fips-mode_security-hardening)を参照してください。 ================================================ FILE: docs-locale/ja-jp/install/bleeding-edge.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runner最新リリース --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} {{< alert type="warning" >}} これらのGitLab Runnerのリリースは最新であり、`main`ブランチから直接ビルドされているため、テストされていない可能性があります。ご自身の責任においてご利用ください。 {{< /alert >}} ## スタンドアロンバイナリをダウンロードする {#download-the-standalone-binaries} - - - - - - - - - - - その後、次のコマンドを使用してGitLab Runnerを実行できます: ```shell chmod +x gitlab-runner-linux-amd64 ./gitlab-runner-linux-amd64 run ``` ## DebianまたはUbuntu用のパッケージをダウンロードする {#download-one-of-the-packages-for-debian-or-ubuntu} - - - - - - - ### エクスポートされたrunner-helperイメージパッケージをダウンロードする {#download-the-exported-runner-helper-images-package} runner-helperイメージパッケージは、GitLab Runner `.deb`パッケージに必要な依存関係です。 次の場所からパッケージをダウンロードします: ```plaintext https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/deb/gitlab-runner-helper-images.deb ``` その後、次のコマンドを使用してインストールできます: ```shell dpkg -i gitlab-runner-helper-images.deb gitlab-runner_<arch>.deb ``` ## Red 
HatまたはCentOS用のパッケージをダウンロードする {#download-one-of-the-packages-for-red-hat-or-centos} - - - - - - - ### エクスポートされたrunner-helperイメージパッケージをダウンロードする {#download-the-exported-runner-helper-images-package-1} runner-helperイメージパッケージは、GitLab Runner `.rpm`パッケージに必要な依存関係です。 次の場所からパッケージをダウンロードします: ```plaintext https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/rpm/gitlab-runner-helper-images.rpm ``` その後、次のコマンドを使用してインストールできます: ```shell rpm -i gitlab-runner-helper-images.rpm gitlab-runner_<arch>.rpm ``` ## その他のタグ付きリリースをダウンロードする {#download-any-other-tagged-release} `main`を`tag`(`v16.5.0`など)または`latest`(最新の安定版)のいずれかに置き換えます。タグの一覧については、<https://gitlab.com/gitlab-org/gitlab-runner/-/tags>を参照してください。次に例を示します: - - - `https`経由でのダウンロードに問題がある場合は、プレーンな`http`にフォールバックします: - - - ================================================ FILE: docs-locale/ja-jp/install/docker.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: コンテナ内でGitLab Runnerを実行する --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} DockerコンテナでGitLab Runnerを実行して、CI/CDジョブを実行できます。GitLab Runner Dockerイメージには、以下の実行に必要なすべての依存関係が含まれています: - GitLab Runnerを実行する。 - コンテナ内でCI/CDジョブを実行する。 GitLab Runner Dockerイメージは、[UbuntuまたはAlpine Linux](#docker-images)をベースとして使用しています。ホストにGitLab Runnerを直接インストールする場合と同様に、標準の`gitlab-runner`コマンドをラップします。 `gitlab-runner`コマンドはDockerコンテナで実行されます。このセットアップでは、Dockerデーモンに対する完全な制御が各GitLab Runnerコンテナに委譲されます。このため、他のペイロードも実行するDockerデーモン内部でGitLab Runnerを実行すると、分離の保証が損なわれます。 このセットアップでは、以下に示すように、実行するどのGitLab Runnerコマンドにも、それに相当する`docker run`のコマンドがあります: - Runnerコマンド: `gitlab-runner ` - Dockerコマンド: `docker run gitlab/gitlab-runner ` たとえば、GitLab Runnerのトップレベルのヘルプ情報を取得するには、コマンドの`gitlab-runner`の部分を`docker run [docker options] gitlab/gitlab-runner`に置き換えます。次に例を示します: ```shell docker run --rm -t -i 
gitlab/gitlab-runner --help NAME: gitlab-runner - a GitLab Runner USAGE: gitlab-runner [global options] command [command options] [arguments...] VERSION: 17.9.1 (bbf75488) (...) ``` ## Docker Engineのバージョンの互換性 {#docker-engine-version-compatibility} Docker EngineとGitLab Runnerコンテナイメージのバージョンが一致している必要はありません。GitLab Runnerイメージには下位互換性と上位互換性があります。最新の機能とセキュリティ更新を確実に入手するには、常に最新の安定版[Docker Engineバージョン](https://docs.docker.com/engine/install/)を使用する必要があります。 ## Dockerイメージをインストールしてコンテナを起動する {#install-the-docker-image-and-start-the-container} 前提要件: - [Dockerをインストール](https://docs.docker.com/get-started/get-docker/)していること。 - [FAQ](../faq/_index.md)を読んで、GitLab Runnerの一般的な問題を理解していること。 1. `docker pull gitlab/gitlab-runner:`コマンドを使用して、`gitlab-runner` Dockerイメージをダウンロードします。 利用可能なバージョンタグのリストについては、[GitLab Runnerのタグ](https://hub.docker.com/r/gitlab/gitlab-runner/tags)を参照してください。 1. `docker run -d [options] `コマンドを使用して、`gitlab-runner` Dockerイメージを実行します。 1. Dockerコンテナで`gitlab-runner`を実行する場合は、コンテナの再起動時に設定が失われないようにしてください。永続ボリュームをマウントして設定を保存します。ボリュームは次のいずれかにマウントできます: - [ローカルシステムボリューム](#from-a-local-system-volume) - [Dockerボリューム](#from-a-docker-volume) 1. (オプション)[`session_server`](../configuration/advanced-configuration.md)を使用している場合は、`docker run`コマンドに`-p 8093:8093`を追加して、ポート`8093`を公開します。 1. (オプション)オートスケールにDocker Machine Executorを使用するには、`docker run`コマンドにボリュームマウントを追加して、Docker Machineストレージパス(`/root/.docker/machine`)をマウントします: - システムボリュームマウントの場合は、`-v /srv/gitlab-runner/docker-machine-config:/root/.docker/machine`を追加 - Dockerの名前付きボリュームの場合は、`-v docker-machine-config:/root/.docker/machine`を追加 1. 
[新しいRunnerを登録します](../register/_index.md)。ジョブを取得するには、GitLab Runnerコンテナを登録する必要があります。 利用可能な設定オプションには次のものがあります: - コンテナのタイムゾーンを設定するには、フラグ`--env TZ=`を使用します。[利用可能なタイムゾーンの一覧](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones)を参照してください。 - [FIPS準拠のGitLab Runner](_index.md#fips-compliant-gitlab-runner)イメージを使用する場合は、`redhat/ubi9-micro`ベースの`gitlab/gitlab-runner:ubi-fips`タグを使用します。 - [信頼できるSSLサーバー証明書をインストールします](#install-trusted-ssl-server-certificates)。 ### ローカルシステムボリュームを使用する場合 {#from-a-local-system-volume} `gitlab-runner`コンテナにマウントされた設定ボリュームやその他のリソースとしてローカルシステムを使用するには、次のようにします: 1. (オプション)MacOSシステムでは、デフォルトの場合、`/srv`は存在しません。セットアップ用に`/private/srv`を作成するか、または別のプライベートディレクトリを作成します。 1. 次のコマンドを実行します(必要に応じて修正): ```shell docker run -d --name gitlab-runner --restart always \ -v /srv/gitlab-runner/config:/etc/gitlab-runner \ -v /var/run/docker.sock:/var/run/docker.sock \ gitlab/gitlab-runner:latest ``` ### Dockerボリュームを使用する場合 {#from-a-docker-volume} 設定コンテナを使用してカスタムデータボリュームをマウントするには、次の手順に従います: 1. Dockerボリュームを作成します: ```shell docker volume create gitlab-runner-config ``` 1. 作成したボリュームを使用してGitLab Runnerコンテナを起動します: ```shell docker run -d --name gitlab-runner --restart always \ -v /var/run/docker.sock:/var/run/docker.sock \ -v gitlab-runner-config:/etc/gitlab-runner \ gitlab/gitlab-runner:latest ``` ## Runnerの設定を更新する {#update-runner-configuration} `config.toml`で[Runnerの設定を変更](../configuration/advanced-configuration.md)したら、`docker stop`と`docker run`でコンテナを再起動して、変更を適用します。 ## Runnerのバージョンをアップグレードする {#upgrade-runner-version} 前提要件: - 最初に使用した方法(`-v /srv/gitlab-runner/config:/etc/gitlab-runner`または`-v gitlab-runner-config:/etc/gitlab-runner`)でデータボリュームをマウントする必要があります。 1. 最新バージョン(または特定のタグ)をプルします: ```shell docker pull gitlab/gitlab-runner:latest ``` 1. 既存のコンテナを停止して削除します: ```shell docker stop gitlab-runner && docker rm gitlab-runner ``` 1. 
最初に使用した方法でコンテナを起動します: ```shell docker run -d --name gitlab-runner --restart always \ -v /var/run/docker.sock:/var/run/docker.sock \ -v /srv/gitlab-runner/config:/etc/gitlab-runner \ gitlab/gitlab-runner:latest ``` ## Runnerのログを表示する {#view-runner-logs} ログファイルの場所は、Runnerの起動方法によって異なります。次のようになります: - **フォアグラウンドタスク**として(ローカルにインストールされたバイナリとして、またはDockerコンテナ内で)起動する場合は、ログは`stdout`に出力されます。 - `systemd`などを使用して**システムサービス**として起動する場合は、Syslogなどのシステムログ生成メカニズムでログが使用可能になります。 - **Dockerベースのサービス**として起動する場合は、`docker logs`コマンドを使用します。これは、`gitlab-runner ...`コマンドがコンテナのメインプロセスであるためです。 たとえば、次のコマンドでコンテナを起動すると、その名前は`gitlab-runner`に設定されます: ```shell docker run -d --name gitlab-runner --restart always \ -v /var/run/docker.sock:/var/run/docker.sock \ -v /srv/gitlab-runner/config:/etc/gitlab-runner \ gitlab/gitlab-runner:latest ``` ログを表示するには、`gitlab-runner`をコンテナ名に置き換えて次のコマンドを実行します: ```shell docker logs gitlab-runner ``` コンテナログの処理の詳細については、Dockerドキュメントの[`docker container logs`](https://docs.docker.com/reference/cli/docker/container/logs/)を参照してください。 ## 信頼できるSSLサーバー証明書をインストールする {#install-trusted-ssl-server-certificates} GitLab CI/CDサーバーが自己署名SSL証明書を使用している場合は、RunnerコンテナがGitLab CIサーバー証明書を信頼していることを確認してください。これにより、通信障害の発生を防止できます。 前提要件: - `ca.crt`ファイルには、GitLab Runnerに信頼させたいすべてのサーバーのルート証明書が含まれている必要があります。 1. (オプション)`gitlab/gitlab-runner`イメージは、`/etc/gitlab-runner/certs/ca.crt`で信頼できるSSL証明書を探します。この動作を変更するには、`-e "CA_CERTIFICATES_PATH=/DIR/CERT"`設定オプションを使用します。 1. `ca.crt`ファイルをデータボリューム(またはコンテナ)の`certs`ディレクトリにコピーします。 1. 
(オプション)コンテナがすでに実行されている場合は、再起動して起動時に`ca.crt`ファイルをインポートします。 ## Dockerイメージ {#docker-images} GitLab Runner 17.10.0では、AlpineベースのDockerイメージはAlpine 3.19を使用します。次のマルチプラットフォームDockerイメージが利用可能です: - `gitlab/gitlab-runner:latest` - Ubuntuベース、約800 MB - `gitlab/gitlab-runner:alpine` - Alpineベース、約460 MB UbuntuイメージとAlpineイメージの両方で利用可能なビルド手順については、[GitLab Runner](https://gitlab.com/gitlab-org/gitlab-runner/tree/main/dockerfiles)のソースを参照してください。 ### Runner Dockerイメージを作成する {#create-a-runner-docker-image} GitLabリポジトリで更新が利用可能になる前に、イメージのオペレーティングシステムをアップグレードできます。 前提要件: - IBM Zイメージを使用していないこと(`docker-machine`依存関係が含まれていないため)。このイメージは、Linux s390xまたはLinux ppc64leプラットフォーム向けにはメンテナンスされていません。現状については、[イシュー26551](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/26551)を参照してください。 最新のAlpineバージョン用の`gitlab-runner` Dockerイメージをビルドするには、次の手順に従います: 1. `alpine-upgrade/Dockerfile`を作成します。 ```dockerfile ARG GITLAB_RUNNER_IMAGE_TYPE ARG GITLAB_RUNNER_IMAGE_TAG FROM gitlab/${GITLAB_RUNNER_IMAGE_TYPE}:${GITLAB_RUNNER_IMAGE_TAG} RUN apk update RUN apk upgrade ``` 1. アップグレードされた`gitlab-runner`イメージを作成します。 ```shell GITLAB_RUNNER_IMAGE_TYPE=gitlab-runner \ GITLAB_RUNNER_IMAGE_TAG=alpine-v17.9.1 \ docker build -t $GITLAB_RUNNER_IMAGE_TYPE:$GITLAB_RUNNER_IMAGE_TAG \ --build-arg GITLAB_RUNNER_IMAGE_TYPE=$GITLAB_RUNNER_IMAGE_TYPE \ --build-arg GITLAB_RUNNER_IMAGE_TAG=$GITLAB_RUNNER_IMAGE_TAG \ -f alpine-upgrade/Dockerfile alpine-upgrade ``` 1. 
アップグレードされた`gitlab-runner-helper`イメージを作成します。 ```shell GITLAB_RUNNER_IMAGE_TYPE=gitlab-runner-helper \ GITLAB_RUNNER_IMAGE_TAG=x86_64-v17.9.1 \ docker build -t $GITLAB_RUNNER_IMAGE_TYPE:$GITLAB_RUNNER_IMAGE_TAG \ --build-arg GITLAB_RUNNER_IMAGE_TYPE=$GITLAB_RUNNER_IMAGE_TYPE \ --build-arg GITLAB_RUNNER_IMAGE_TAG=$GITLAB_RUNNER_IMAGE_TAG \ -f alpine-upgrade/Dockerfile alpine-upgrade ``` ## コンテナでSELinuxを使用する {#use-selinux-in-your-container} CentOS、Red Hat、Fedoraなどの一部のディストリビューションでは、基盤となるシステムのセキュリティを強化するために、デフォルトでSELinux(Security-Enhanced Linux)が使用されています。 この設定には注意が必要です。 前提要件: - [Docker executor](../executors/docker.md)を使用してコンテナでビルドを実行するには、Runnerが`/var/run/docker.sock`にアクセスできる必要があります。 - 強制モードでSELinuxを使用する場合は、Runnerが`/var/run/docker.sock`にアクセスするときに`Permission denied`エラーが発生しないようにするため、[`selinux-dockersock`](https://github.com/dpw/selinux-dockersock)をインストールします。 1. ホストに永続ディレクトリを作成します(`mkdir -p /srv/gitlab-runner/config`)。 1. ボリュームで`:Z`を使用してDockerを実行します: ```shell docker run -d --name gitlab-runner --restart always \ -v /var/run/docker.sock:/var/run/docker.sock \ -v /srv/gitlab-runner/config:/etc/gitlab-runner:Z \ gitlab/gitlab-runner:latest ``` ================================================ FILE: docs-locale/ja-jp/install/environment_variables_in_helm_charts.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runner Helmチャートで環境変数を設定する --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} 環境変数は、アプリケーションがランタイム時の動作を調整するために使用できる情報を含むキー/バリューペアです。これらの変数は、コンテナの環境に挿入されます。これらの変数を使用して、アプリケーションに必要な設定データ、シークレット、またはその他の動的情報を渡すことができます。 GitLab Runner Helmチャートで環境変数を設定するには、次のものを使用します: - [`runners.config`プロパティ](#use-the-runnersconfig-property) - [`values.yaml`のプロパティ](#use-valuesyaml-properties) ## 
`runners.config`プロパティを使用してください。 {#use-the-runnersconfig-property} `config.toml`ファイルで行うのと同様に、`runners.config`プロパティを使用して環境変数を設定できます: ```yaml runners: config: | [[runners]] shell = "bash" [runners.kubernetes] host = "" environment = ["FF_USE_ADVANCED_POD_SPEC_CONFIGURATION=true"] ``` この方法で定義された変数は、ジョブPodとGitLab Runner Managerコンテナの両方に適用されます。上記の例では、`FF_USE_ADVANCED_POD_SPEC_CONFIGURATION`機能フラグが環境変数として設定されており、GitLab Runner Managerがその動作を変更するために使用します。 ## `values.yaml`プロパティの使用 {#use-valuesyaml-properties} `values.yaml`の次のプロパティを使用して環境変数を設定することもできます。これらの変数は、GitLab Runner Managerコンテナにのみ影響します。 - `envVars` ```yaml envVars: - name: RUNNER_EXECUTOR value: kubernetes ``` - `extraEnv` ```yaml extraEnv: CACHE_S3_SERVER_ADDRESS: s3.amazonaws.com CACHE_S3_BUCKET_NAME: runners-cache CACHE_S3_BUCKET_LOCATION: us-east-1 CACHE_SHARED: true ``` - `extraEnvFrom` ```yaml extraEnvFrom: {} CACHE_S3_ACCESS_KEY: secretKeyRef: name: s3access key: accesskey CACHE_S3_SECRET_KEY: secretKeyRef: name: s3access key: secretkey ``` `extraEnvFrom`の詳細については、以下を参照してください: - [`Distribute Credentials Securely Using Secrets`](https://kubernetes.io/docs/tasks/inject-data-application/distribute-credentials-secure/) - [`Use container fields as values for environment variables`](https://kubernetes.io/docs/tasks/inject-data-application/environment-variable-expose-pod-information/#use-container-fields-as-values-for-environment-variables) ================================================ FILE: docs-locale/ja-jp/install/freebsd.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: FreeBSDにGitLab Runnerをインストールする --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} {{< alert type="note" >}} FreeBSDバージョンも[bleeding 
edge](bleeding-edge.md)リリースとして利用できます。[FAQ](../faq/_index.md)セクションを参照してください。このセクションでは、GitLab Runnerに関する最も一般的な問題について説明しています。 {{< /alert >}} ## GitLab Runnerのインストール {#installing-gitlab-runner} FreeBSDにGitLab Runnerをインストールして構成する手順は次のとおりです: 1. `gitlab-runner`ユーザーとグループを作成します: ```shell sudo pw group add -n gitlab-runner sudo pw user add -n gitlab-runner -g gitlab-runner -s /usr/local/bin/bash sudo mkdir /home/gitlab-runner sudo chown gitlab-runner:gitlab-runner /home/gitlab-runner ``` 1. ご使用のシステムに対応するバイナリをダウンロードします: ```shell # For amd64 sudo fetch -o /usr/local/bin/gitlab-runner https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-freebsd-amd64 # For i386 sudo fetch -o /usr/local/bin/gitlab-runner https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-freebsd-386 ``` [Bleeding Edge - その他のタグ付きリリースをダウンロードする](bleeding-edge.md#download-any-other-tagged-release)の説明に従って、利用可能なすべてのバージョンのバイナリをダウンロードできます。 1. 実行権限を付与します: ```shell sudo chmod +x /usr/local/bin/gitlab-runner ``` 1. 正しい権限で空のログファイルを作成します: ```shell sudo touch /var/log/gitlab_runner.log && sudo chown gitlab-runner:gitlab-runner /var/log/gitlab_runner.log ``` 1. `rc.d`ディレクトリが存在しない場合は作成します: ```shell mkdir -p /usr/local/etc/rc.d ``` 1. `rc.d`内に`gitlab_runner`スクリプトを作成します: Bashユーザーは以下を実行できます: ```shell sudo bash -c 'cat > /usr/local/etc/rc.d/gitlab_runner' << "EOF" #!/bin/sh # PROVIDE: gitlab_runner # REQUIRE: DAEMON NETWORKING # BEFORE: # KEYWORD: . 
/etc/rc.subr name="gitlab_runner" rcvar="gitlab_runner_enable" user="gitlab-runner" user_home="/home/gitlab-runner" command="/usr/local/bin/gitlab-runner" command_args="run" pidfile="/var/run/${name}.pid" start_cmd="gitlab_runner_start" gitlab_runner_start() { export USER=${user} export HOME=${user_home} if checkyesno ${rcvar}; then cd ${user_home} /usr/sbin/daemon -u ${user} -p ${pidfile} ${command} ${command_args} > /var/log/gitlab_runner.log 2>&1 fi } load_rc_config $name run_rc_command $1 EOF ``` bashを使用していない場合は、`/usr/local/etc/rc.d/gitlab_runner`という名前のファイルを作成し、次のコンテンツを含めます: ```shell #!/bin/sh # PROVIDE: gitlab_runner # REQUIRE: DAEMON NETWORKING # BEFORE: # KEYWORD: . /etc/rc.subr name="gitlab_runner" rcvar="gitlab_runner_enable" user="gitlab-runner" user_home="/home/gitlab-runner" command="/usr/local/bin/gitlab-runner" command_args="run" pidfile="/var/run/${name}.pid" start_cmd="gitlab_runner_start" gitlab_runner_start() { export USER=${user} export HOME=${user_home} if checkyesno ${rcvar}; then cd ${user_home} /usr/sbin/daemon -u ${user} -p ${pidfile} ${command} ${command_args} > /var/log/gitlab_runner.log 2>&1 fi } load_rc_config $name run_rc_command $1 ``` 1. `gitlab_runner`スクリプトを実行可能にします: ```shell sudo chmod +x /usr/local/etc/rc.d/gitlab_runner ``` 1. [Runnerを登録する](../register/_index.md) 1. 
`gitlab-runner`サービスを有効にして開始します: ```shell sudo sysrc gitlab_runner_enable=YES sudo service gitlab_runner start ``` 再起動後に`gitlab-runner`サービスを起動したくない場合は、次を使用します: ```shell sudo service gitlab_runner onestart ``` ================================================ FILE: docs-locale/ja-jp/install/kubernetes-agent.md ================================================ --- stage: Deploy group: Environments info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: エージェントを使用してGitLab Runnerをインストールします --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} [Kubernetes向けGitLabエージェント](https://docs.gitlab.com/user/clusters/agent/)をインストールして設定すると、エージェントを使用してクラスターにGitLab Runnerをインストールできます。 この[GitOpsワークフロー](https://docs.gitlab.com/user/clusters/agent/gitops/)を使用すると、リポジトリにGitLab Runnerの設定ファイルが含まれ、クラスターが自動的に更新されます。 {{< alert type="warning" >}} 暗号化されていないGitLab Runnerのシークレットを`runner-manifest.yaml`に追加すると、リポジトリファイル内のシークレットが公開される可能性があります。GitOpsワークフローでKubernetes Secretsを安全に管理するには、[Sealed Secrets](https://fluxcd.io/flux/guides/sealed-secrets/)または[SOPS](https://fluxcd.io/flux/guides/mozilla-sops/)を使用します。 {{< /alert >}} 1. [GitLab Runner](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/main/values.yaml)のHelmチャートの値を確認します。 1. 
`runner-chart-values.yaml`ファイルを作成します。次に例を示します: ```yaml # The GitLab Server URL (with protocol) that you want to register the runner against # ref: https://docs.gitlab.com/runner/commands/#gitlab-runner-register # gitlabUrl: https://gitlab.my.domain.example.com/ # The registration token for adding new runners to the GitLab server # Retrieve this value from your GitLab instance # For more info: https://docs.gitlab.com/ci/runners/ # runnerRegistrationToken: "yrnZW46BrtBFqM7xDzE7dddd" # For RBAC support: rbac: create: true # Run all containers with the privileged flag enabled # This flag allows the docker:dind image to run if you need to run Docker commands # Read the docs before turning this on: # https://docs.gitlab.com/runner/executors/kubernetes/#using-dockerdind runners: privileged: true ``` 1. 単一のマニフェストファイルを作成して、GitLab Runnerチャートをクラスターエージェントと共にインストールします: ```shell helm template --namespace GITLAB-NAMESPACE gitlab-runner -f runner-chart-values.yaml gitlab/gitlab-runner > runner-manifest.yaml ``` `GITLAB-NAMESPACE`をネームスペースに置き換えます。[例を表示](#example-runner-manifest)。 1. `runner-manifest.yaml`ファイルを編集して、`ServiceAccount`の`namespace`を含めます。`helm template`の出力には、生成されたリソースに`ServiceAccount`ネームスペースが含まれていません。 ```yaml --- # Source: gitlab-runner/templates/service-account.yaml apiVersion: v1 kind: ServiceAccount metadata: annotations: name: gitlab-runner-gitlab-runner namespace: gitlab labels: ... ``` 1. `runner-manifest.yaml`をKubernetesマニフェストを保持するリポジトリにプッシュします。 1. 
[GitOps](https://docs.gitlab.com/user/clusters/agent/gitops/)を使用してRunnerマニフェストを同期するようにエージェントを設定します。次に例を示します: ```yaml gitops: manifest_projects: - id: path/to/manifest/project paths: - glob: 'path/to/runner-manifest.yaml' ``` これで、エージェントがマニフェストの更新についてリポジトリを確認するたびに、クラスターが更新されてGitLab Runnerが含まれるようになります。 ## Runnerマニフェストの例 {#example-runner-manifest} この例は、サンプルRunnerマニフェストファイルを示しています。プロジェクトのニーズに合わせて、独自の`manifest.yaml`ファイルを作成します。 ```yaml --- # Source: gitlab-runner/templates/service-account.yaml apiVersion: v1 kind: ServiceAccount metadata: annotations: name: gitlab-runner-gitlab-runner labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: "gitlab-runner" heritage: "Helm" --- # Source: gitlab-runner/templates/secrets.yaml apiVersion: v1 kind: Secret metadata: name: "gitlab-runner-gitlab-runner" labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: "gitlab-runner" heritage: "Helm" type: Opaque data: runner-registration-token: "FAKE-TOKEN" runner-token: "" --- # Source: gitlab-runner/templates/configmap.yaml apiVersion: v1 kind: ConfigMap metadata: name: gitlab-runner-gitlab-runner labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: "gitlab-runner" heritage: "Helm" data: entrypoint: | #!/bin/bash set -e mkdir -p /home/gitlab-runner/.gitlab-runner/ cp /scripts/config.toml /home/gitlab-runner/.gitlab-runner/ # Register the runner if [[ -f /secrets/accesskey && -f /secrets/secretkey ]]; then export CACHE_S3_ACCESS_KEY=$(cat /secrets/accesskey) export CACHE_S3_SECRET_KEY=$(cat /secrets/secretkey) fi if [[ -f /secrets/gcs-application-credentials-file ]]; then export GOOGLE_APPLICATION_CREDENTIALS="/secrets/gcs-application-credentials-file" elif [[ -f /secrets/gcs-application-credentials-file ]]; then export GOOGLE_APPLICATION_CREDENTIALS="/secrets/gcs-application-credentials-file" else if [[ -f /secrets/gcs-access-id && -f /secrets/gcs-private-key ]]; then export CACHE_GCS_ACCESS_ID=$(cat 
/secrets/gcs-access-id) # echo -e used to make private key multiline (in google json auth key private key is one line with \n) export CACHE_GCS_PRIVATE_KEY=$(echo -e $(cat /secrets/gcs-private-key)) fi fi if [[ -f /secrets/runner-registration-token ]]; then export REGISTRATION_TOKEN=$(cat /secrets/runner-registration-token) fi if [[ -f /secrets/runner-token ]]; then export CI_SERVER_TOKEN=$(cat /secrets/runner-token) fi if ! sh /scripts/register-the-runner; then exit 1 fi # Run pre-entrypoint-script if ! bash /scripts/pre-entrypoint-script; then exit 1 fi # Start the runner exec /entrypoint run --user=gitlab-runner \ --working-directory=/home/gitlab-runner config.toml: | concurrent = 10 check_interval = 30 log_level = "info" listen_address = ':9252' configure: | set -e cp /init-secrets/* /secrets register-the-runner: | #!/bin/bash MAX_REGISTER_ATTEMPTS=30 for i in $(seq 1 "${MAX_REGISTER_ATTEMPTS}"); do echo "Registration attempt ${i} of ${MAX_REGISTER_ATTEMPTS}" /entrypoint register \ --non-interactive retval=$? 
if [ ${retval} = 0 ]; then break elif [ ${i} = ${MAX_REGISTER_ATTEMPTS} ]; then exit 1 fi sleep 5 done exit 0 check-live: | #!/bin/bash if /usr/bin/pgrep -f .*register-the-runner; then exit 0 elif /usr/bin/pgrep gitlab.*runner; then exit 0 else exit 1 fi pre-entrypoint-script: | --- # Source: gitlab-runner/templates/role.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: "Role" metadata: name: gitlab-runner-gitlab-runner labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: "gitlab-runner" heritage: "Helm" rules: - apiGroups: [""] resources: ["*"] verbs: ["*"] --- # Source: gitlab-runner/templates/role-binding.yaml apiVersion: rbac.authorization.k8s.io/v1 kind: "RoleBinding" metadata: name: gitlab-runner-gitlab-runner labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: "gitlab-runner" heritage: "Helm" roleRef: apiGroup: rbac.authorization.k8s.io kind: "Role" name: gitlab-runner-gitlab-runner subjects: - kind: ServiceAccount name: gitlab-runner-gitlab-runner namespace: "gitlab" --- # Source: gitlab-runner/templates/deployment.yaml apiVersion: apps/v1 kind: Deployment metadata: name: gitlab-runner-gitlab-runner labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: "gitlab-runner" heritage: "Helm" spec: replicas: 1 selector: matchLabels: app: gitlab-runner-gitlab-runner template: metadata: labels: app: gitlab-runner-gitlab-runner chart: gitlab-runner-0.58.2 release: "gitlab-runner" heritage: "Helm" annotations: checksum/configmap: a6623303f6fcc3a043e87ea937bb8399d2d0068a901aa9c3419ed5c7a5afa9db checksum/secrets: 32c7d2c16918961b7b84a005680f748e774f61c6f4e4da30650d400d781bbb30 prometheus.io/scrape: 'true' prometheus.io/port: '9252' spec: securityContext: runAsUser: 100 fsGroup: 65533 terminationGracePeriodSeconds: 3600 initContainers: - name: configure command: ['sh', '/config/configure'] image: gitlab/gitlab-runner:alpine-v13.4.1 imagePullPolicy: "IfNotPresent" env: - name: CI_SERVER_URL value: 
"https://gitlab.qa.joaocunha.eu/" - name: CLONE_URL value: "" - name: RUNNER_REQUEST_CONCURRENCY value: "1" - name: RUNNER_EXECUTOR value: "kubernetes" - name: REGISTER_LOCKED value: "true" - name: RUNNER_TAG_LIST value: "" - name: RUNNER_OUTPUT_LIMIT value: "4096" - name: KUBERNETES_IMAGE value: "ubuntu:16.04" - name: KUBERNETES_PRIVILEGED value: "true" - name: KUBERNETES_NAMESPACE value: "gitlab" - name: KUBERNETES_POLL_TIMEOUT value: "180" - name: KUBERNETES_CPU_LIMIT value: "" - name: KUBERNETES_CPU_LIMIT_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_MEMORY_LIMIT value: "" - name: KUBERNETES_MEMORY_LIMIT_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_CPU_REQUEST value: "" - name: KUBERNETES_CPU_REQUEST_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_MEMORY_REQUEST value: "" - name: KUBERNETES_MEMORY_REQUEST_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_SERVICE_ACCOUNT value: "" - name: KUBERNETES_SERVICE_CPU_LIMIT value: "" - name: KUBERNETES_SERVICE_MEMORY_LIMIT value: "" - name: KUBERNETES_SERVICE_CPU_REQUEST value: "" - name: KUBERNETES_SERVICE_MEMORY_REQUEST value: "" - name: KUBERNETES_HELPER_CPU_LIMIT value: "" - name: KUBERNETES_HELPER_MEMORY_LIMIT value: "" - name: KUBERNETES_HELPER_CPU_REQUEST value: "" - name: KUBERNETES_HELPER_MEMORY_REQUEST value: "" - name: KUBERNETES_HELPER_IMAGE value: "" - name: KUBERNETES_PULL_POLICY value: "" volumeMounts: - name: runner-secrets mountPath: /secrets readOnly: false - name: scripts mountPath: /config readOnly: true - name: init-runner-secrets mountPath: /init-secrets readOnly: true resources: {} serviceAccountName: gitlab-runner-gitlab-runner containers: - name: gitlab-runner-gitlab-runner image: gitlab/gitlab-runner:alpine-v13.4.1 imagePullPolicy: "IfNotPresent" lifecycle: preStop: exec: command: ["/entrypoint", "unregister", "--all-runners"] command: ["/bin/bash", "/scripts/entrypoint"] env: - name: CI_SERVER_URL value: "https://gitlab.qa.joaocunha.eu/" - name: CLONE_URL value: "" - name: 
RUNNER_REQUEST_CONCURRENCY value: "1" - name: RUNNER_EXECUTOR value: "kubernetes" - name: REGISTER_LOCKED value: "true" - name: RUNNER_TAG_LIST value: "" - name: RUNNER_OUTPUT_LIMIT value: "4096" - name: KUBERNETES_IMAGE value: "ubuntu:16.04" - name: KUBERNETES_PRIVILEGED value: "true" - name: KUBERNETES_NAMESPACE value: "gitlab" - name: KUBERNETES_POLL_TIMEOUT value: "180" - name: KUBERNETES_CPU_LIMIT value: "" - name: KUBERNETES_CPU_LIMIT_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_MEMORY_LIMIT value: "" - name: KUBERNETES_MEMORY_LIMIT_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_CPU_REQUEST value: "" - name: KUBERNETES_CPU_REQUEST_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_MEMORY_REQUEST value: "" - name: KUBERNETES_MEMORY_REQUEST_OVERWRITE_MAX_ALLOWED value: "" - name: KUBERNETES_SERVICE_ACCOUNT value: "" - name: KUBERNETES_SERVICE_CPU_LIMIT value: "" - name: KUBERNETES_SERVICE_MEMORY_LIMIT value: "" - name: KUBERNETES_SERVICE_CPU_REQUEST value: "" - name: KUBERNETES_SERVICE_MEMORY_REQUEST value: "" - name: KUBERNETES_HELPER_CPU_LIMIT value: "" - name: KUBERNETES_HELPER_MEMORY_LIMIT value: "" - name: KUBERNETES_HELPER_CPU_REQUEST value: "" - name: KUBERNETES_HELPER_MEMORY_REQUEST value: "" - name: KUBERNETES_HELPER_IMAGE value: "" - name: KUBERNETES_PULL_POLICY value: "" livenessProbe: exec: command: ["/bin/bash", "/scripts/check-live"] initialDelaySeconds: 60 timeoutSeconds: 1 periodSeconds: 10 successThreshold: 1 failureThreshold: 3 readinessProbe: exec: command: ["/usr/bin/pgrep","gitlab.*runner"] initialDelaySeconds: 10 timeoutSeconds: 1 periodSeconds: 10 successThreshold: 1 failureThreshold: 3 ports: - name: metrics containerPort: 9252 volumeMounts: - name: runner-secrets mountPath: /secrets - name: etc-gitlab-runner mountPath: /home/gitlab-runner/.gitlab-runner - name: scripts mountPath: /scripts resources: {} volumes: - name: runner-secrets emptyDir: medium: "Memory" - name: etc-gitlab-runner emptyDir: medium: "Memory" - name: 
init-runner-secrets projected: sources: - secret: name: "gitlab-runner-gitlab-runner" items: - key: runner-registration-token path: runner-registration-token - key: runner-token path: runner-token - name: scripts configMap: name: gitlab-runner-gitlab-runner ``` ## トラブルシューティング {#troubleshooting} ### エラー: `associative list with keys has an element that omits key field "protocol"`(コンポーネントビルドエラー: specは有効なJSONスキーマである必要があります) {#error-associative-list-with-keys-has-an-element-that-omits-key-field-protocol} [Kubernetes v1.19のバグ](https://github.com/kubernetes-sigs/structured-merge-diff/issues/130)により、Kubernetes向けGitLabエージェントを使用してGitLab Runnerまたはその他のアプリケーションをインストールする際に、このエラーが表示される場合があります。これを修正するには、次のいずれかの方法があります: - Kubernetesクラスターをv1.20以降にアップグレードします。 - `containers.ports`サブセクションに`protocol: TCP`を追加します: ```yaml ... ports: - name: metrics containerPort: 9252 protocol: TCP ... ``` ================================================ FILE: docs-locale/ja-jp/install/kubernetes.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runner Helmチャート --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runner Helmチャートは、GitLab RunnerインスタンスをKubernetesクラスターにデプロイするための公式の手法です。このチャートにより、GitLab Runnerが次のように設定されます: - GitLab Runnerの[Kubernetes executor](../executors/kubernetes/_index.md)を使用して実行する。 - 新しいCI/CDジョブごとに、指定されたネームスペースで新しいポッドをプロビジョニングする。 ## HelmチャートでGitLab Runnerを設定する {#configure-gitlab-runner-with-the-helm-chart} GitLab Runnerの設定の変更を`values.yaml`に保存します。このファイルの設定については、以下を参照してください: - チャートリポジトリ内のデフォルトの[`values.yaml`](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/main/values.yaml)設定。 - 
[値ファイル](https://helm.sh/docs/chart_template_guide/values_files/)に関するHelmドキュメント。値ファイルによってデフォルト値がオーバーライドされる仕組みが説明されています。 GitLab Runnerを適切に実行するには、設定ファイルで次の値を設定する必要があります: - `gitlabUrl`: Runnerの登録先のGitLabサーバーの完全なURL(`https://gitlab.example.com`など)。 - `rbac: { create: true }`: GitLab Runnerがジョブを実行するポッドを作成するためのRBAC(ロールベースのアクセス制御)ルールを作成します。 - 既存の`serviceAccount`を使用する場合は、`rbac`にサービスアカウント名を追加してください: ```yaml rbac: create: false serviceAccount: create: false name: your-service-account ``` - `serviceAccount`に必要な最小限の権限については、[Runner APIの権限を設定する](../executors/kubernetes/_index.md#configure-runner-api-permissions)を参照してください。 - `runnerToken`: [GitLab UIでRunnerを作成する](https://docs.gitlab.com/ci/runners/runners_scope/#create-an-instance-runner-with-a-runner-authentication-token)ときに取得した認証トークン。 - このトークンを直接設定するか、シークレットに保存します。 その他の[オプションの設定](kubernetes_helm_chart_configuration.md)も使用できます。 これで、[GitLab Runnerをインストール](#install-gitlab-runner-with-the-helm-chart)する準備ができました。 ## Helmチャートを使用してGitLab Runnerをインストールする {#install-gitlab-runner-with-the-helm-chart} 前提要件: - GitLabサーバーのAPIにクラスターからアクセスできること。 - ベータAPIが有効になっているKubernetes 1.4以降。 - `kubectl` CLIがローカルにインストールされ、クラスターに対して認証されていること。 - [Helmクライアント](https://helm.sh/docs/using_helm/#installing-the-helm-client)がマシンにローカルにインストールされていること。 - [`values.yaml`で必要な値](#configure-gitlab-runner-with-the-helm-chart)をすべて設定していること。 HelmチャートからGitLab Runnerをインストールするには、次の手順に従います: 1. GitLab Helmリポジトリを追加します。 ```shell helm repo add gitlab https://charts.gitlab.io ``` 1. Helm 2を使用している場合は、`helm init`でHelmを初期化します。 1. アクセスできるGitLab Runnerのバージョンを確認します: ```shell helm search repo -l gitlab/gitlab-runner ``` 1. GitLab Runnerの最新バージョンにアクセスできない場合は、次のコマンドでチャートを更新します: ```shell helm repo update gitlab ``` 1. 
`values.yaml`ファイルでGitLab Runnerを[設定](#configure-gitlab-runner-with-the-helm-chart)したら、必要に応じてパラメータを変更して、次のコマンドを実行します: ```shell # For Helm 2 helm install --namespace <NAMESPACE> --name gitlab-runner -f <CONFIG_VALUES_FILE> gitlab/gitlab-runner # For Helm 3 helm install --namespace <NAMESPACE> gitlab-runner -f <CONFIG_VALUES_FILE> gitlab/gitlab-runner ``` - `<NAMESPACE>`: GitLab RunnerをインストールするKubernetesネームスペース。 - `<CONFIG_VALUES_FILE>`: カスタム設定を含む値ファイルのパス。作成するには、[HelmチャートでGitLab Runnerを設定する](#configure-gitlab-runner-with-the-helm-chart)を参照してください。 - GitLab Runner Helmチャートの特定バージョンをインストールするには、`helm install`コマンドに`--version <RUNNER_HELM_CHART_VERSION>`を追加します。任意のバージョンのチャートをインストールできますが、新しい`values.yml`には古いバージョンのチャートとの互換性がない場合があります。 ### 使用可能なGitLab Runner Helmチャートのバージョンを確認する {#check-available-gitlab-runner-helm-chart-versions} HelmチャートとGitLab Runnerのバージョニング方法は異なります。この2つの間のバージョンマッピングを確認するには、ご使用のHelmのバージョンに対応するコマンドを実行します: ```shell # For Helm 2 helm search -l gitlab/gitlab-runner # For Helm 3 helm search repo -l gitlab/gitlab-runner ``` 出力の例は次のとおりです: ```plaintext NAME CHART VERSION APP VERSION DESCRIPTION gitlab/gitlab-runner 0.64.0 16.11.0 GitLab Runner gitlab/gitlab-runner 0.63.0 16.10.0 GitLab Runner gitlab/gitlab-runner 0.62.1 16.9.1 GitLab Runner gitlab/gitlab-runner 0.62.0 16.9.0 GitLab Runner gitlab/gitlab-runner 0.61.3 16.8.1 GitLab Runner gitlab/gitlab-runner 0.61.2 16.8.0 GitLab Runner ... 
``` ## Helmチャートを使用してGitLab Runnerをアップグレードする {#upgrade-gitlab-runner-with-the-helm-chart} 前提要件: - GitLab Runnerチャートをインストールしていること。 - GitLabでRunnerを一時停止していること。これにより、[完了時の認証エラー](../faq/_index.md#helm-chart-error--unauthorized)など、ジョブで発生する問題を回避できます。 - すべてのジョブが完了していることを確認していること。 設定を変更するか、チャートを更新するには、必要に応じてパラメータを変更して`helm upgrade`を使用します: ```shell helm upgrade --namespace <NAMESPACE> -f <CONFIG_VALUES_FILE> <RELEASE-NAME> gitlab/gitlab-runner ``` - `<NAMESPACE>`: GitLab RunnerをインストールしたKubernetesネームスペース。 - `<CONFIG_VALUES_FILE>`: カスタム設定を含む値ファイルのパス。作成するには、[HelmチャートでGitLab Runnerを設定する](#configure-gitlab-runner-with-the-helm-chart)を参照してください。 - `<RELEASE-NAME>`: チャートをインストールしたときに付けた名前。インストールセクションの例では`gitlab-runner`という名前が付けられています。 - GitLab Runner Helmチャートの最新バージョンではなく特定バージョンに更新するには、`helm upgrade`コマンドに`--version <RUNNER_HELM_CHART_VERSION>`を追加します。 ## Helmチャートを使用してGitLab Runnerをアンインストールする {#uninstall-gitlab-runner-with-the-helm-chart} GitLab Runnerをアンインストールするには、次の手順に従います: 1. GitLabでRunnerを一時停止し、すべてのジョブが完了していることを確認します。これにより、[完了時の認証エラー](../faq/_index.md#helm-chart-error--unauthorized)など、ジョブに関連する問題を回避できます。 1. このコマンドを実行します(必要に応じて変更します): ```shell helm delete --namespace <NAMESPACE> <RELEASE-NAME> ``` - `<NAMESPACE>`は、GitLab RunnerをインストールしたKubernetesネームスペースです。 - `<RELEASE-NAME>`は、チャートをインストールしたときに付けた名前です。このページの[インストールセクション](#install-gitlab-runner-with-the-helm-chart)では、これは`gitlab-runner`でした。 ================================================ FILE: docs-locale/ja-jp/install/kubernetes_helm_chart_configuration.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runner Helm Chartを設定する --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} オプションの設定をGitLab Runner Helmチャートに追加できます。 ## 設定テンプレートでキャッシュを使用する {#use-the-cache-with-a-configuration-template} 設定テンプレートでキャッシュを使用するには、`values.yaml`で次の変数を設定します: - `runners.cache.secretName`: 
オブジェクトストレージプロバイダーのシークレット名。オプションは、`s3access`、`gcsaccess`、`google-application-credentials`、または`azureaccess`です。 - `runners.config`: TOML形式の[キャッシュ](../configuration/advanced-configuration.md#the-runnerscache-section)に関するその他の設定。 ### Amazon S3 {#amazon-s3} [静的認証情報を使用するAmazon S3](https://aws.amazon.com/blogs/security/wheres-my-secret-access-key/)を設定するには、次の手順に従います: 1. 次の例を`values.yaml`に追加します。必要に応じて値を変更してください: ```yaml runners: config: | [[runners]] [runners.kubernetes] image = "ubuntu:22.04" [runners.cache] Type = "s3" Path = "runner" Shared = true [runners.cache.s3] ServerAddress = "s3.amazonaws.com" BucketName = "my_bucket_name" BucketLocation = "eu-west-1" Insecure = false AuthenticationType = "access-key" cache: secretName: s3access ``` 1. `accesskey`と`secretkey`を含むKubernetesのシークレット`s3access`を作成します: ```shell kubectl create secret generic s3access \ --from-literal=accesskey="YourAccessKey" \ --from-literal=secretkey="YourSecretKey" ``` ### Google Cloud Storage(GCS) {#google-cloud-storage-gcs} Google Cloud Storageは、静的な認証情報を使用して複数の方法で設定できます。 #### 直接設定された静的認証情報 {#static-credentials-directly-configured} [アクセスIDとプライベートキーを含む](../configuration/advanced-configuration.md#the-runnerscache-section)認証情報を使用してGCSを設定するには、次の手順に従います: 1. 次の例を`values.yaml`に追加します。必要に応じて値を変更してください: ```yaml runners: config: | [[runners]] [runners.kubernetes] image = "ubuntu:22.04" [runners.cache] Type = "gcs" Path = "runner" Shared = true [runners.cache.gcs] BucketName = "runners-cache" cache: secretName: gcsaccess ``` 1. `gcs-access-id`と`gcs-private-key`を含むKubernetesのシークレット`gcsaccess`を作成します: ```shell kubectl create secret generic gcsaccess \ --from-literal=gcs-access-id="YourAccessID" \ --from-literal=gcs-private-key="YourPrivateKey" ``` #### GCPからダウンロードしたJSONファイル内の静的認証情報 {#static-credentials-in-a-json-file-downloaded-from-gcp} Google Cloud Platformからダウンロードした[JSONファイル内の認証情報を使用してGCSを設定する](../configuration/advanced-configuration.md#the-runnerscache-section)には、次の手順に従います: 1. 
次の例を`values.yaml`に追加します。必要に応じて値を変更してください: ```yaml runners: config: | [[runners]] [runners.kubernetes] image = "ubuntu:22.04" [runners.cache] Type = "gcs" Path = "runner" Shared = true [runners.cache.gcs] BucketName = "runners-cache" cache: secretName: google-application-credentials secrets: - name: google-application-credentials ``` 1. `google-application-credentials`という名前のKubernetesのシークレットを作成し、このシークレットを含むJSONファイルを読み込みます。必要に応じてパスを変更します: ```shell kubectl create secret generic google-application-credentials \ --from-file=gcs-application-credentials-file=./PATH-TO-CREDENTIALS-FILE.json ``` ### Azure {#azure} [Azure Blob Storageを設定する](../configuration/advanced-configuration.md#the-runnerscacheazure-section)には、次の手順に従います: 1. 次の例を`values.yaml`に追加します。必要に応じて値を変更してください: ```yaml runners: config: | [[runners]] [runners.kubernetes] image = "ubuntu:22.04" [runners.cache] Type = "azure" Path = "runner" Shared = true [runners.cache.azure] ContainerName = "CONTAINER_NAME" StorageDomain = "blob.core.windows.net" cache: secretName: azureaccess ``` 1. `azure-account-name`と`azure-account-key`を含むKubernetesのシークレット`azureaccess`を作成します: ```shell kubectl create secret generic azureaccess \ --from-literal=azure-account-name="YourAccountName" \ --from-literal=azure-account-key="YourAccountKey" ``` Helmチャートのキャッシュの詳細については、[`values.yaml`](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/main/values.yaml)を参照してください。 ### 永続ボリュームクレーム {#persistent-volume-claim} どのオブジェクトストレージオプションも動作しない場合は、キャッシュに永続ボリュームクレーム(PVC)を使用できます。 PVCを使用するようにキャッシュを設定するには、次のようにします: 1. ジョブポッドが実行されるネームスペースで[PVCを作成](https://kubernetes.io/docs/concepts/storage/persistent-volumes/#persistentvolumeclaims)します。 {{< alert type="note" >}} 複数のジョブポッドが同じキャッシュPVCにアクセスできるようにする場合は、`ReadWriteMany`アクセスモードにする必要があります。 {{< /alert >}} 1. 
PVCを`/cache`ディレクトリにマウントします: ```yaml runners: config: | [[runners]] [runners.kubernetes] image = "ubuntu:22.04" [[runners.kubernetes.volumes.pvc]] name = "cache-pvc" mount_path = "/cache" ``` ## RBACサポートを有効にする {#enable-rbac-support} クラスターでRBAC(ロールベースのアクセス制御)が有効になっている場合、このチャートにより作成されるチャート独自サービスアカウントや[自分で作成するサービスアカウント](https://kubernetes.io/docs/reference/access-authn-authz/rbac/#service-account-permissions)を使用することができます。 - チャートにサービスアカウントを作成させるには、`rbac.create`をtrueに設定します: ```yaml rbac: create: true ``` - 既存のサービスアカウントを使用するには、`serviceAccount.name`を設定します: ```yaml rbac: create: false serviceAccount: create: false name: your-service-account ``` ## Runnerの最大並行処理を制御する {#control-maximum-runner-concurrency} Kubernetesにデプロイされた1つのRunnerは、追加のRunnerポッドを開始することで、複数のジョブを並列実行できます。一度に実行可能なポッドの最大数を変更するには、[`concurrent`設定](../configuration/advanced-configuration.md#the-global-section)を編集します。デフォルトは`10`です: ```yaml ## Configure the maximum number of concurrent jobs ## ref: https://docs.gitlab.com/runner/configuration/advanced-configuration/#the-global-section ## concurrent: 10 ``` この設定の詳細については、GitLab Runnerの高度な設定のドキュメントの[グローバルセクション](../configuration/advanced-configuration.md#the-global-section)を参照してください。 ## GitLab RunnerでDocker-in-Dockerコンテナを実行する {#run-docker-in-docker-containers-with-gitlab-runner} GitLab RunnerでDocker-in-Dockerコンテナを使用するには、次のようにします: - 有効にするには、[Runnerに特権コンテナを使用する](#use-privileged-containers-for-the-runners)を参照してください。 - Docker-in-Dockerの実行方法については、[GitLab Runnerのドキュメント](../executors/kubernetes/_index.md#using-docker-in-builds)を参照してください。 ## Runnerに特権コンテナを使用する {#use-privileged-containers-for-the-runners} GitLab CI/CDジョブでDocker実行可能ファイルを使用するには、特権コンテナを使用するようにRunnerを設定します。 前提要件: - リスクを理解していること。リスクについての説明は[GitLab CI/CD Runnerドキュメント](../executors/kubernetes/_index.md#using-docker-in-builds)に記載されています。 - GitLab RunnerインスタンスがGitLabの特定のプロジェクトに登録されており、そのCI/CDジョブを信頼していること。 `values.yaml`で特権モードを有効にするには、次の行を追加します: ```yaml runners: config: | [[runners]] [runners.kubernetes] # Run all containers 
with the privileged flag enabled. privileged = true ... ``` 詳細については、[`[runners.kubernetes]`](../configuration/advanced-configuration.md#the-runnerskubernetes-section)セクションに関する高度な設定の情報を参照してください。 ## プライベートレジストリのイメージを使用する {#use-an-image-from-a-private-registry} プライベートレジストリのイメージを使用するには、`imagePullSecrets`を構成します。 1. CI/CDジョブに使用するKubernetesネームスペースに1つ以上のシークレットを作成します。このコマンドは、`image_pull_secrets`で機能するシークレットを作成します: ```shell kubectl create secret docker-registry \ --namespace \ --docker-server="https://" \ --docker-username="" \ --docker-password="" ``` 1. GitLab Runner Helm Chartバージョン0.53.x以降では、`config.toml`で`runners.config`に指定されているテンプレートからの`image_pull_secret`を設定します: ```yaml runners: config: | [[runners]] [runners.kubernetes] ## Specify one or more imagePullSecrets ## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/ ## image_pull_secrets = [your-image-pull-secret] ``` 詳細については、Kubernetesドキュメントの[Pull an image from a private registry](https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/)を参照してください。 1. GitLab Runner Helmチャートバージョン0.52以前の場合は、`values.yaml`で`runners.imagePullSecrets`の値を設定します。この値を設定すると、コンテナは`--kubernetes-image-pull-secrets ""`をイメージエントリポイントスクリプトに追加します。これにより、Kubernetes executorの`config.toml`の設定で`image_pull_secrets`パラメータを設定する必要がなくなります。 ```yaml runners: imagePullSecrets: [your-image-pull-secret] ``` {{< alert type="note" >}} `imagePullSecrets`の値には、`name`タグがプレフィックスとして付加されていません。これはKubernetesリソースでの慣例です。1つのレジストリ認証情報のみを使用する場合でも、この値には1つ以上のシークレット名の配列が必要です。 {{< /alert >}} `imagePullSecrets`の作成方法の詳細については、Kubernetesドキュメントの[Pull an Image from a Private Registry](https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/)を参照してください。 {{< alert type="note" >}} ジョブポッドの作成時に、GitLab Runnerは自動的にイメージアクセスを次の2つのステップで処理します: 1. 
GitLab Runnerは、既存のDocker認証情報をKubernetes secretsに変換し、レジストリからイメージをプルできるようにします。手動で設定されたimagePullSecretsがクラスター内に実際に存在するかどうかも確認します。静的に定義された認証情報、認証情報ストア、または認証情報ヘルパーの詳細については、[プライベートコンテナイメージからのイメージへのアクセス](https://docs.gitlab.com/ci/docker/using_docker_images/#access-an-image-from-a-private-container-registry)を参照してください。 1. GitLab Runnerはジョブポッドを作成し、2種類の認証情報(`imagePullSecrets`と変換されたDocker認証情報)をその順にアタッチします。 Kubernetesがコンテナイメージをプルする必要がある場合、機能するものがみつかるまで、認証情報を1つずつ試します。 {{< /alert >}} ## カスタム証明書を使用してGitLabにアクセスする {#access-gitlab-with-a-custom-certificate} カスタム証明書を使用するには、GitLab Runner Helmチャートに[Kubernetesシークレット](https://kubernetes.io/docs/concepts/configuration/secret/)を提供します。このシークレットは、コンテナの`/home/gitlab-runner/.gitlab-runner/certs`ディレクトリに追加されます: 1. [証明書を準備する](#prepare-your-certificate) 1. [Kubernetesのシークレットを作成する](#create-a-kubernetes-secret) 1. [チャートにシークレットを提供する](#provide-the-secret-to-the-chart) ### 証明書を準備する {#prepare-your-certificate} Kubernetesシークレットの各キー名は、ディレクトリ内のファイル名として使用されます。ファイルの内容は、キーに関連付けられた値です: - 使用するファイル名の形式は`.crt`である必要があります。たとえば`gitlab.your-domain.com.crt`などです。 - 中間証明書を同じファイル内のサーバー証明書に連結します。 - 使用するホスト名は、証明書が登録されているホスト名である必要があります。 ### Kubernetesのシークレットを作成する {#create-a-kubernetes-secret} [自動生成された自己署名ワイルドカード証明書](https://docs.gitlab.com/charts/installation/tls/#option-4-use-auto-generated-self-signed-wildcard-certificate)の手法を使用してGitLab Helmチャートをインストールした場合、シークレットが作成されています。 自動生成された自己署名ワイルドカード証明書を使用してGitLab Helmチャートをインストールしなかった場合は、シークレットを作成します。以下のコマンドは、証明書をシークレットとしてKubernetesに保存し、ファイルとしてGitLab Runnerコンテナに提示します。 - 証明書が現在のディレクトリに含まれており、``形式に従っている場合は、必要に応じてこのコマンドを変更します: ```shell kubectl create secret generic \ --namespace \ --from-file= ``` - ``: GitLab RunnerをインストールするKubernetesネームスペース。 - ``: Kubernetesシークレットリソース名(`gitlab-domain-cert`など)。 - ``: 現在のディレクトリ内にある、シークレットにインポートする証明書のファイル名。 - 証明書が別のディレクトリにある場合、または``形式に従っていない場合は、ターゲットとして使用するファイル名を指定する必要があります: ```shell kubectl create secret generic \ --namespace \ --from-file== ``` - 
``は、Runnerコンテナに提示される証明書ファイルの名前です(`gitlab.hostname.crt`など)。 - ``は、シークレットにインポートする証明書のファイル名です。これは、現在のディレクトリを基準とした相対的な名前です。例: `cert-directory/my-gitlab-certificate.crt`。 ### チャートにシークレットを提供する {#provide-the-secret-to-the-chart} `values.yaml`で、`certsSecretName`を同じネームスペース内のKubernetesシークレットオブジェクトのリソース名に設定します。これにより、GitLab Runnerが使用するカスタム証明書を渡すことができます。前述の例では、リソース名は`gitlab-domain-cert`でした: ```yaml certsSecretName: ``` 詳細については、GitLabサーバーを対象とする[自己署名証明書のサポートされているオプション](../configuration/tls-self-signed.md#supported-options-for-self-signed-certificates-targeting-the-gitlab-server)を参照してください。 ## ポッドラベルをCI環境変数キーに設定する {#set-pod-labels-to-ci-environment-variable-keys} `values.yaml`ファイルでは、環境変数をポッドラベルとして使用できません。詳細については、[環境変数キーをポッドラベルとして設定できない](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/issues/173)を参照してください。一時的な解決策として、[このイシューに記載されている回避策](https://gitlab.com/gitlab-org/charts/gitlab-runner/-/issues/173#note_351057890)を使用してください。 ## Ubuntuベースの`gitlab-runner` Dockerイメージに切り替える {#switch-to-the-ubuntu-based-gitlab-runner-docker-image} デフォルトでは、GitLab Runner Helmチャートは、`musl libc`を使用する`gitlab/gitlab-runner`イメージのAlpineバージョンを使用します。`glibc`を使用するUbuntuベースのイメージに切り替える必要がある場合があります。 そのためには、`values.yaml`ファイルで次の値を使用してイメージを指定します: ```yaml # Specify the Ubuntu image, and set the version. You can also use the `ubuntu` or `latest` tags. 
image: gitlab/gitlab-runner:v17.3.0 # Update the security context values to the user ID in the Ubuntu image securityContext: fsGroup: 999 runAsUser: 999 ``` ## 非rootユーザーで実行する {#run-with-non-root-user} デフォルトの場合、非rootユーザーではGitLab Runnerのイメージが動作しません。[GitLab Runner UBI](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/container_registry/1766421)イメージと[GitLab Runner Helper UBI](https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/container_registry/1766433)イメージは、このような状況に対応して設計されています。 これらのイメージを使用するには、`values.yaml`でGitLab RunnerイメージとGitLab Runner Helperイメージを変更します: ```yaml image: registry: registry.gitlab.com image: gitlab-org/ci-cd/gitlab-runner-ubi-images/gitlab-runner-ocp tag: v16.11.0 securityContext: runAsNonRoot: true runAsUser: 999 runners: config: | [[runners]] [runners.kubernetes] helper_image = "registry.gitlab.com/gitlab-org/ci-cd/gitlab-runner-ubi-images/gitlab-runner-helper-ocp:x86_64-v16.11.0" [runners.kubernetes.pod_security_context] run_as_non_root = true run_as_user = 59417 ``` `run_as_user`は`nonroot`ユーザーのユーザーID(59417)を参照していますが、イメージはどのユーザーIDでも機能します。このユーザーIDがルートグループの一部であることが重要です。ルートグループの一部であっても、特定の特権が付与されるわけではありません。 ## FIPS準拠のGitLab Runnerを使用する {#use-a-fips-compliant-gitlab-runner} [FIPS準拠のGitLab Runner](_index.md#fips-compliant-gitlab-runner)を使用するには、`values.yaml`でGitLab RunnerイメージとHelperイメージを変更します: ```yaml image: registry: docker.io image: gitlab/gitlab-runner tag: ubi-fips runners: config: | [[runners]] [runners.kubernetes] helper_image_flavor = "ubi-fips" ``` ## 設定テンプレートを使用する {#use-a-configuration-template} [KubernetesでGitLab Runnerビルドポッドの動作を設定する](../executors/kubernetes/_index.md#configuration-settings)には、[設定テンプレートファイル](../register/_index.md#register-with-a-configuration-template)を使用します。設定テンプレートでは、Helmチャートと特定のRunner設定オプションを共有せずに、Runnerの任意のフィールドを設定できます。たとえば、以下のデフォルト設定は`chart`リポジトリの[`values.yaml`ファイルにあります](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/main/values.yaml): ```yaml runners: config: | [[runners]] 
[runners.kubernetes] image = "ubuntu:22.04" ``` `config.toml`が`values.yaml`に埋め込まれているため、`config:`セクションの値はTOMLを使用する必要があります(` = `ではなく`: `)。 executor固有の設定については、[`values.yaml`](https://gitlab.com/gitlab-org/charts/gitlab-runner/blob/main/values.yaml)ファイルを参照してください。 ================================================ FILE: docs-locale/ja-jp/install/kubernetes_troubleshooting.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runner Helmチャートのトラブルシューティング --- ## エラー: `Job failed (system failure): secrets is forbidden` {#error-job-failed-system-failure-secrets-is-forbidden} 次のエラーが表示された場合は、[RBACサポートを有効にする](kubernetes_helm_chart_configuration.md#enable-rbac-support)と、問題を解決できます: ```plaintext Using Kubernetes executor with image alpine ... ERROR: Job failed (system failure): secrets is forbidden: User "system:serviceaccount:gitlab:default" cannot create resource "secrets" in API group "" in the namespace "gitlab" ``` ## エラー: `Unable to mount volumes for pod` {#error-unable-to-mount-volumes-for-pod} 必要なシークレットのマウントボリュームに失敗する場合は、登録トークンまたはRunnerトークンがシークレットに保存されていることを確認してください。 ## Google Cloud Storageへの低速なアーティファクトアップロード {#slow-artifact-uploads-to-google-cloud-storage} Google Cloud Storageへのアーティファクトアップロードは、runnerヘルパーポッドがCPUバウンドになるため、パフォーマンスが低下する可能性があります(帯域幅レートが遅くなる)。この問題を軽減するには、ヘルパーポッドのCPU制限を増やしてください: ```yaml runners: config: | [[runners]] [runners.kubernetes] helper_cpu_limit = "250m" ``` 詳細については、[issue 28393](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28393#note_722733798)を参照してください。 ## エラー: `PANIC: creating directory: mkdir /nonexistent: permission denied` {#error-panic-creating-directory-mkdir-nonexistent-permission-denied} このエラーを解決するには、[UbuntuベースのGitLab Runner 
Dockerイメージ](kubernetes_helm_chart_configuration.md#switch-to-the-ubuntu-based-gitlab-runner-docker-image)に切り替えてください。 ## エラー: `invalid header field for "Private-Token"` {#error-invalid-header-field-for-private-token} `gitlab-runner-secret`の`runner-token`値が、末尾に改行文字(`\n`)を使用してbase64エンコードされている場合、このエラーが表示されることがあります: ```plaintext couldn't execute POST against "https:/gitlab.example.com/api/v4/runners/verify": net/http: invalid header field for "Private-Token" ``` この問題を解決するには、改行(`\n`)がトークン値に追加されていないことを確認してください。例: `echo -n <token> | base64`。 ## エラー: `FATAL: Runner configuration is reserved` {#error-fatal-runner-configuration-is-reserved} GitLab Runner Helmチャートのインストール後、ポッドログに次のエラーが表示されることがあります: ```plaintext FATAL: Runner configuration other than name and executor configuration is reserved (specifically --locked, --access-level, --run-untagged, --maximum-timeout, --paused, --tag-list, and --maintenance-note) and cannot be specified when registering with a runner authentication token. This configuration is specified on the GitLab server. 
Please try again without specifying any of those arguments ``` このエラーは、認証トークンを使用し、シークレットを介してトークンを提供する場合に発生します。これを修正するには、values YAMLファイルを確認し、非推奨の値を使用していないことを確認してください。どの値が非推奨になっているかの詳細については、[GitLab RunnerをHelmチャートでインストールする](https://docs.gitlab.com/ci/runners/new_creation_workflow/#installing-gitlab-runner-with-helm-chart)を参照してください。 ================================================ FILE: docs-locale/ja-jp/install/linux-manually.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GNU/LinuxにGitLab Runnerを手動でインストールする --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerは、`deb`パッケージ、`rpm`パッケージ、またはバイナリファイルを使用して手動でインストールできます。この方法は、以下の状況で最後の手段として使用してください: - GitLab Runnerをインストールするためにdeb/rpmリポジトリを使用できない場合 - ご使用のGNU/Linux OSがサポートされていない場合 ## 前提要件 {#prerequisites} GitLab Runnerを手動で実行する前に: - Docker executorを使用する場合は、最初にDockerをインストールしてください。 - 一般的な問題と解決策については、FAQセクションを確認してください。 ## deb/rpmパッケージを使用する {#using-debrpm-package} `deb`パッケージまたは`rpm`パッケージを使用して、GitLab Runnerをダウンロードしてインストールできます。 ### ダウンロード {#download} システムに対応するパッケージをダウンロードするには、次の手順に従います: 1. 最新のファイル名とオプションを<https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/index.html>で確認します。 1. パッケージマネージャーまたはアーキテクチャに対応するRunner-helperバージョンをダウンロードします。 1. GitLab Runner bleeding edgeリリースの[その他のタグ付きリリースのダウンロード](bleeding-edge.md#download-any-other-tagged-release)に関するドキュメントの説明に従って、バージョンを選択し、バイナリをダウンロードします。 たとえば、DebianまたはUbuntuの場合は次のようになります: ```shell # Replace ${arch} with any of the supported architectures, e.g. 
amd64, arm, arm64 # A full list of architectures can be found here https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/index.html curl -LJO "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/deb/gitlab-runner-helper-images.deb" curl -LJO "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/deb/gitlab-runner_${arch}.deb" ``` たとえば、CentOSまたはRed Hat Enterprise Linuxの場合は次のようになります: ```shell # Replace ${arch} with any of the supported architectures, e.g. amd64, arm, arm64 # A full list of architectures can be found here https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/index.html curl -LJO "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/rpm/gitlab-runner-helper-images.rpm" curl -LJO "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/rpm/gitlab-runner_${arch}.rpm" ``` たとえば、RHEL上のFIPS準拠のGitLab Runnerの場合は次のようになります: ```shell # Currently only amd64 is a supported arch # The FIPS compliant GitLab Runner version continues to include the helper images in one package. # A full list of architectures can be found here https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/index.html curl -LJO "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/rpm/gitlab-runner_amd64-fips.rpm" ``` ### インストール {#install} 1. 
ご使用のシステムに対応するパッケージを次のようにインストールします。 たとえば、DebianまたはUbuntuの場合は次のようになります: ```shell dpkg -i gitlab-runner-helper-images.deb gitlab-runner_.deb ``` たとえば、CentOSまたはRed Hat Enterprise Linuxの場合は次のようになります: ```shell dnf install -y gitlab-runner-helper-images.rpm gitlab-runner_.rpm ``` ### アップグレード {#upgrade} ご使用のシステムに対応する最新パッケージをダウンロードし、次のようにしてアップグレードします: たとえば、DebianまたはUbuntuの場合は次のようになります: ```shell dpkg -i gitlab-runner_.deb ``` たとえば、CentOSまたはRed Hat Enterprise Linuxの場合は次のようになります: ```shell dnf install -y gitlab-runner-helper-images.rpm gitlab-runner_.rpm ``` ## バイナリファイルを使用する {#using-binary-file} バイナリファイルを使用して、GitLab Runnerをダウンロードしてインストールできます。 ### インストール {#install-1} 1. ご使用のシステムに対応するバイナリのいずれかをダウンロードします: ```shell # Linux x86-64 sudo curl -L --output /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-linux-amd64" # Linux x86 sudo curl -L --output /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-linux-386" # Linux arm sudo curl -L --output /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-linux-arm" # Linux arm64 sudo curl -L --output /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-linux-arm64" # Linux s390x sudo curl -L --output /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-linux-s390x" # Linux ppc64le sudo curl -L --output /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-linux-ppc64le" # Linux riscv64 sudo curl -L --output /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-linux-riscv64" # Linux x86-64 FIPS Compliant sudo curl -L 
--output /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-linux-amd64-fips" ``` [Bleeding Edge - その他のタグ付きリリースをダウンロードする](bleeding-edge.md#download-any-other-tagged-release)の説明に従って、利用可能なすべてのバージョンのバイナリをダウンロードできます。 1. 実行のための権限を付与します: ```shell sudo chmod +x /usr/local/bin/gitlab-runner ``` 1. GitLab CIユーザーを作成します: ```shell sudo useradd --comment 'GitLab Runner' --create-home gitlab-runner --shell /bin/bash ``` 1. インストールしてサービスとして実行します: ```shell sudo gitlab-runner install --user=gitlab-runner --working-directory=/home/gitlab-runner sudo gitlab-runner start ``` rootの`$PATH`に`/usr/local/bin/`があることを確認してください。ない場合は、`command not found`エラーが発生する可能性があります。または、`gitlab-runner`を`/usr/bin/`のような別の場所にインストールすることもできます。 {{< alert type="note" >}} `gitlab-runner`がインストールされ、サービスとして実行されている場合、これはrootとして実行されますが、ジョブは`install`コマンドで指定されたユーザーとして実行します。つまり、キャッシュやアーティファクトなどの一部のジョブ機能は`/usr/local/bin/gitlab-runner`コマンドを実行する必要があります。したがって、ジョブ実行ユーザーが実行可能ファイルにアクセスできる必要があります。 {{< /alert >}} ### アップグレード {#upgrade-1} 1. サービスを停止します(以前と同様に、管理者権限でのコマンドプロンプトが必要です): ```shell sudo gitlab-runner stop ``` 1. GitLab Runner実行可能ファイルを置き換えるバイナリをダウンロードします。次に例を示します: ```shell sudo curl -L --output /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-linux-amd64" ``` [Bleeding Edge - その他のタグ付きリリースをダウンロードする](bleeding-edge.md#download-any-other-tagged-release)の説明に従って、利用可能なすべてのバージョンのバイナリをダウンロードできます。 1. 実行のための権限を付与します: ```shell sudo chmod +x /usr/local/bin/gitlab-runner ``` 1. サービスを開始します: ```shell sudo gitlab-runner start ``` ## 次の手順 {#next-steps} インストール後、[runnerを登録](../register/_index.md)してセットアップを完了します。 Runnerバイナリには、事前ビルド済みのヘルパーイメージが含まれていません。これらのコマンドを使用して、対応するバージョンのヘルパーイメージアーカイブをダウンロードし、適切な場所にコピーできます: ```shell mkdir -p /usr/local/bin/out/helper-images cd /usr/local/bin/out/helper-images ``` アーキテクチャに適したヘルパーイメージを選択します:
Ubuntuヘルパーイメージ ```shell # Linux x86-64 ubuntu wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-ubuntu-x86_64.tar.xz # Linux x86-64 ubuntu pwsh wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-ubuntu-x86_64-pwsh.tar.xz # Linux s390x ubuntu wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-ubuntu-s390x.tar.xz # Linux ppc64le ubuntu wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-ubuntu-ppc64le.tar.xz # Linux arm64 ubuntu wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-ubuntu-arm64.tar.xz # Linux arm ubuntu wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-ubuntu-arm.tar.xz # Linux x86-64 ubuntu specific version - v17.10.0 wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/v17.10.0/helper-images/prebuilt-ubuntu-x86_64.tar.xz ```
alpineヘルパーイメージ ```shell # Linux x86-64 alpine wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-alpine-x86_64.tar.xz # Linux x86-64 alpine pwsh wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-alpine-x86_64-pwsh.tar.xz # Linux s390x alpine wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-alpine-s390x.tar.xz # Linux riscv64 alpine edge wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-alpine-edge-riscv64.tar.xz # Linux arm64 alpine wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-alpine-arm64.tar.xz # Linux arm alpine wget https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/helper-images/prebuilt-alpine-arm.tar.xz ```
## 追加情報 {#additional-resources} - [Docker executorドキュメント](../executors/docker.md) - [Dockerをインストールします](https://docs.docker.com/engine/install/centos/#install-docker-ce) - [他のGitLab Runnerバージョンをダウンロード](bleeding-edge.md#download-any-other-tagged-release) - [FIPS準拠のGitLab Runner情報](_index.md#fips-compliant-gitlab-runner) - [GitLab Runner FAQ](../faq/_index.md)を参照してください。 - [deb/rpmリポジトリインストール](linux-repository.md) ================================================ FILE: docs-locale/ja-jp/install/linux-repository.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: パッケージマネージャーを使用して、GitLabリポジトリからGitLab Runnerをインストールします。 title: 公式のGitLabリポジトリを使用してGitLab Runnerをインストールする --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerをインストールするには、[GitLabリポジトリ](https://packages.gitlab.com/runner/gitlab-runner)のパッケージを使用できます。 ## サポートされているディストリビューション {#supported-distributions} GitLabでは、[Packagecloud](https://packages.gitlab.com/runner/gitlab-runner/)でサポートされている以下のバージョンのLinuxディストリビューションのパッケージを提供しています。新しいOSディストリビューションリリースに対応する新しいRunner `deb`または`rpm`パッケージは、Packagecloudでサポートされている場合、自動的に追加されます。 ### Debベースのディストリビューション {#deb-based-distributions} | ディストリビューション | サポート対象バージョン | |--------------|--------------------| | Debian | Duke, Forky, Trixie, Bookworm, Bullseye | | LinuxMint | Xia, Wilma, Virginia, Victoria, Vera, Vanessa | | Raspbian | Duke, Forky, Trixie, Bookworm, Bullseye | | Ubuntu | Questing, Noble, Jammy, Focal, Bionic | ### RPMベースのディストリビューション {#rpm-based-distributions} | ディストリビューション | サポート対象バージョン | |--------------|--------------------| | Amazon Linux | 2025, 2023, 2 | | Red Hat Enterprise Linux | 10、9、8、7 | | Fedora | 43, 42 | | Oracle Linux | 10、9、8、7 | | openSUSE | 16.0、15.6 | | SUSE Linux Enterprise Server | 15.7、15.6、15.5、15.4、12.5 | 
セットアップによっては、他のDebianまたはRPMベースのディストリビューションもサポートされている場合があります。これは、サポートされているGitLab Runnerディストリビューションからの派生であり、互換性のあるパッケージリポジトリを持つディストリビューションを指します。たとえば、DeepinはDebianの派生ディストリビューションです。そのため、Runnerの`deb`パッケージはDeepinにインストールして実行できるはずです。他のLinuxディストリビューションでも[GitLab Runnerをバイナリとしてインストール](linux-manually.md#using-binary-file)できる場合があります。 > [!note] > リストにないディストリビューションのパッケージは、当社のパッケージリポジトリからは入手できません。これらは、S3バケットからRPMまたはDEBパッケージをダウンロードして、手動で[インストール](linux-manually.md#using-debrpm-package)できます。 ## GitLab Runnerをインストールする {#install-gitlab-runner} GitLab Runnerをインストールするには、次の手順に従います: 1. 公式GitLabリポジトリを追加します: {{< tabs >}} {{< tab title="Debian/Ubuntu/Mint" >}} 1. リポジトリ設定スクリプトをダウンロードします: ```shell curl -L "https://packages.gitlab.com/install/repositories/runner/gitlab-runner/script.deb.sh" -o script.deb.sh ``` 1. 実行する前にスクリプトを検査します: ```shell less script.deb.sh ``` 1. スクリプトを実行します: ```shell sudo bash script.deb.sh ``` {{< /tab >}} {{< tab title="RHEL/CentOS/Fedora/Amazon Linux" >}} 1. リポジトリ設定スクリプトをダウンロードします: ```shell curl -L "https://packages.gitlab.com/install/repositories/runner/gitlab-runner/script.rpm.sh" -o script.rpm.sh ``` 1. 実行する前にスクリプトを検査します: ```shell less script.rpm.sh ``` 1. スクリプトを実行します: ```shell sudo bash script.rpm.sh ``` {{< /tab >}} {{< /tabs >}} 1. 最新バージョンのGitLab Runnerをインストールするか、次のステップに進んで特定のバージョンをインストールします: > [!note] `skel`ディレクトリの使用は、[`No such file or directory`ジョブの失敗](#error-no-such-file-or-directory-job-failures)を防ぐために、デフォルトで無効になっています。 {{< tabs >}} {{< tab title="Debian/Ubuntu/Mint" >}} ```shell sudo apt install gitlab-runner ``` {{< /tab >}} {{< tab title="RHEL/CentOS/Fedora/Amazon Linux" >}} ```shell sudo yum install gitlab-runner or sudo dnf install gitlab-runner ``` {{< /tab >}} {{< /tabs >}} > [!note] FIPS 140-2に準拠したGitLab Runnerのバージョンは、RHELディストリビューションで利用可能です。このバージョンをインストールするには、パッケージ名として`gitlab-runner`の代わりに`gitlab-runner-fips`を使用します。 1. 
特定のバージョンのGitLab Runnerをインストールするには、次のようにします: {{< tabs >}} {{< tab title="Debian/Ubuntu/Mint" >}} > [!note] `gitlab-runner`バージョン`v17.7.1`以降では、最新のバージョンではない特定の`gitlab-runner`のバージョンをインストールする場合、そのバージョンに必要な`gitlab-runner-helper-images`を明示的にインストールする必要があります。この要件は、`apt`/`apt-get`の制限により存在しています。 ```shell apt-cache madison gitlab-runner sudo apt install gitlab-runner=17.7.1-1 gitlab-runner-helper-images=17.7.1-1 ``` 特定バージョンの`gitlab-runner`をインストールするときに、同じバージョンの`gitlab-runner-helper-images`をインストールしないと、次のようなエラーが発生する可能性があります: ```shell sudo apt install gitlab-runner=17.7.1-1 ... The following packages have unmet dependencies: gitlab-runner : Depends: gitlab-runner-helper-images (= 17.7.1-1) but 17.8.3-1 is to be installed E: Unable to correct problems, you have held broken packages. ``` {{< /tab >}} {{< tab title="RHEL/CentOS/Fedora/Amazon Linux" >}} ```shell yum list gitlab-runner --showduplicates | sort -r sudo yum install gitlab-runner-17.2.0-1 ``` {{< /tab >}} {{< /tabs >}} 1. [Runnerを登録します](../register/_index.md)。 上記の手順を完了すると、Runnerを起動してプロジェクトで使用できるようになります。 [FAQ](../faq/_index.md)セクションを参照してください。このセクションでは、GitLab Runnerに関する最も一般的な問題について説明しています。 ## ヘルパーイメージパッケージ {#helper-images-package} `gitlab-runner-helper-images`パッケージには、GitLab Runnerがジョブの実行中に使用する、構築済みのヘルパーコンテナイメージが含まれています。これらのイメージは、リポジトリのクローンを作成し、アーティファクトをアップロードし、キャッシュを管理するために必要なツールとユーティリティを提供します。 `gitlab-runner-helper-images`パッケージには、次のオペレーティングシステムとアーキテクチャ用のヘルパーイメージが含まれています: Alpineベースのイメージ(最新): - `alpine-arm` - `alpine-arm64` - `alpine-riscv64` - `alpine-s390x` - `alpine-x86_64` - `alpine-x86_64-pwsh` Ubuntuベースのイメージ(24.04): - `ubuntu-arm` - `ubuntu-arm64` - `ubuntu-ppc64le` - `ubuntu-s390x` - `ubuntu-x86_64` - `ubuntu-x86_64-pwsh` ### ヘルパーイメージの自動ダウンロード {#automatic-helper-image-download} 特定のオペレーティングシステムとアーキテクチャの組み合わせ用のヘルパーイメージがホストシステムで使用できない場合、GitLab Runnerは必要に応じて必要なイメージを自動的にダウンロードします。`gitlab-runner-helper-images 
package`に含まれていないアーキテクチャの場合、手動インストールは必要ありません。この自動ダウンロードにより、手動での操作や個別のパッケージインストールを行わなくても、Runnerは`loong64`などの追加アーキテクチャをサポートできます。 ## GitLab Runnerをアップグレードする {#upgrade-gitlab-runner} 最新バージョンのGitLab Runnerをインストールするには、次のようにします: {{< tabs >}} {{< tab title="Debian/Ubuntu/Mint" >}} ```shell sudo apt update sudo apt install gitlab-runner ``` {{< /tab >}} {{< tab title="RHEL/CentOS/Fedora/Amazon Linux" >}} ```shell sudo yum update sudo yum install gitlab-runner ``` {{< /tab >}} {{< /tabs >}} ## パッケージインストールのGPG署名 {#gpg-signatures-for-package-installation} GitLab Runnerプロジェクトは、パッケージインストール方法に対して2種類のGPG署名を提供しています: - [リポジトリメタデータの署名](#repository-metadata-signing) - [パッケージの署名](#package-signing) ### リポジトリメタデータの署名 {#repository-metadata-signing} リモートリポジトリからダウンロードしたパッケージ情報が信頼できるものであることを検証するために、パッケージマネージャーはリポジトリメタデータの署名を使用します。 この署名は、`apt-get update`などのコマンドを使用するときに検証されます。このため、**パッケージのダウンロードとインストールが行われる前に**、利用可能なパッケージに関する情報が更新されます。検証に失敗した場合、パッケージマネージャーはメタデータを拒否します。つまり、署名の不一致の原因となった問題が見つかって解決されるまで、リポジトリからパッケージをダウンロードしてインストールすることはできません。 パッケージメタデータ署名の検証に使用されるGPG公開キーは、上記の手順で最初に行われたインストール時に自動的にインストールされます。今後のキーの更新では、既存のユーザーが新しいキーを手動でダウンロードしてインストールする必要があります。 でホストされているすべてのプロジェクトに対して1つのキーを使用します。使用されているキーの詳細は、[Linuxパッケージのドキュメント](https://docs.gitlab.com/omnibus/update/package_signatures/#package-repository-metadata-signing-key)で確認できます。このドキュメントページには、[過去に使用されたすべてのキー](https://docs.gitlab.com/omnibus/update/package_signatures/#previous-package-signing-keys)も記載されています。 ### パッケージの署名 {#package-signing} リポジトリメタデータの署名は、ダウンロードされたバージョン情報がからのものであることを証明します。パッケージ自体の整合性を証明するものではありません。リポジトリからユーザーへのメタデータ転送が影響を受けていない限り、にアップロードされたものはすべて、承認されているかどうかにかかわらず、適切に検証されます。 パッケージ署名では、各パッケージがそのビルド時に署名されます。ビルド環境と使用されているGPGキーの機密性を信頼できるようになるまで、パッケージの信頼性を検証できません。パッケージの有効な署名は、その出所が認証されており、その整合性が侵害されていないことを証明します。 パッケージ署名検証は、Debian/RPMベースのディストリビューションの一部でのみデフォルトで有効になっています。このタイプの検証を使用するには、設定の調整が必要になる場合があります。 でホストされているリポジトリごとに、パッケージ署名検証に使用されるGPGキーが異なる場合があります。GitLab Runnerプロジェクトでは、このタイプの署名に独自のキーペアを使用します。 #### RPMベースのディストリビューション 
{#rpm-based-distributions-1} RPM形式には、GPG署名機能の完全な実装が含まれており、この形式に基づくパッケージマネージャーと完全に統合されています。 [Linuxパッケージのドキュメント](https://docs.gitlab.com/omnibus/update/package_signatures/#rpm-based-distributions)に、RPMベースのディストリビューションのパッケージ署名検証を設定する方法に関する技術的な説明があります。GitLab Runnerでの違いは次のとおりです: - インストールする必要がある公開キーパッケージの名前は`gpg-pubkey-35dfa027-60ba0235`です。 - RPMベースのディストリビューションのリポジトリファイルの名前は、`/etc/yum.repos.d/runner_gitlab-runner.repo`(安定版リリースの場合)または`/etc/yum.repos.d/runner_unstable.repo`(不安定版リリースの場合)です。 - [パッケージ署名公開キー](#current-gpg-public-key)は、`https://packages.gitlab.com/runner/gitlab-runner/gpgkey/runner-gitlab-runner-49F16C5CC3A0F81F.pub.gpg`からインポートできます。 #### Debianベースのディストリビューション {#debian-based-distributions} `deb`形式は、公式にはパッケージ署名機能をデフォルトで備えていません。GitLab Runnerプロジェクトでは、パッケージの署名と検証に`dpkg-sig`ツールを使用します。この方法では、パッケージの手動検証のみがサポートされています。 `deb`パッケージを検証するには、次の手順に従います: 1. `dpkg-sig`をインストールします: ```shell apt update && apt install dpkg-sig ``` 1. [パッケージ署名公開キー](#current-gpg-public-key)をダウンロードしてインポートします: ```shell curl -JLO "https://packages.gitlab.com/runner/gitlab-runner/gpgkey/runner-gitlab-runner-49F16C5CC3A0F81F.pub.gpg" gpg --import runner-gitlab-runner-49F16C5CC3A0F81F.pub.gpg ``` 1. `dpkg-sig`でダウンロードしたパッケージを検証します: ```shell dpkg-sig --verify gitlab-runner_amd64.deb Processing gitlab-runner_amd64.deb... GOODSIG _gpgbuilder 931DA69CFA3AFEBBC97DAA8C6C57C29C6BA75A4E 1623755049 ``` パッケージの署名が無効であるか、無効なキー(失効したキーなど)で署名されている場合、出力は次のようになります: ```shell dpkg-sig --verify gitlab-runner_amd64.deb Processing gitlab-runner_amd64.deb... BADSIG _gpgbuilder ``` キーがユーザーのキーリングに存在しない場合、出力は次のようになります: ```shell dpkg-sig --verify gitlab-runner_amd64.v13.1.0.deb Processing gitlab-runner_amd64.v13.1.0.deb... 
UNKNOWNSIG _gpgbuilder 880721D4 ``` #### 現在のGPG公開キー {#current-gpg-public-key} `https://packages.gitlab.com/runner/gitlab-runner/gpgkey/runner-gitlab-runner-49F16C5CC3A0F81F.pub.gpg`からパッケージ署名に使用される現在の公開GPGキーをダウンロードできます。 | キーの属性 | 値 | |---------------|-------| | 名前 | `GitLab, Inc.` | | メール | `support@gitlab.com` | | フィンガープリント | `931D A69C FA3A FEBB C97D AA8C 6C57 C29C 6BA7 5A4E` | | 有効期限 | `2026-04-28` | > [!note] > GitLab Runnerプロジェクトでは、``バケットで利用可能なS3リリース用の`release.sha256`ファイルに署名するために、同じキーを使用します。 #### 過去のGPG公開キー {#previous-gpg-public-keys} 過去に使用されたキーを以下の表に示します。 失効したキーは、パッケージ署名検証設定から削除することを強くお勧めします。 次のキーによって作成された署名は、信頼すべきではありません。 | シリアル番号 | キーのフィンガープリント | 状態 | 有効期限 | ダウンロード(失効したキーのみ) | |---------|------------------------------------------------------|-----------|--------------|------------------------------| | 1 | `3018 3AC2 C4E2 3A40 9EFB E705 9CE4 5ABC 8807 21D4` | `revoked` | `2021-06-08` | [失効したキー](https://gitlab.com/gitlab-org/gitlab-runner/-/blob/main/docs/install/gpg-keys/9CE45ABC880721D4.pub.gpg) | | 2 | `09E5 7083 F34C CA94 D541 BC58 A674 BF81 35DF A027` | `revoked` | `2023-04-26` | [失効したキー](https://gitlab.com/gitlab-org/gitlab-runner/-/blob/main/docs/install/gpg-keys/A674BF8135DFA027.pub.gpg) | ## トラブルシューティング {#troubleshooting} GitLab Runnerのインストール時に発生する問題のトラブルシューティングと解決のためのヒントを以下に示します。 ### エラー: `No such file or directory`ジョブの失敗 {#error-no-such-file-or-directory-job-failures} デフォルトのスケルトン(`skel`)ディレクトリが原因でGitLab Runnerに問題が発生し、ジョブの実行に失敗することがあります。[イシュー4449](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/4449)と[イシュー1379](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/1379)を参照してください。 これを回避するために、GitLab Runnerをインストールすると、`gitlab-runner`ユーザーが作成され、デフォルトでは、ホームディレクトリはスケルトンなしで作成されます。`skel`の使用によってホームディレクトリに追加されるShell設定は、ジョブの実行を妨げる可能性があります。この設定は、前述のような予期しない問題を引き起こす可能性があります。 `skel`の回避がデフォルトの動作になる前にRunnerを作成していた場合は、次のドットファイルを削除してみてください: ```shell sudo rm /home/gitlab-runner/.profile sudo rm /home/gitlab-runner/.bashrc sudo rm /home/gitlab-runner/.bash_logout ``` 
`skel`ディレクトリを使用して、新しく作成された`$HOME`ディレクトリにデータを入力する必要がある場合は、Runnerをインストールする前に、`GITLAB_RUNNER_DISABLE_SKEL`変数を明示的に`false`に設定する必要があります: {{< tabs >}} {{< tab title="Debian/Ubuntu/Mint" >}} ```shell export GITLAB_RUNNER_DISABLE_SKEL=false; sudo -E apt-get install gitlab-runner ``` {{< /tab >}} {{< tab title="RHEL/CentOS/Fedora/Amazon Linux" >}} ```shell export GITLAB_RUNNER_DISABLE_SKEL=false; sudo -E yum install gitlab-runner ``` {{< /tab >}} {{< /tabs >}} ================================================ FILE: docs-locale/ja-jp/install/operator.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments description: Kubernetes用GitLab Operatorを使用してGitLab Runnerをインストールします。 title: GitLab Runner Operatorをインストールする --- ## Red Hat OpenShiftにインストールする {#install-on-red-hat-openshift} {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} OpenShiftのウェブコンソールでOperatorHubのstableチャネルから[GitLab Runner Operator](https://gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator)を使用して、Red Hat OpenShift v4以降にGitLab Runnerをインストールします。インストールが完了すると、新しくデプロイされたGitLab Runnerインスタンスを使用して、GitLab CI/CDジョブを実行できます。各CI/CDジョブは、個別のポッドで実行されます。 ### 前提条件 {#prerequisites} - 管理者権限を持つOpenShift 4.xクラスター - GitLab Runner登録トークン ### OpenShift Operatorをインストールする {#install-the-openshift-operator} まず、OpenShift Operatorをインストールする必要があります。 1. OpenShift UIを開き、管理者権限を持つユーザーとしてサインインします。 1. 左側のペインで、**Operators**、**OperatorHub**の順に選択します。 1. メインペインの**All Items**の下で、キーワード`GitLab Runner`を検索します。 ![GitLab Operator](img/openshift_allitems_v13_3.png) 1. インストールするには、GitLab Runner Operatorを選択します。 1. GitLab Runner Operatorの概要ページで、**Install**を選択します。 1. Install Operatorページで、以下を実行します: 1. **Update Channel**で、**stable**を選択します。 1. 
**Installed Namespace**で、目的のネームスペースを選択し、**インストール**を選択します。 ![GitLab OperatorのInstallページ](img/openshift_installoperator_v13_3.png) Installed Operatorsページで、GitLab Operatorの準備ができると、ステータスが**Succeeded**に変わります。 ![GitLab Operator Install Status](img/openshift_success_v13_3.png) ## Kubernetesにインストールする {#install-on-kubernetes} {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} [OperatorHub.io](https://gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator)のstableチャネルから[GitLab Runner Operator](https://operatorhub.io/operator/gitlab-runner-operator)を使用して、Kubernetes v1.21以降にGitLab Runnerをインストールします。インストールが完了すると、新しくデプロイされたGitLab Runnerインスタンスを使用して、GitLab CI/CDジョブを実行できます。各CI/CDジョブは、個別のポッドで実行されます。 ### 前提条件 {#prerequisites-1} - Kubernetes v1.21以降 - Cert manager v1.7.1 ### Kubernetes Operatorをインストールする {#install-the-kubernetes-operator} [OperatorHub.io](https://operatorhub.io/operator/gitlab-runner-operator)の手順に従ってください。 1. 前提条件をインストールします。 1. 右上にある**インストール**を選択し、指示に従って`olm`とOperatorをインストールします。 #### GitLab Runnerをインストールする {#install-gitlab-runner} 1. Runner認証トークンを取得します。次のいずれかの方法があります。 - [インスタンス](https://docs.gitlab.com/ci/runners/runners_scope/#create-an-instance-runner-with-a-runner-authentication-token)、[グループ](https://docs.gitlab.com/ci/runners/runners_scope/#create-a-group-runner-with-a-runner-authentication-token)、または[プロジェクト](https://docs.gitlab.com/ci/runners/runners_scope/#create-a-project-runner-with-a-runner-authentication-token)のRunnerを作成する。 - `config.toml`ファイルの中でRunner認証トークンを見つける。Runner認証トークンのプレフィックスは`glrt-`です。 1. GitLab Runnerトークンを使用して、シークレットファイルを作成します: ```shell cat > gitlab-runner-secret.yml << EOF apiVersion: v1 kind: Secret metadata: name: gitlab-runner-secret type: Opaque # Only one of the following fields can be set. The Operator fails to register the runner if both are provided. # NOTE: runner-registration-token is deprecated and will be removed in GitLab 18.0. You should use runner-token instead. 
stringData: runner-token: REPLACE_ME # your project runner token # runner-registration-token: "" # your project runner secret EOF ``` 1. 以下を実行して、クラスターに`secret`を作成します: ```shell kubectl apply -f gitlab-runner-secret.yml ``` 1. カスタムリソース定義(CRD)ファイルを作成し、次の設定を含めます。 ```shell cat > gitlab-runner.yml << EOF apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: gitlab-runner spec: gitlabUrl: https://gitlab.example.com buildImage: alpine token: gitlab-runner-secret EOF ``` 1. 次に、コマンドを実行して`CRD`ファイルを適用します: ```shell kubectl apply -f gitlab-runner.yml ``` 1. 以下を実行して、GitLab Runnerがインストールされていることを確認します: ```shell kubectl get runner NAME AGE gitlab-runner 5m ``` 1. Runnerポッドも表示されるはずです: ```shell kubectl get pods NAME READY STATUS RESTARTS AGE gitlab-runner-bf9894bdb-wplxn 1/1 Running 0 5m ``` #### OpenShift用の他のバージョンのGitLab Runner Operatorをインストールする {#install-other-versions-of-gitlab-runner-operator-for-openshift} Red Hat OperatorHubで使用可能なGitLab Runner Operatorのバージョンを使用しない場合は、別のバージョンをインストールできます。 公式に利用可能なOperatorのバージョンを確認するには、[`gitlab-runner-operator`リポジトリのタグを表示](https://gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/-/tags)します。Operatorが実行しているGitLab Runnerのバージョンを確認するには、目的のコミットまたはタグの`APP_VERSION`ファイルの内容(たとえば、[https://gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/-/blob/1-17-stable/APP_VERSION](https://gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/-/blob/1-17-stable/APP_VERSION))を表示します。 特定のバージョンをインストールするには、この`catalogsource.yaml`ファイルを作成し、``をタグまたは特定のコミットに置き換えます: {{< alert type="note" >}} 特定のコミットのイメージを使用する場合、タグの形式は`v0.0.1-`です。例: `registry.gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/gitlab-runner-operator-catalog-source:v0.0.1-f5a798af`。 {{< /alert >}} ```yaml apiVersion: operators.coreos.com/v1alpha1 kind: CatalogSource metadata: name: gitlab-runner-catalog namespace: openshift-marketplace spec: sourceType: grpc image: registry.gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/gitlab-runner-operator-catalog-source: 
displayName: GitLab Runner Operators publisher: GitLab Community ``` 以下を使用して`CatalogSource`を作成します: ```shell oc apply -f catalogsource.yaml ``` 1分以内に、新しいRunnerがOpenShiftクラスターのOperatorHubセクションに表示されるはずです。 ## オフライン環境のKubernetesクラスターにGitLab Runner Operatorをインストールする {#install-gitlab-runner-operator-on-kubernetes-clusters-in-offline-environments} 前提条件: - インストールプロセスに必要なイメージにアクセスできます。 インストール中にコンテナイメージをプルするために、GitLab Runner Operatorには、外部ネットワーク上のパブリックインターネットへの接続が必要です。オフライン環境にKubernetesクラスターがインストールされている場合は、ローカルイメージレジストリまたはパッケージレジストリを使用して、インストール中にイメージまたはパッケージをプルします。 ローカルリポジトリは、次のイメージを提供する必要があります: | イメージ | デフォルト値 | |-------------------------------------------------------|---------------| | **GitLab Runner Operator**イメージ | `registry.gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/gitlab-runner-operator:vGITLAB_RUNNER_OPERATOR_VERSION` | | **GitLab Runner**と**GitLab Runner Helper**のイメージ | これらのイメージは、GitLab Runner UBIイメージレジストリからダウンロードされ、Runnerカスタムリソースのインストール時に使用されます。使用するバージョンは、要件によって異なります。 | | **RBAC Proxy**イメージ | `registry.gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/openshift4/ose-kube-rbac-proxy:v4.13.0` | 1. ダウンロードしたソフトウェアパッケージとコンテナイメージをホストするために、切断されたネットワーク環境でローカルリポジトリまたはレジストリをセットアップします。使用できるモデルは次のとおりです: - コンテナイメージ用のDockerレジストリ。 - Kubernetesバイナリと依存関係のためのローカルパッケージレジストリ。 1. GitLab Runner Operator v1.23.2以降の場合は、`operator.k8s.yaml`ファイルの最新バージョンをダウンロードします: ```shell curl -O "https://gitlab.com/gitlab-org/gl-openshift/gitlab-runner-operator/-/releases/vGITLAB_RUNNER_OPERATOR_VERSION/downloads/operator.k8s.yaml" ``` 1. `operator.k8s.yaml`ファイルで、次のURLを更新します: - `GitLab Runner Operator image` - `RBAC Proxy image` 1. 更新されたバージョンの`operator.k8s.yaml`ファイルをインストールします: ```shell kubectl apply -f PATH_TO_UPDATED_OPERATOR_K8S_YAML GITLAB_RUNNER_OPERATOR_VERSION = 1.23.2+ ``` ## Operatorをアンインストール {#uninstall-operator} ### Red Hat OpenShiftでアンインストールする {#uninstall-on-red-hat-openshift} 1. Runner `CRD`を削除します: ```shell kubectl delete -f gitlab-runner.yml ``` 1. 
`secret`を削除します: ```shell kubectl delete -f gitlab-runner-secret.yml ``` 1. [Webコンソールを使用してクラスターからOperatorを削除する](https://docs.redhat.com/en/documentation/openshift_container_platform/4.7/html/operators/administrator-tasks#olm-deleting-operators-from-a-cluster-using-web-console_olm-deleting-operators-from-a-cluster)については、Red Hatドキュメントの手順に従ってください。 ### Kubernetesでアンインストールする {#uninstall-on-kubernetes} 1. Runner `CRD`を削除します: ```shell kubectl delete -f gitlab-runner.yml ``` 1. `secret`を削除します: ```shell kubectl delete -f gitlab-runner-secret.yml ``` 1. Operatorサブスクリプションを削除します: ```shell kubectl delete subscription my-gitlab-runner-operator -n operators ``` 1. インストールされている`CSV`のバージョンを確認します: ```shell kubectl get clusterserviceversion -n operators NAME DISPLAY VERSION REPLACES PHASE gitlab-runner-operator.v1.7.0 GitLab Runner 1.7.0 Succeeded ``` 1. `CSV`を削除します: ```shell kubectl delete clusterserviceversion gitlab-runner-operator.v1.7.0 -n operators ``` #### 設定 {#configuration} OpenShiftでGitLab Runnerを設定するには、[OpenShiftでのGitLab Runnerの設定](../configuration/configuring_runner_operator.md)ページを参照してください。 #### モニタリング {#monitoring} GitLab Runner Operatorデプロイメントのモニタリングとメトリクス収集を有効にするには、[GitLab Runnerのモニタリング](../monitoring/_index.md#monitor-operator-managed-gitlab-runners)を参照してください。 ================================================ FILE: docs-locale/ja-jp/install/osx.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments description: macOSにGitLab Runnerをインストールします。 title: macOSにGitLab Runnerをインストールする --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} このページでは、macOS(Apple SiliconおよびIntel x86-64)にGitLab Runnerをインストールする方法を説明します。 {{< alert type="note" >}} GitLab 
RunnerをインストールするmacOSユーザーは、通常、ローカルまたはリモートで実行されるコンテナまたは仮想マシンに[GitLabをインストール](https://docs.gitlab.com/install/install_methods/)します。 {{< /alert >}} 1. ご使用のシステムに対応するバイナリをダウンロードします。 - Intelベースのシステムの場合は次のようにします。 ```shell sudo curl --output /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-darwin-amd64" ``` - Apple Siliconベースのシステムの場合は次のようにします。 ```shell sudo curl --output /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-darwin-arm64" ``` [Bleeding Edge - その他のタグ付きリリースをダウンロードする](bleeding-edge.md#download-any-other-tagged-release)の説明に従って、利用可能なすべてのバージョンのバイナリをダウンロードできます。 1. 実行のための権限を付与します。 ```shell sudo chmod +x /usr/local/bin/gitlab-runner ``` 1. GitLab Runnerアプリケーションを実行するユーザーアカウントで、次の手順に従います。 1. [Runner設定を登録](../register/_index.md)します。登録プロセスで[Shell executor](../executors/shell.md)を選択します。macOSでiOSアプリケーションまたはmacOSアプリケーションをビルドする場合、ジョブはホスト上で直接実行され、認証済みユーザーのIDを使用します。ジョブはコンテナ内で実行されません。このため、コンテナexecutorを使用する場合よりも安全性が低くなります。詳細については、[セキュリティ](../security/_index.md#usage-of-shell-executor)に関する考慮事項のドキュメントを参照してください。 1. ターミナルを開き、現在のユーザーに切り替えます。 ```shell su - ``` 1. GitLab Runnerをサービスとしてインストールして開始します。 ```shell cd ~ gitlab-runner install gitlab-runner start ``` これらのコマンドの実行時に発生する可能性のあるエラーの解決方法について詳しくは、[トラブルシューティングのセクション](#macos-troubleshooting)を参照してください。 1. 
システムを再起動します。 上記の手順に従った場合、GitLab Runnerの設定ファイル(`config.toml`)は`/Users//.gitlab-runner/`にあります。[Runner](../configuration/advanced-configuration.md)の設定の詳細について参照してください。 詳細については、[用語集](../_index.md#glossary)を参照してください。 ## 既知の問題 {#known-issues} {{< alert type="note" >}} サービスは、現在のユーザーとしてログインしているターミナルウィンドウからインストールする必要があります。このようにインストールした場合にのみ、サービスを管理できます。 {{< /alert >}} 現在のユーザーとしてサインインするには、ターミナルでコマンド`su - `を実行します。ユーザー名を取得するには、コマンド`ls /users`を実行します。 macOSでサービスを動作させるための唯一の実証済みの方法は、ユーザーモードでサービスを実行することです。 サービスはユーザーがログインしている場合にのみ実行されるため、macOSマシンで自動ログインを有効にする必要があります。 サービスは`LaunchAgent`として起動されます。`LaunchAgents`を使用することでビルドはUIインタラクションを実行でき、iOSシミュレーターで実行およびテストできるようになります。 macOSには`LaunchDaemons`(バックグラウンドで完全に実行されるサービス)もあることに注意してください。`LaunchDaemons`はシステムの起動時に実行されますが、`LaunchAgents`と同じUIインタラクションへのアクセス権限はありません。Runnerのサービスを`LaunchDaemon`として実行することもできますが、この動作モードはサポートされていません。 `install`コマンドの実行後に`~/Library/LaunchAgents/gitlab-runner.plist`ファイルを検証することで、GitLab Runnerがサービス設定ファイルを作成したことを確認できます。 Homebrewを使用して`git`をインストールした場合、以下を含む`/usr/local/etc/gitconfig`ファイルが追加されている可能性があります。 ```ini [credential] helper = osxkeychain ``` これは、ユーザー認証情報をキーチェーンにキャッシュするようにGitに指示しますが、これが必要な動作ではない可能性があります。また、これが原因でフェッチがハングする可能性があります。次のコマンドを使用して、システムの`gitconfig`からこの行を削除できます。 ```shell git config --system --unset credential.helper ``` または、GitLabユーザーの`credential.helper`を無効にすることもできます。 ```shell git config --global --add credential.helper '' ``` 次のコマンドを使用して、`credential.helper`の状態を確認できます。 ```shell git config credential.helper ``` ## GitLab Runnerをアップグレードする {#upgrade-gitlab-runner} 1. サービスを停止します。 ```shell gitlab-runner stop ``` 1. 
バイナリをダウンロードして、GitLab Runner実行可能ファイルを置き換えます。 - Intelベースのシステムの場合は次のようにします。 ```shell sudo curl -o /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-darwin-amd64" ``` - Apple Siliconベースのシステムの場合は次のようにします。 ```shell sudo curl -o /usr/local/bin/gitlab-runner "https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-darwin-arm64" ``` [Bleeding Edge - その他のタグ付きリリースをダウンロードする](bleeding-edge.md#download-any-other-tagged-release)の説明に従って、利用可能なすべてのバージョンのバイナリをダウンロードできます。 1. 実行のための権限を付与します。 ```shell sudo chmod +x /usr/local/bin/gitlab-runner ``` 1. サービスを開始します。 ```shell gitlab-runner start ``` ## サービスファイルをアップグレードする {#upgrade-the-service-file} `LaunchAgent`設定をアップグレードするには、サービスをアンインストールしてからインストールする必要があります。 ```shell gitlab-runner uninstall gitlab-runner install gitlab-runner start ``` ## `codesign`をGitLab Runnerサービスで使用する {#using-codesign-with-the-gitlab-runner-service} Homebrewを使用してmacOSに`gitlab-runner`をインストールしており、ビルドが`codesign`を呼び出すときに、ユーザーキーチェーンにアクセスできるように`SessionCreate`を設定する必要がある場合があります。GitLabはHomebrewのformulaを保持しないため、公式バイナリを使用してGitLab Runnerをインストールする必要があります。 次の例では、`gitlab`ユーザーとしてビルドを実行し、コード署名のためにそのユーザーがインストールした署名証明書へのアクセスを必要とします。 ```xml SessionCreate KeepAlive SuccessfulExit RunAtLoad Disabled Label com.gitlab.gitlab-runner UserName gitlab GroupName staff ProgramArguments /usr/local/opt/gitlab-runner/bin/gitlab-runner run --working-directory /Users/gitlab/gitlab-runner --config /Users/gitlab/gitlab-runner/config.toml --service gitlab-runner --syslog EnvironmentVariables PATH /usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin ``` ## macOSのトラブルシューティング {#macos-troubleshooting} 以下のエラーは、macOSでのトラブルシューティングに関連しています。一般的なトラブルシューティングについては、[GitLab Runnerのトラブルシューティング](../faq/_index.md)を参照してください。 ### `killed: 9` {#killed-9} Apple Siliconベースのシステムでは、`gitlab-runner install`、`gitlab-runner start`、または`gitlab-runner register`コマンドを実行するときにこのエラーが発生する可能性があります。 
このエラーを解決するには、`~/Library/LaunchAgents/gitlab-runner.plist`の`StandardOutPath`と`StandardErrorPath`の値で指定されたディレクトリが書き込み可能であることを確認します。 次の例では、`/Users/USERNAME/Library/LaunchAgents/gitlab-runner.plist`ファイルが編集されており、ログファイル用に新しい書き込み可能なディレクトリ`gitlab-runner-log`が含まれています。 ```xml StandardErrorPath /Users/USERNAME/gitlab-runner-log/gitlab-runner.err.log StandardOutPath /Users/USERNAME/gitlab-runner-log/gitlab-runner.out.log ``` ### エラー: `"launchctl" failed: exit status 112, Could not find domain for` {#error-launchctl-failed-exit-status-112-could-not-find-domain-for} このメッセージは、macOSにGitLab Runnerをインストールしようとしたときに表示される場合があります。SSH接続ではなく、GUIターミナルアプリケーションからGitLab Runnerサービスを管理していることを確認してください。 ### メッセージ: `Failed to authorize rights (0x1) with status: -60007.` {#message-failed-to-authorize-rights-0x1-with-status--60007} macOSを使用しているときにGitLab Runnerが上記のメッセージでブロックされた場合、この状況が発生する原因は2つあります。 1. ユーザーがUIインタラクションを実行できることを確認します。 ```shell DevToolsSecurity -enable sudo security authorizationdb remove system.privilege.taskport is-developer ``` 1番目のコマンドは、ユーザーのデベロッパーツールへのアクセスを有効にします。2番目のコマンドは、デベロッパーグループのメンバーであるユーザーがUIインタラクションを実行できるようにします(iOSシミュレーターの実行など)。 1. 
GitLab Runnerサービスが`SessionCreate = true`を使用していないことを確認します。以前は、GitLab Runnerをサービスとして実行するときに`SessionCreate`を使用して`LaunchAgents`を作成していました。その時点(**Mavericks**)では、これがコード署名を機能させるための唯一の解決策でした。これは最近、**OS X El Capitan**で変更されました。OS X El Capitanでは、この動作を変更する多くの新しいセキュリティ機能が導入されました。 `SessionCreate`。ただしアップグレードの場合は、`LaunchAgent`スクリプトを手動で再インストールする必要があります。 ```shell gitlab-runner uninstall gitlab-runner install gitlab-runner start ``` これで、`~/Library/LaunchAgents/gitlab-runner.plist`で`SessionCreate`が`false`に設定されていることを検証できます。 ### ジョブエラー: `Failed to connect to path port 3000: Operation timed out` {#job-error-failed-to-connect-to-path-port-3000-operation-timed-out} ジョブの1つがこのエラーで失敗した場合は、RunnerがGitLabインスタンスに接続できることを確認してください。接続は、次のような原因によってブロックされる可能性があります。 - ファイアウォール - プロキシ - 権限 - ルーティング設定 ### エラー: `gitlab-runner start`コマンドで`FATAL: Failed to start gitlab-runner: exit status 134` {#error-fatal-failed-to-start-gitlab-runner-exit-status-134-on-gitlab-runner-start-command} このエラーは、GitLab Runnerサービスが正しくインストールされていないことを示しています。このエラーを解決するには、次のコマンドを実行します。 ```shell gitlab-runner uninstall gitlab-runner install gitlab-runner start ``` エラーが解決しない場合は、グラフィカルログインを実行します。グラフィカルログインは、サービスの起動に必要な`LaunchAgent`をブートストラップします。詳細については、[既知の問題](osx.md#known-issues)を参照してください。 AWSでホストされているmacOSインスタンスは、インスタンスのGUIに接続するために[追加の手順](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/connect-to-mac-instance.html)を実行する必要があります。`ssh -L`オプションを使用してSSHポート転送を有効にし、`vnc`などのリモートデスクトップクライアントがリモートインスタンスに接続できるようにします。また、AWSでホストされているmacOSインスタンスの`/private/etc/ssh/sshd_config`で`AllowTcpForwarding yes`を設定する必要があります。インスタンスを再起動して、`sshd`設定への変更を適用します。エラーを解決するため、GUIにサインインした後、GUIのターミナルからGitLab Runnerのトラブルシューティングの手順を繰り返し行います。 ### エラー: `"launchctl" failed with stderr: Load failed: 5: Input/output error` {#error-launchctl-failed-with-stderr-load-failed-5-inputoutput-error} `gitlab-runner start`コマンドの実行時にこのエラーが発生した場合は、まず、Runnerがすでに実行中かどうかを確認してください: ```shell gitlab-runner status ``` 
Runnerがすでに実行中の場合は、再度開始する必要はありません。実行されておらず、それでもこのエラーが発生する場合は、`~/Library/LaunchAgents/gitlab-runner.plist`の値`StandardOutPath`と`StandardErrorPath`で指定されたディレクトリが存在することを確認してください: ```xml StandardOutPath /usr/local/var/log/gitlab-runner.out.log StandardErrorPath /usr/local/var/log/gitlab-runner.err.log ``` ディレクトリが存在しない場合はディレクトリを作成し、それらに対する読み取りおよび書き込みを行うための適切な権限がRunnerサービスユーザーにあることを確認します。次に、Runnerを起動します: ```shell gitlab-runner start ``` ### エラー: `Error on fetching TLS Data from API response... error error=couldn't build CA Chain` {#error-error-on-fetching-tls-data-from-api-response-error--errorcouldnt-build-ca-chain} GitLab Runner v15.5.0以降にアップグレードすると、次のエラーが発生することがあります。 ```plaintext Certificate doesn't provide parent URL: exiting the loop Issuer=Baltimore CyberTrust Root IssuerCertURL=[] Serial=33554617 Subject=Baltimore CyberTrust Root context=certificate-chain-build Verifying last certificate to find the final root certificate Issuer=Baltimore CyberTrust Root IssuerCertURL=[] Serial=33554617 Subject=Baltimore CyberTrust Root context=certificate-chain-build ERROR: Error on fetching TLS Data from API response... error error=couldn't build CA Chain: error while fetching certificates from TLS ConnectionState: error while fetching certificates into the CA Chain: couldn't resolve certificates chain from the leaf certificate: error while resolving certificates chain with verification: error while verifying last certificate from the chain: x509: “Baltimore CyberTrust Root” certificate is not permitted for this usage runner=x7kDEc9Q ``` このエラーが発生した場合は、次の操作を行う必要があります。 1. GitLab Runner v15.5.1以降にアップグレードします。 1. 
[`[runners.feature_flags]`設定](../configuration/feature-flags.md#enable-feature-flag-in-runner-configuration)で`FF_RESOLVE_FULL_TLS_CHAIN`を`false`に設定します。下記は例です: ```toml [[runners]] name = "example-runner" url = "https://gitlab.com/" token = "TOKEN" executor = "docker" [runners.feature_flags] FF_RESOLVE_FULL_TLS_CHAIN = false ``` この機能フラグを無効にすると、SHA-1署名またはその他の非推奨のルート証明書署名を使用するHTTPSエンドポイントのTLS接続の問題を修正できる場合があります。 ================================================ FILE: docs-locale/ja-jp/install/requirements.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: CI/CDジョブ用ソフトウェア title: システム要件とサポートされているプラットフォーム --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} ## サポートされているオペレーティングシステム {#supported-operating-systems} GitLab Runnerは次の環境にインストールできます: - [GitLabリポジトリ](linux-repository.md)または[手動](linux-manually.md)でLinuxに - [FreeBSD](freebsd.md) - [macOS](osx.md) - [Windows](windows.md) - [z/OS](z-os.md) [最先端バイナリ](bleeding-edge.md)も利用可能です。 別のオペレーティングシステムを使用するには、そのオペレーティングシステムがGoバイナリをコンパイルできることを確認してください。 ## サポートされているコンテナ {#supported-containers} GitLab Runnerは以下を使用してインストールできます: - [Docker](docker.md) - [The GitLab Helmチャート](kubernetes.md) - [The Kubernetes向けGitLabエージェント](kubernetes-agent.md) - [The GitLab Operator](operator.md) ## サポートされているアーキテクチャ {#supported-architectures} GitLab Runnerは以下のアーキテクチャで利用可能です: - x86 - AMD64 - ARM64 - ARM - s390x - ppc64le - riscv64 - loong64 ## システム要件 {#system-requirements} GitLab Runnerのシステム要件は、以下の考慮事項によって異なります: - CI/CDジョブの予想されるCPU負荷 - CI/CDジョブの予想されるメモリ使用量 - 同時実行されるCI/CDジョブの数 - アクティブに開発されているプロジェクトの数 - 並行して作業するデベロッパーの予想数 GitLab.comで利用可能なマシンタイプについては、[GitLabホスト型Runner](https://docs.gitlab.com/ci/runners/)を参照してください。 ## FIPS準拠のGitLab Runner {#fips-compliant-gitlab-runner} FIPS 140-2準拠のGitLab Runnerバイナリは、Red Hat Enterprise 
Linux(RHEL)ディストリビューションおよびAMD64アーキテクチャで利用可能です。他のディストリビューションおよびアーキテクチャのサポートは、[28814イシュー](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28814)で提案されています。 このバイナリは、[Red Hat Goコンパイラ](https://developers.redhat.com/blog/2019/06/24/go-and-fips-140-2-on-red-hat-enterprise-linux)でビルドされており、FIPS 140-2で検証された暗号学的ライブラリを呼び出しています。A [UBI-8 minimal image](https://docs.redhat.com/en/documentation/red_hat_enterprise_linux/8/html-single/building_running_and_managing_containers/index#con_understanding-the-ubi-minimal-images_assembly_types-of-container-images)は、GitLab Runner FIPSイメージを作成するためのベースとして使用されます。 RHELでFIPS準拠のGitLab Runnerを使用する方法の詳細については、[Switching RHEL to FIPS mode](https://docs.redhat.com/en/documentation/red_hat_enterprise_linux/8/html/security_hardening/switching-rhel-to-fips-mode_security-hardening)を参照してください。 ================================================ FILE: docs-locale/ja-jp/install/step-runner.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see description: GitLab Functionsを使用するために、step runnerを手動でインストールします。 title: step runnerを手動でインストールします --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} step runnerは、ネイティブ関数をサポートしないexecutorでGitLab RunnerがGitLab Functionsを実行できるようにするバイナリです。これらのexecutorでは、パイプラインで関数を使用する前に、ジョブが実行されるホストまたはコンテナにstep runnerのバイナリをインストールする必要があります。 ## 手動でのstep runnerインストールが必要なexecutor {#executors-that-require-manual-step-runner-installation} step runnerを手動でインストールする必要があるかどうかは、お使いのexecutorによって異なります。以下の表は、手動でのstep runnerのインストールが必要なexecutorを示しています: | executor | 手動インストールが必要 | |-------------------|------------------------------| | Shell | はい | | SSH | はい | | Kubernetes | はい | | VirtualBox | はい | | Parallels | はい | | カスタム | はい | | インスタンス | はい | | Docker | Windowsのみ | | Docker Autoscaler | Windowsのみ | | Docker Machine | Windowsのみ | 
手動インストールが不要なexecutorの場合、`gitlab-runner-helper`がstep runnerとして機能します。これらのexecutorには、`step-runner`バイナリは存在せず、必要もありません。 ### 変数アクセス制限 {#variable-access-restrictions} step runnerを手動でインストールしたexecutorでは、step runnerはジョブ変数と環境変数へのアクセスが制限されます: | 構文 | 利用可能な値 | |----------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | `${{ vars. }}` | プレフィックスが`CI_`、`DOCKER_`、または`GITLAB_`のジョブ変数のみ。 | | `${{ env. }}` | `HTTPS_PROXY`, `HTTP_PROXY`, `NO_PROXY`, `http_proxy`, `https_proxy`, `no_proxy`, `all_proxy`, `LANG`, `LC_ALL`, `LC_CTYPE`, `LOGNAME`, `USER`, `PATH`, `SHELL`, `TERM`, `TMPDIR`, `TZ` | ## step runnerを手動でインストールします {#install-step-runner-manually} 複数のプラットフォーム向けのコンパイルされたバイナリは、[step runnerのリリースページ](https://gitlab.com/gitlab-org/step-runner/-/releases)から入手できます。サポートされているプラットフォームには、Windows、Linux、macOS、およびFreeBSDがあり、複数のアーキテクチャ(amd64、arm64、386、ARM、s390x、ppc64le)に対応しています。 ### バイナリの信頼性を検証します {#verify-authenticity-of-the-binary} インストールする前に、バイナリが改ざんされておらず、公式のGitLabチームから提供されていることを確認してください。 1. GPG公開キーをダウンロードしてインポートします: ```shell # All platforms (requires gpg installed: https://gnupg.org/download/) curl -o step-runner.pub.gpg "https://gitlab.com/gitlab-org/step-runner/-/package_files/257922684/download" gpg --import step-runner.pub.gpg gpg --fingerprint ``` インポートしたキーが以下と一致することを確認してください: | キー属性 | 値 | |---------------|------------------------------------------------------| | 名前 | `GitLab, Inc.` | | メール | `support@gitlab.com` | | フィンガープリント | `0FCD 59B1 6F4A 62D0 3839 27A5 42FF CA71 62A5 35F5` | | 有効期限 | `2029-01-05` | 1. [リリースページ](https://gitlab.com/gitlab-org/step-runner/-/releases)から、以下のファイルをダウンロードしてください: - お使いのプラットフォーム用のバイナリ(例: `step-runner-linux-amd64`または`step-runner-darwin-arm64`) - `step-runner-release.sha256` - `step-runner-release.sha256.asc` 1. 
GPG署名を検証します: ```shell # All platforms (requires gpg) gpg --verify step-runner-release.sha256.asc step-runner-release.sha256 ``` 出力には`Good signature`メッセージが含まれているはずです。 1. バイナリのチェックサムを検証します: ```shell # Linux sha256sum -c step-runner-release.sha256 ``` ```shell # macOS shasum -a 256 -c step-runner-release.sha256 ``` ```shell # Windows (PowerShell) — replace 'step-runner-windows-amd64.exe' with your binary name $binary = "step-runner-windows-amd64.exe" $expected = (Select-String -Path "step-runner-release.sha256" -Pattern $binary).Line.Split(" ")[0] $actual = (Get-FileHash -Algorithm SHA256 $binary).Hash.ToLower() if ($actual -eq $expected) { "OK" } else { "FAILED: checksum mismatch" } ``` 出力には、お使いのバイナリに対して`OK`が表示されるはずです。 ### step-runnerをPATHに追加します {#add-step-runner-to-path} バイナリをダウンロードして検証したら、ジョブが実行されるインスタンスの`PATH`で利用できるようにします。このインスタンスは、executorによってはホストマシンまたはコンテナの場合があります。 1. バイナリを`step-runner`(Windowsでは`step-runner.exe`)に名前変更します: ```shell mv step-runner-- step-runner ``` 1. Unix系システムでは、バイナリを実行可能にします: ```shell chmod +x step-runner ``` 1. 
バイナリを`PATH`上のディレクトリに移動します: ```shell mv step-runner /usr/local/bin/ ``` ================================================ FILE: docs-locale/ja-jp/install/support-policy.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runnerのサポートポリシー --- GitLab Runnerのサポートポリシーは、オペレーティングシステムのライフサイクルポリシーによって決定されます。 ## コンテナイメージのサポート {#container-images-support} GitLab Runnerコンテナイメージの作成に使用されるベースイメージディストリビューション(Ubuntu、Alpine、Red Hat Universalベースイメージ)のサポートライフサイクルに従います。 ベースディストリビューションの公開終了日は、必ずしもGitLabのメジャーリリースサイクルと一致するとは限りません。つまり、マイナーリリースでは、GitLab Runnerコンテナイメージのバージョンの公開を停止します。これにより、アップストリームディストリビューションが更新しなくなったイメージは公開されなくなります。 ### コンテナイメージと公開終了日 {#container-images-and-end-of-publishing-date} | ベースコンテナ | ベースコンテナのバージョン | ベンダーのサービス終了日 | GitLabのサービス終了日 | |--------------------------------|------------------------|-----------------|-----------------| | Ubuntu | 24.04 | 2027-04-30 | 2027-05-20 | | Ubuntu | 20.04 | 2025-05-31 | 2025-06-19 | | Alpine | 3.12 | 2022-05-01 | 2023-05-22 | | Alpine | 3.13 | 2022-11-01 | 2023-05-22 | | Alpine | 3.14 | 2023-05-01 | 2023-05-22 | | Alpine | 3.15 | 2023-11-01 | 2024-01-18 | | Alpine | 3.16 | 2024-05-23 | 2024-06-22 | | Alpine | 3.17 | 2024‑11‑22 | 2024-12-22 | | Alpine | 3.18 | 2025‑05‑09 | 2025-05-22 | | Alpine | 3.19 | 2025‑11‑01 | 2025-11-22 | | Alpine | 3.21 | 2026‑11‑01 | 2026-11-22 | | Alpine | latest | | | | Red Hat Universalベースイメージ9 | 9.5 | 2025-04-31 | 2025-05-22 | GitLab Runnerバージョン17.7以降は、特定のバージョンの代わりに、単一のAlpineバージョン(`latest`)のみをサポートします。Alpineバージョン3.21は、明記されているサービス終了日までサポートされます。対照的に、Ubuntu 24.04はサービス終了日までサポートされ、その時点で最新のLTSリリースに移行します。 ## Windowsバージョンのサポート {#windows-version-support} GitLabは、Microsoft WindowsオペレーティングシステムのLTSバージョンを正式にサポートしているため、Microsoftの[Servicing 
Channels](https://learn.microsoft.com/en-us/windows/deployment/update/waas-overview#servicing-channels)ライフサイクルポリシーに従います。 これは、以下をサポートすることを意味します: - [Long-Term Servicing Channel](https://learn.microsoft.com/en-us/windows/deployment/update/waas-overview#long-term-servicing-channel)バージョンは、リリース日から5年間サポートされます。 5年後、Microsoftはさらに5年間の延長サポートを提供します。この延長期間中、可能な限りサポートを提供します。GitLabのメジャーリリースでは、発表をもってこのサポートを終了する場合があります。 - [Semi-Annual Channel](https://learn.microsoft.com/en-us/windows/deployment/update/waas-overview#semi-annual-channel)バージョンは、リリース日から18か月間サポートされます。メインストリームサポートが終了すると、これらのバージョンはサポートされません。 このサポートポリシーは、配布する[Windows binaries](windows.md#installation)と[Docker executor](../executors/docker.md#supported-windows-versions)に適用されます。 {{< alert type="note" >}} WindowsコンテナのDocker executorには、ホストOSのバージョンと一致する必要があるため、厳格なバージョン要件があります。詳細については、[サポートされているWindowsコンテナの一覧](../executors/docker.md#supported-windows-versions)を参照してください。 {{< /alert >}} 信頼できる唯一の情報源として、[Windows Serverリリース情報](https://learn.microsoft.com/en-us/windows/release-health/windows-server-release-info)を使用します。これには、リリース日、メインストリームサポート日、および延長サポート日が指定されています。 以下は、一般的に使用されるバージョンとそのサービス終了日の一覧です: | オペレーティングシステム | メインストリームサポート終了日 | 延長サポート終了日 | |----------------------------|-----------------------------|---------------------------| | Windows Server 2019(1809) | 2024年1月 | 2029年1月 | | Windows Server 2022(21H2) | 2026年10月 | 2031年10月 | | Windows Server 2025(24H2) | 2029年10月 | 2034年10月 | ### 今後のリリース {#future-releases} Microsoftは、[Semi-Annual Channel](https://learn.microsoft.com/en-us/windows-server/get-started/servicing-channels-comparison#semi-annual-channel)で新しいWindows Server製品を年に2回リリースし、2〜3年ごとに、Windows Serverの新しいメジャーバージョンが[Long-Term Servicing Channel(LTSC)](https://learn.microsoft.com/en-us/windows-server/get-started/servicing-channels-comparison#long-term-servicing-channel-ltsc)でリリースされます。 GitLabは、Google Cloud Platform上のMicrosoftの公式リリース日から1か月以内に、最新のWindows Serverバージョン(Semi-Annual Channel)を含む新しいGitLab Runnerヘルパーイメージをテストおよびリリースすることを目指しています。利用可能日は、[サービスオプションリスト別のWindows 
Server現在のバージョン](https://learn.microsoft.com/en-us/windows/release-health/windows-server-release-info#windows-server-current-versions-by-servicing-option)を参照してください。 ================================================ FILE: docs-locale/ja-jp/install/windows.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments description: WindowsシステムにGitLab Runnerをインストールします。 title: WindowsにGitLab Runnerをインストールする --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} WindowsにGitLab Runnerをインストールして実行するには、以下が必要です。 - Git([公式ウェブサイト](https://git-scm.com/download/win)からインストールできます) - ユーザーアカウントのパスワード(組み込みのシステムアカウントではなく、ユーザーアカウントで実行する場合)。 - 文字エンコードの問題を回避するために、システムロケールが英語(米国)に設定されていること。詳細については、[イシュー38702](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/38702)を参照してください。 ## インストール {#installation} 1. システム内の任意の場所(`C:\GitLab-Runner`など)にフォルダーを作成します。 1. [64ビット](https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-windows-amd64.exe)または[32ビット](https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-windows-386.exe)のバイナリをダウンロードし、作成したフォルダーに配置します。以降の説明では、バイナリの名前を`gitlab-runner.exe`に変更したこと(オプション)を前提としています。[Bleeding Edge - その他のタグ付きリリースをダウンロードする](bleeding-edge.md#download-any-other-tagged-release)の説明に従って、利用可能なすべてのバージョンのバイナリをダウンロードできます。 1. GitLab Runnerのディレクトリと実行可能ファイルに対する`Write`権限を制限してください。これらの権限を設定しないと、一般ユーザーが実行可能ファイルを独自のファイルに置き換え、管理者権限で任意のコードを実行してしまう可能性があります。 1. [管理者権限でのコマンドプロンプト](https://learn.microsoft.com/en-us/powershell/scripting/windows-powershell/starting-windows-powershell?view=powershell-7.4#with-administrative-privileges-run-as-administrator)を実行します。 1. [Runnerを登録します](../register/_index.md)。 1. 
GitLab Runnerをサービスとしてインストールして開始します。組み込みのシステムアカウント(推奨)またはユーザーアカウントを使用してサービスを実行できます。 **組み込みのシステムアカウントを使用してサービスを実行する**(ステップ1で作成したサンプルディレクトリ`C:\GitLab-Runner`内) ```powershell cd C:\GitLab-Runner .\gitlab-runner.exe install .\gitlab-runner.exe start ``` **ユーザーアカウントを使用してサービスを実行する**(ステップ1で作成したサンプルディレクトリ`C:\GitLab-Runner`内) 現在のユーザーアカウントの有効なパスワードを入力する必要があります。これは、Windowsでサービスを開始するために必要であるためです。 ```powershell cd C:\GitLab-Runner .\gitlab-runner.exe install --user ENTER-YOUR-USERNAME --password ENTER-YOUR-PASSWORD .\gitlab-runner.exe start ``` GitLab Runnerのインストール中にエラーが発生した場合は、[トラブルシューティングのセクション](#windows-troubleshooting)を参照してください。 1. (オプション)[高度な設定の詳細](../configuration/advanced-configuration.md)で詳しく説明されているようにして、複数の同時ジョブを許可するため、`C:\GitLab-Runner\config.toml`でRunnerの`concurrent`の値を更新します。また、高度な設定の詳細を使用して、BatchではなくBashまたはPowerShellを使用するようにShell executorを更新できます。 これで、Runnerがインストールされ、実行され、システムを再起動するたびに再起動されるようになります。ログはWindowsイベントログに保存されます。 ## アップグレード {#upgrade} 1. サービスを停止します(以前と同様に[管理者権限でのコマンドプロンプト](https://learn.microsoft.com/en-us/powershell/scripting/windows-powershell/starting-windows-powershell?view=powershell-7.4#with-administrative-privileges-run-as-administrator)が必要です)。 ```powershell cd C:\GitLab-Runner .\gitlab-runner.exe stop ``` 1. [64ビット](https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-windows-amd64.exe)または[32ビット](https://s3.dualstack.us-east-1.amazonaws.com/gitlab-runner-downloads/latest/binaries/gitlab-runner-windows-386.exe)のバイナリをダウンロードし、Runnerの実行可能ファイルを置き換えます。[Bleeding Edge - その他のタグ付きリリースをダウンロードする](bleeding-edge.md#download-any-other-tagged-release)の説明に従って、利用可能なすべてのバージョンのバイナリをダウンロードできます。 1. 
サービスを開始します。 ```powershell .\gitlab-runner.exe start ``` ## アンインストール {#uninstall} [管理者権限でのコマンドプロンプト](https://learn.microsoft.com/en-us/powershell/scripting/windows-powershell/starting-windows-powershell?view=powershell-7.4#with-administrative-privileges-run-as-administrator)から次のようにします。 ```powershell cd C:\GitLab-Runner .\gitlab-runner.exe stop .\gitlab-runner.exe uninstall cd .. rmdir /s GitLab-Runner ``` ## Windowsのトラブルシューティング {#windows-troubleshooting} [FAQ](../faq/_index.md)セクションを参照してください。このセクションでは、GitLab Runnerに関する最も一般的な問題について説明しています。 _アカウント名が無効です_のようなエラーが発生した場合は、以下を試してください。 ```powershell # Add \. before the username .\gitlab-runner.exe install --user ".\ENTER-YOUR-USERNAME" --password "ENTER-YOUR-PASSWORD" ``` サービスの開始中に`The service did not start due to a logon failure`エラーが発生した場合は、[FAQセクション](#error-the-service-did-not-start-due-to-a-logon-failure)を参照して、問題を解決する方法を確認してください。 Windowsパスワードがない場合は、GitLab Runnerサービスを開始できませんが、組み込みのシステムアカウントを使用できます。 組み込みのシステムアカウントの問題については、Microsoftのサポートウェブサイトの[Configure the Service to Start Up with the Built-in System Account](https://learn.microsoft.com/en-us/troubleshoot/windows-server/system-management-components/service-startup-permissions#resolution-3-configure-the-service-to-start-up-with-the-built-in-system-account)を参照してください。 ### Runnerのログを取得する {#get-runner-logs} `.\gitlab-runner.exe install`を実行すると、`gitlab-runner`がWindowsサービスとしてインストールされます。イベントビューアーで、プロバイダー名`gitlab-runner`でログを見つけることができます。 GUIにアクセスできない場合は、PowerShellで[`Get-WinEvent`](https://learn.microsoft.com/en-us/powershell/module/microsoft.powershell.diagnostics/get-winevent?view=powershell-7.4)を実行できます。 ```shell PS C:\> Get-WinEvent -ProviderName gitlab-runner ProviderName: gitlab-runner TimeCreated Id LevelDisplayName Message ----------- -- ---------------- ------- 2/4/2025 6:20:14 AM 1 Information [session_server].listen_address not defined, session endpoints disabled builds=0... 
2/4/2025 6:20:14 AM 1 Information listen_address not defined, metrics & debug endpoints disabled builds=0... 2/4/2025 6:20:14 AM 1 Information Configuration loaded builds=0... 2/4/2025 6:20:14 AM 1 Information Starting multi-runner from C:\config.toml... builds=0... ``` ### Windowsでのビルド中に`PathTooLongException`が発生する {#i-get-a-pathtoolongexception-during-my-builds-on-windows} このエラーは、`npm`などのツールが、長さが260文字を超えるパスを含むディレクトリ構造を生成することがあるために発生します。この問題を解決するには、次のいずれかの解決策を採用します。 - `core.longpaths`が有効になっているGitを使用します。 Gitを使用してディレクトリ構造をクリーンアップすることで、問題を回避できます。 1. コマンドラインから`git config --system core.longpaths true`を実行します。 1. GitLab CIプロジェクト設定ページで、`git fetch`を使用するようにプロジェクトを設定します。 - PowerShell用のNTFSSecurityツールを使用します。 [NTFSSecurity](https://github.com/raandree/NTFSSecurity) PowerShellモジュールは、長いパスをサポートする`Remove-Item2`メソッドを提供します。このモジュールが利用可能な場合は、GitLab Runnerによってそれが検出され、自動的にそれが利用されます。 > GitLab Runner 16.9.1で導入されたリグレッションは、GitLab Runner 17.10.0で修正されています。リグレッションのあるGitLab Runnerバージョンを使用する場合は、次のいずれかの回避策を使用してください。 > > - `pre_get_sources_script`を使用することにより、Gitシステムレベルの設定を再度有効にします(`Git_CONFIG_NOSYSTEM`を設定解除します)。このアクションにより、Windowsで`core.longpaths`がデフォルトで有効になります。 > > ```yaml > build: > hooks: > pre_get_sources_script: > - $env:GIT_CONFIG_NOSYSTEM='' > ``` > > - カスタム`GitLab-runner-helper`イメージをビルドします。 > > ```dockerfile > FROM registry.gitlab.com/gitlab-org/gitlab-runner/gitlab-runner-helper:x86_64-v17.8.3-servercore21H2 > ENV GIT_CONFIG_NOSYSTEM= > ``` ### Windowsバッチスクリプトのエラー: `The system cannot find the batch label specified - buildscript` {#error-with-windows-batch-scripts-the-system-cannot-find-the-batch-label-specified---buildscript} `.gitlab-ci.yml`のBatchファイル行の先頭に`call`を追加して、`call C:\path\to\test.bat`のように記述する必要があります。下記は例です: ```yaml before_script: - call C:\path\to\test.bat ``` 詳細については、[イシュー1025](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/1025)を参照してください。 ### Webターミナルで色付きの出力を得るにはどうすればよいですか? 
{#how-can-i-get-colored-output-on-the-web-terminal} **簡単な説明**: プログラムの出力にANSIカラーコードが含まれていることを確認してください。テキストの書式設定という点から、UNIX ANSIターミナルエミュレーターで実行しているとします(これはウェブインターフェースの出力であるため)。 **詳しい説明**: GitLab CIのウェブインターフェースは、UNIX ANSIターミナルをエミュレートします(少なくとも部分的に)。`gitlab-runner`は、ビルドからの出力をウェブインターフェースに直接パイプします。つまり、存在するANSIカラーコードはすべて有効になります。 古いバージョンのWindowsのコマンドプロンプトターミナル(Windows 10、バージョン1511より前)は、ANSIカラーコードをサポートしていません。代わりにwin32([`ANSI.SYS`](https://en.wikipedia.org/wiki/ANSI.SYS))呼び出しを使用しますが、この呼び出しは、表示される文字列に**存在していません**。クロスプラットフォームプログラムを作成する場合、デベロッパーは、通常、デフォルトでANSIカラーコードを使用します。このコードは、Windowsシステムで実行する場合([Colorama](https://pypi.org/project/colorama/)など)、win32呼び出しに変換されます。 ご使用のプログラムが上記の処理を実行している場合は、ANSIコードが文字列に残るように、CIビルドの変換を無効にする必要があります。 詳細については、[GitLab CI YAMLドキュメント](https://docs.gitlab.com/ci/yaml/#coloring-script-output)でPowerShellを使用する例を参照し、[イシュー332](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/332)を参照してください。 ### エラー: `The service did not start due to a logon failure` {#error-the-service-did-not-start-due-to-a-logon-failure} WindowsにGitLab Runnerサービスをインストールして開始するときに、このエラーが発生する可能性があります。 ```shell gitlab-runner install --password WINDOWS_MACHINE_PASSWORD gitlab-runner start FATA[0000] Failed to start GitLab Runner: The service did not start due to a logon failure. ``` このエラーは、サービスの実行に使用されるユーザーが`SeServiceLogonRight`権限を持っていない場合に発生する可能性があります。この場合、選択したユーザーにこの権限を追加してから、サービスを再度開始する必要があります。 1. **Control Panel > System and Security > Administrative Tools**に移動します。 1. **Local Security Policy**ツールを開きます。 1. 左側のリストで**Security Settings > Local Policies > User Rights Assignment**を選択します。 1. 右側のリストで**Log on as a service**を開きます。 1. **Add User or Group...**を選択します。 1. 
(「手動」で、または**Advanced...**を使用して)ユーザーを追加し、設定を適用します。 [Microsoftドキュメント](https://learn.microsoft.com/en-us/previous-versions/windows/it-pro/windows-server-2012-R2-and-2012/dn221981(v=ws.11))によると、これは次のWindowsバージョンで機能します。 - Windows Vista - Windows Server 2008 - Windows 7 - Windows 8.1 - Windows Server 2008 R2 - Windows Server 2012 R2 - Windows Server 2012 - Windows 8 Local Security Policyツールは、一部のWindowsバージョン(各バージョンの「Home Edition」バリアントなど)では使用できない場合があります。 サービス設定で使用されているユーザーに`SeServiceLogonRight`を追加すると、コマンド`gitlab-runner start`が失敗せずに終了し、サービスが正常に開始されます。 ### ジョブが誤って成功または失敗としてマークされる {#job-marked-as-success-or-failed-incorrectly} ほとんどのWindowsプログラムは、成功した場合には`exit code 0`を出力します。ただし、一部のプログラムは終了コードを返さないか、成功時の値が異なることがあります。例として、Windowsツール`robocopy`があります。次の`.gitlab-ci.yml`は成功するはずですが、`robocopy`によって出力された終了コードが原因で失敗します。 ```yaml test: stage: test script: - New-Item -type Directory -Path ./source - New-Item -type Directory -Path ./dest - Write-Output "Hello World!" > ./source/file.txt - robocopy ./source ./dest tags: - windows ``` 上記のケースでは、`script:`に終了コードチェックを手動で追加する必要があります。たとえば、PowerShellスクリプトを作成できます。 ```powershell $exitCodes = 0,1 robocopy ./source ./dest if ( $exitCodes.Contains($LastExitCode) ) { exit 0 } else { exit 1 } ``` `.gitlab-ci.yml`ファイルを次のように変更します。 ```yaml test: stage: test script: - New-Item -type Directory -Path ./source - New-Item -type Directory -Path ./dest - Write-Output "Hello World!" 
> ./source/file.txt - ./robocopyCommand.ps1 tags: - windows ``` また、PowerShell関数を使用する場合は、`return`と`exit`の違いに注意してください。`exit 1`はジョブを失敗としてマークしますが、`return 1`はそのようにマークしません。 ### Kubernetes executorを使用しているときにジョブが成功としてマークされ、途中で終了した {#job-marked-as-success-and-terminated-midway-using-kubernetes-executor} 詳細については、[ジョブの実行](../executors/kubernetes/_index.md#job-execution)を参照してください。 ### Docker executor: `unsupported Windows Version` {#docker-executor-unsupported-windows-version} GitLab Runnerは、サポートされていることを確認するためにWindows Serverのバージョンを確認します。 このために`docker info`を実行します。 GitLab Runnerが起動に失敗し、Windows Serverバージョンを指定せずにエラーを表示する場合、Dockerバージョンが古い可能性があります。 ```plaintext Preparation failed: detecting base image: unsupported Windows Version: Windows Server Datacenter ``` このエラーには、Windows Serverバージョンに関する詳細情報が含まれている必要があります。この情報が、GitLab Runnerがサポートするバージョンと比較されます。 ```plaintext unsupported Windows Version: Windows Server Datacenter Version (OS Build 18363.720) ``` Windows Server上のDocker 17.06.2は、`docker info`の出力で以下を返します。 ```plaintext Operating System: Windows Server Datacenter ``` このケースでの修正策は、Windows Serverリリースと同程度に古いDockerバージョンを、より新しいDockerバージョンにアップグレードすることです。 ### Kubernetes executor: `unsupported Windows Version` {#kubernetes-executor-unsupported-windows-version} Windows上のKubernetes executorは、次のエラーで失敗することがあります。 ```plaintext Using Kubernetes namespace: gitlab-runner ERROR: Preparation failed: prepare helper image: detecting base image: unsupported Windows Version: Will be retried in 3s ... 
ERROR: Job failed (system failure): prepare helper image: detecting base image: unsupported Windows Version: ``` この問題を修正するには、GitLab Runner設定ファイルの`[runners.kubernetes.node_selector]`セクションに`node.kubernetes.io/windows-build`ノードセレクターを追加します。次に例を示します。 ```toml [runners.kubernetes.node_selector] "kubernetes.io/arch" = "amd64" "kubernetes.io/os" = "windows" "node.kubernetes.io/windows-build" = "10.0.17763" ``` ### マップされたネットワークドライブを使用しているが、ビルドが正しいパスを検出できない {#im-using-a-mapped-network-drive-and-my-build-cannot-find-the-correct-path} 管理者アカウントではなく標準ユーザーアカウントで実行されているGitLab Runnerは、マップされたネットワークドライブにアクセスできません。マップされたネットワークドライブを使用しようとすると、`The system cannot find the path specified.`エラーが発生します。このエラーは、サービスログオンセッションではリソースにアクセスする際に[セキュリティ制限](https://learn.microsoft.com/en-us/windows/win32/services/services-and-redirected-drives)があるために発生します。代わりに、ドライブの[UNCパス](https://learn.microsoft.com/en-us/dotnet/standard/io/file-path-formats#unc-paths)を使用します。 ### ビルドコンテナがサービスコンテナに接続できない {#the-build-container-is-unable-to-connect-to-service-containers} Windowsコンテナでサービスを使用するには、次のようにします。 - [ジョブごとにネットワークを作成する](../executors/docker.md#create-a-network-for-each-job)ネットワーキングモードを使用します。 - `FF_NETWORK_PER_BUILD`機能フラグが有効になっていることを確認します。 ### ジョブがビルドディレクトリを作成できず、エラーで失敗する {#the-job-cannot-create-a-build-directory-and-fails-with-an-error} `Docker-Windows` executorで`GitLab-Runner`を使用すると、ジョブが次のようなエラーで失敗することがあります。 ```shell fatal: cannot chdir to c:/builds/gitlab/test: Permission denied` ``` このエラーが発生した場合は、Dockerエンジンの実行ユーザーに、`C:\Program Data\Docker`に対する完全な権限があることを確認してください。Dockerエンジンは、特定のアクションでこのディレクトリに書き込むことができる必要がありますが、正しい権限がないと失敗します。 [WindowsでのDocker Engineの設定の詳細を参照してください](https://learn.microsoft.com/en-us/virtualization/windowscontainers/manage-docker/configure-docker-daemon)。 ### ジョブログのWindows Subsystem for Linux(WSL)STDOUT出力の空白行 {#blank-lines-for-windows-subsystem-for-linux-wsl-stdout-output-in-job-logs} デフォルトでは、Windows Subsystem for 
Linux(WSL)のSTDOUT出力はUTF8でエンコードされておらず、ジョブログに空白行として表示されます。STDOUT出力を表示するには、`WSL_UTF8`環境変数を設定して、WSLのエンコードを強制的にUTF8にすることができます。 ```yaml job: variables: WSL_UTF8: "1" ``` ================================================ FILE: docs-locale/ja-jp/install/z-os.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments description: z/OSにGitLab Runnerを手動でインストールします。 title: z/OSにGitLab Runnerを手動でインストール --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} IBM z/OS用のGitLab RunnerはGitLabによって認定されており、z/OSメインフレーム環境でネイティブにCI/CDジョブを実行できます。 [`pax`](https://www.ibm.com/docs/en/aix/7.1.0?topic=p-pax-command)アーカイブから、z/OS上にGitLab Runnerを手動でダウンロードしてインストールできます。 ## 前提条件 {#prerequisites} - GitLab Runnerを使用するには、次のAuthorized Program Analysisレポート(`APARs`)とProgram Temporary修正(`PTFs`)が必要です: - z/OS 2.5 - OA62757 - PH45182 - z/OS 3.1 - OA62757 - PH57159 - GitLab Runnerは、Shellコマンドを実行するために、`/bin/bash`にbashがインストールされていることを想定しています。bashがこの場所にインストールされていない場合は、インストールされているバージョンへのシンボリックリンクを作成します: ```shell ln -s /bin/bash ``` ## GitLab Runnerをインストールする {#install-gitlab-runner} GitLab Runnerをインストールするには、次の手順に従います。 1. 選択したインストールディレクトリに`paxfile`をダウンロードします。 1. ご使用のシステムのパッケージをインストールします: ```shell pax -ppx -rf gitlab-runner-.pax.Z ``` インストールされたファイルは、インストール場所の`gitlab-runner`ディレクトリに展開されます。 1. ファイルに実行権限を付与します: ```shell chmod +x /bin/gitlab-runner ``` 1. GitLab Runnerをエクスポートし、`PATH`に追加します: ```shell export GITLAB_RUNNER=/gitlab-runner/bin export PATH=${GITLAB_RUNNER}:${PATH} ``` 1. [Runnerを登録します](../register/_index.md)。 ## GitLab Runnerを実行 {#run-gitlab-runner} GitLab Runnerは、直接または開始されたタスクとして実行できます。 ### GitLab Runnerを直接実行 {#run-gitlab-runner-directly} 実行可能ファイルを呼び出すことによってGitLab Runnerを実行するには: 1. `/bin`ディレクトリに移動します。 1. 
サービスを開始します。 ```shell gitlab-runner start ``` ### 開始されたタスクとしてGitLab Runnerを実行 {#run-gitlab-runner-as-a-started-task} GitLab Runnerプロセスを使用可能な状態に保つには、開始されたタスクとして実行します。 1. 実行可能ファイルを`gitlab-runner.sh` Shellスクリプトでラップします: ```shell #! /bin/sh /bin/gitlab-runner start ``` 1. `jcl`開始されたタスクプログラムを定義し、継続的なプロセスとして実行するために実行します: ```jcl //GLRST PROC CNFG='' //* //GLRST EXEC PGM=BPXBATSL,REGION=0M,TIME=NOLIMIT, // PARM='PGM &CNFG./gitlab-runner.sh' //STDOUT DD SYSOUT=* //STDERR DD SYSOUT=* //* // PEND ``` ================================================ FILE: docs-locale/ja-jp/monitoring/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments description: Prometheusメトリクス。 title: GitLab Runnerの使用状況をモニタリングする --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} [Prometheus](https://prometheus.io)を使用してGitLab Runnerをモニタリングできます。 ## 埋め込みPrometheusメトリクス {#embedded-prometheus-metrics} GitLab RunnerにはネイティブのPrometheusメトリクスが含まれており、`/metrics`パス上の埋め込みHTTPサーバーを使用して公開できます。このサーバーが有効になっている場合、Prometheusモニタリングシステムによりスクレイピングしたり、他のHTTPクライアントでアクセスしたりできます。 公開される情報には以下のものが含まれます: - Runnerのビジネスロジックメトリクス(現時点で実行中のジョブの数など) - Go固有のプロセスメトリクス(ガベージコレクションの統計、goroutine、memstatなど) - 一般的なプロセスメトリクス(メモリ使用量、CPU使用量、ファイル記述子の使用量など) - ビルドバージョン情報 メトリクスの形式は、Prometheusの[公開形式](https://prometheus.io/docs/instrumenting/exposition_formats/)の仕様に記載されています。 これらのメトリクスは、オペレーターがRunnerをモニタリングしてインサイトを得るための手段として提供されています。たとえば、Runnerホストの負荷平均の増加が、処理されたジョブの増加に関連しているかどうかを確認できます。あるいは、マシンのクラスターを実行しており、インフラストラクチャに変更を加えるために、ビルドの傾向を追跡することがあります。 ### Prometheusについて詳しく理解する {#learning-more-about-prometheus} 
このHTTPエンドポイントをスクレイピングし、収集されたメトリクスを使用するようにPrometheusサーバーを設定するには、Prometheusの[入門](https://prometheus.io/docs/prometheus/latest/getting_started/)ガイドを参照してください。Prometheusの設定方法の詳細については、[設定](https://prometheus.io/docs/prometheus/latest/configuration/configuration/)セクションを参照してください。アラート設定の詳細については、[アラートルール](https://prometheus.io/docs/prometheus/latest/configuration/alerting_rules/)と[Alertmanager](https://prometheus.io/docs/alerting/latest/alertmanager/)を参照してください。 ## 利用可能なメトリクス {#available-metrics} 利用可能なすべてのメトリクスのリストを確認するには、メトリクスエンドポイントを設定して有効にした後に、メトリクスエンドポイントに対して`curl`を実行します。たとえば、リッスンポート`9252`を使用して設定されているローカルRunnerの場合は次のようになります: ```shell $ curl -s "http://localhost:9252/metrics" | grep -E "# HELP" # HELP gitlab_runner_api_request_statuses_total The total number of api requests, partitioned by runner, endpoint and status. # HELP gitlab_runner_autoscaling_machine_creation_duration_seconds Histogram of machine creation time. # HELP gitlab_runner_autoscaling_machine_states The current number of machines per state in this provider. # HELP gitlab_runner_concurrent The current value of concurrent setting # HELP gitlab_runner_errors_total The number of caught errors. # HELP gitlab_runner_limit The current value of limit setting # HELP gitlab_runner_request_concurrency The current number of concurrent requests for a new job # HELP gitlab_runner_request_concurrency_exceeded_total Count of excess requests above the configured request_concurrency limit # HELP gitlab_runner_version_info A metric with a constant '1' value labeled by different build stats fields. ... 
``` リストには[Go固有のプロセスメトリクス](https://github.com/prometheus/client_golang/blob/v1.19.0/prometheus/go_collector.go)が含まれています。Go固有のプロセスを含まない利用可能なメトリクスのリストについては、[Runnerのモニタリング](../fleet_scaling/_index.md#monitoring-runners)を参照してください。 ## `pprof` HTTPエンドポイント {#pprof-http-endpoints} メトリクスによるGitLab Runnerプロセスの内部状態の情報は貴重ですが、場合によっては、実行中のプロセスをリアルタイムで調べる必要があります。この目的で`pprof` HTTPエンドポイントを導入しました。 `pprof`エンドポイントは、`/debug/pprof/`パス上の埋め込みHTTPサーバーを介して利用できます。 `pprof`の使用方法の詳細については、その[ドキュメント](https://pkg.go.dev/net/http/pprof)を参照してください。 ## メトリクスHTTPサーバーの設定 {#configuration-of-the-metrics-http-server} {{< alert type="note" >}} メトリクスサーバーは、GitLab Runnerプロセスの内部状態に関するデータをエクスポートするため、一般に公開すべきではありません。 {{< /alert >}} 次のいずれかの方法を使用して、メトリクスHTTPサーバーを設定します: - `config.toml`ファイルで`listen_address`グローバル設定オプションを使用します。 - `run`コマンドの`--listen-address`コマンドラインオプションを使用します。 - Helm Chartを使用するRunnerの場合は、`values.yaml`で次の手順に従います: 1. `metrics`オプションを設定します: ```yaml ## Configure integrated Prometheus metrics exporter ## ## ref: https://docs.gitlab.com/runner/monitoring/#configuration-of-the-metrics-http-server ## metrics: enabled: true ## Define a name for the metrics port ## portName: metrics ## Provide a port number for the integrated Prometheus metrics exporter ## port: 9252 ## Configure a prometheus-operator serviceMonitor to allow automatic detection of ## the scraping target. Requires enabling the service resource below. ## serviceMonitor: enabled: true ... ``` 1. 設定されている`metrics`を取得するように`service`モニターを設定します: ```yaml ## Configure a service resource to allow scraping metrics by using ## prometheus-operator serviceMonitor service: enabled: true ## Provide additional labels for the service ## labels: {} ## Provide additional annotations for the service ## annotations: {} ... 
``` `config.toml`ファイルにアドレスを追加する場合は、メトリクスHTTPサーバーを起動するために、Runnerプロセスを再起動する必要があります。 どちらの場合も、オプションは`[host]:`形式の文字列を受け入れます。各要素の意味は次のとおりです: - `host`には、IPアドレスまたはホスト名を使用できます。 - `port`は、有効なTCPポートまたはシンボリックサービス名(`http`など)です。すでに[Prometheusに割り当てられている](https://github.com/prometheus/prometheus/wiki/Default-port-allocations)ポート`9252`を使用する必要があります。 リッスンアドレスにポートが含まれていない場合は、デフォルトで`9252`になります。 アドレスの例: - `:9252`は、ポート`9252`のすべてのインターフェースでリッスンします。 - `localhost:9252`は、ポート`9252`のループバックインターフェースでリッスンします。 - `[2001:db8::1]:http`は、HTTPポート`80`のIPv6アドレス`[2001:db8::1]`でリッスンします。 少なくともLinux/Unixシステムでは、`1024`より下のポートでリッスンするには、root/管理者権限が必要であることに注意してください。 HTTPサーバーは、選択されている`host:port`で**認証なしで**開きます。メトリクスサーバーをパブリックインターフェースにバインドする場合は、ファイアウォールを使用してアクセス制御を制限するか、認可とアクセス制御のためにHTTPプロキシを追加します。 ## GitLab Runner Operatorによって管理されるGitLab Runnerをモニタリングします {#monitor-operator-managed-gitlab-runners} GitLab Runner Operatorによって管理されるGitLab Runnerは、スタンドアロンのGitLab Runnerインスタンスと同じ埋め込みPrometheusメトリクスサーバーを使用します。メトリクスサーバーは、`listenAddr`が`[::]:9252`に設定されており、ポート`9252`上のすべてのIPv6およびIPv4インターフェースでリッスンするように事前設定されています。 ### メトリクスポートを公開する {#expose-metrics-port} GitLab Runner Operatorによって管理されるGitLab Runnerのモニタリングとメトリクス収集を有効にするには、[Operatorが管理するGitLab Runnerをモニタリングする](#monitor-operator-managed-gitlab-runners)を参照してください。 #### メトリクスポートを設定する {#configure-the-metrics-port} 次のパッチをRunner設定の`podSpec`フィールドに追加します: ```yaml apiVersion: apps.gitlab.com/v1beta2 kind: Runner metadata: name: gitlab-runner spec: gitlabUrl: https://gitlab.example.com token: gitlab-runner-secret buildImage: alpine podSpec: name: "metrics-config" patch: | { "containers": [ { "name": "runner", "ports": [ { "name": "metrics", "containerPort": 9252, "protocol": "TCP" } ] } ] } patchType: "strategic" ``` この設定では: - `name`: 識別用のカスタム`PodSpec`に名前を割り当てます。 - `patch`: `PodSpec`に適用するJSONパッチを定義し、Runnerコンテナ上のポート`9252`を公開します。 - `patchType`: パッチを適用するために、`strategic`マージ戦略(デフォルト)を使用します。 - `port`: Kubernetesサービスで簡単に識別できるように、`metrics`として名前が付けられています。 #### Prometheusのスクレイピングを設定する 
{#configure-prometheus-scraping} Prometheus Operatorを使用する環境の場合は、Runnerポッドからメトリクスを直接スクレイプするための`PodMonitor`リソースを作成します: ```yaml apiVersion: monitoring.coreos.com/v1 kind: PodMonitor metadata: name: gitlab-runner-metrics namespace: kube-prometheus-stack labels: release: kube-prometheus-stack spec: selector: matchLabels: app.kubernetes.io/component: runner namespaceSelector: matchNames: - gitlab-runner-system podMetricsEndpoints: - port: metrics interval: 10s path: /metrics ``` `PodMonitor`構成を適用します: ```shell kubectl apply -f gitlab-runner-podmonitor.yaml ``` `PodMonitor`構成: - `selector`: `app.kubernetes.io/component: runner`ラベルが付いたポッドと一致します。 - `namespaceSelector`: スクレイピングを`gitlab-runner-system`ネームスペースに制限します。 - `podMetricsEndpoints`: メトリクスポート、スクレイプ間隔、パスを定義します。 #### Runnerの識別をメトリクスに追加する {#add-runner-identification-to-metrics} すべてのエクスポートされたメトリクスにRunnerの識別を追加するには、`PodMonitor`にrelabel設定を含めます: ```yaml podMetricsEndpoints: - port: metrics interval: 10s path: /metrics relabelings: - sourceLabels: [__meta_kubernetes_pod_label_app_kubernetes_io_name] targetLabel: runner_name ``` relabel設定: - 各Runnerポッドから`app.kubernetes.io/name`ラベルを抽出します(GitLab Runner Operatorによって自動的に設定されます)。 - そのポッドからのすべてのメトリクスに、`runner_name`ラベルとして追加します。 - 特定のRunnerインスタンスによるフィルターと集計メトリクスを有効にします。 次に示すのは、Runnerの識別情報を含むメトリクスの例です: ```prometheus gitlab_runner_concurrent{runner_name="my-gitlab-runner"} 10 gitlab_runner_jobs_running_total{runner_name="my-gitlab-runner"} 3 ``` #### Prometheusの直接スクレイプ設定 {#direct-prometheus-scrape-configuration} Prometheus Operatorを使用していない場合は、Prometheusスクレイプ設定でrelabel設定を直接追加できます: ```yaml scrape_configs: - job_name: 'gitlab-runner-operator' kubernetes_sd_configs: - role: pod namespaces: names: - gitlab-runner-system relabel_configs: - source_labels: [__meta_kubernetes_pod_label_app_kubernetes_io_name] target_label: runner_name metrics_path: /metrics scrape_interval: 10s ``` この設定では: - Kubernetesサービスディスカバリを使用して、`gitlab-runner-system`ネームスペース内のポッドを検索します。 - 
`app.kubernetes.io/name`ラベルを抽出し、メトリクスに`runner_name`として追加します。 ## Kubernetes以外のexecutorを使用するGitLab Runnerをモニタリングする {#monitor-gitlab-runner-with-executors-other-than-kubernetes} Kubernetes以外のexecutorを使用するGitLab Runnerデプロイメントの場合、Prometheus設定で外部ラベルを介してRunnerの識別を追加できます。 ### 外部ラベルを使用した静的な設定 {#static-configuration-with-external-labels} GitLab Runnerインスタンスをスクレイプし、識別ラベルを追加するようにPrometheusを設定します: ```yaml scrape_configs: - job_name: 'gitlab-runner' static_configs: - targets: ['runner1.example.com:9252'] labels: runner_name: 'production-runner-1' - targets: ['runner2.example.com:9252'] labels: runner_name: 'staging-runner-1' metrics_path: /metrics scrape_interval: 30s ``` この設定により、メトリクスにRunnerの識別が追加されます: ```prometheus gitlab_runner_concurrent{runner_name="production-runner-1"} 10 gitlab_runner_jobs_running_total{runner_name="staging-runner-1"} 3 ``` この設定により、次のことが可能になります: - 特定のRunnerインスタンスでメトリクスをフィルターします。 - Runner固有のダッシュボードとアラートを作成します。 - さまざまなRunnerデプロイメント全体のパフォーマンスを追跡する。 ### Operatorが管理するGitLab Runnerで利用可能なメトリクス {#available-metrics-for-operator-managed-gitlab-runners} GitLab Runner Operatorによって管理されるGitLab Runnerは、スタンドアロンのGitLab Runnerデプロイメントと同じメトリクスを公開します。利用可能なすべてのメトリクスを表示するには、`kubectl`を使用してメトリクスエンドポイントにアクセスします: ```shell kubectl port-forward pod/<runner-pod-name> 9252:9252 curl -s "http://localhost:9252/metrics" | grep -E "# HELP" ``` 利用可能なメトリクスの完全なリストについては、[利用可能なメトリクス](#available-metrics)を参照してください。 ### Operatorが管理するGitLab Runnerのセキュリティに関する考慮事項 {#security-considerations-for-operator-managed-gitlab-runners} GitLab Runner Operatorによって管理されるGitLab Runnerのメトリクス収集を設定する場合: - Kubernetes `NetworkPolicies`を使用して、承認されたモニタリングシステムへのアクセスを制限します。 - 本番環境でのメトリクススクレイピングには、`mutual` TLS暗号化の使用を検討してください。 ### Operatorが管理するGitLab Runnerモニタリングのトラブルシューティング {#troubleshooting-operator-managed-gitlab-runner-monitoring} #### メトリクスエンドポイントにアクセスできません {#metrics-endpoint-not-accessible} メトリクスエンドポイントにアクセスできない場合: 1. ポッドの仕様にメトリクスポート設定が含まれていることを検証する。 1. 
Runnerポッドが実行中で正常であることを確認します: ```shell kubectl get pods -l app.kubernetes.io/component=runner -n gitlab-runner-system kubectl describe pod <runner-pod-name> -n gitlab-runner-system ``` 1. メトリクスエンドポイントへの接続をテストします: ```shell kubectl port-forward pod/<runner-pod-name> 9252:9252 -n gitlab-runner-system curl "http://localhost:9252/metrics" ``` #### Prometheusにメトリクスが表示されない {#missing-metrics-in-prometheus} Prometheusにメトリクスが表示されない場合: 1. `PodMonitor`が正しく設定され、適用されていることを検証する。 1. ネームスペースとラベルセレクターがRunnerポッドと一致することを確認します。 1. スクレイピングエラーのPrometheusログをレビューします。 1. `PodMonitor`がPrometheus Operatorによって検出可能であることを検証します: ```shell kubectl get podmonitor gitlab-runner-metrics -n kube-prometheus-stack kubectl describe podmonitor gitlab-runner-metrics -n kube-prometheus-stack ``` ================================================ FILE: docs-locale/ja-jp/register/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: Runnerの登録 --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} {{< history >}} - GitLab Runner 15.0で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/3414)。登録リクエストの形式が変更されたため、GitLab Runnerは以前のバージョンのGitLabと通信できなくなりました。GitLabのバージョンに適したバージョンのGitLab Runnerを使用するか、GitLabアプリケーションをアップグレードする必要があります。 {{< /history >}} Runnerの登録とは、Runnerを1つ以上のGitLabインスタンスに関連付けるプロセスです。GitLabインスタンスからジョブを取得するには、Runnerを登録する必要があります。 ## 要件 {#requirements} Runnerを登録する前に: - [GitLab Runner](../install/_index.md)を、GitLabがインストールされているサーバーとは別のサーバーにインストールします。 - DockerでRunnerを登録するために、[DockerコンテナにGitLab Runnerをインストール](../install/docker.md)します。 ## Runner認証トークンで登録する {#register-with-a-runner-authentication-token} {{< history >}} - GitLab 15.10で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/issues/29613)されました。 {{< /history >}} 前提要件: - 
Runner認証トークンを取得します。次のいずれかの方法があります: - インスタンス、グループ、またはプロジェクトのRunnerを作成します。手順については、[manageランナー](https://docs.gitlab.com/ci/runners/runners_scope)を参照してください。 - `config.toml`ファイルの中でRunner認証トークンを見つける。Runner認証トークンのプレフィックスは`glrt-`です。 Runnerを登録すると、`config.toml`に設定が保存されます。 [Runner認証トークン](https://docs.gitlab.com/security/tokens/#runner-authentication-tokens)を使用してRunnerを登録するには: 1. registerコマンドを実行します: {{< tabs >}} {{< tab title="Linux" >}} ```shell sudo gitlab-runner register ``` プロキシの背後にいる場合は、環境変数を追加してから、登録コマンドを実行します: ```shell export HTTP_PROXY=http://yourproxyurl:3128 export HTTPS_PROXY=http://yourproxyurl:3128 sudo -E gitlab-runner register ``` {{< /tab >}} {{< tab title="macOS" >}} ```shell gitlab-runner register ``` {{< /tab >}} {{< tab title="Windows" >}} ```shell .\gitlab-runner.exe register ``` {{< /tab >}} {{< tab title="FreeBSD" >}} ```shell sudo -u gitlab-runner -H /usr/local/bin/gitlab-runner register ``` {{< /tab >}} {{< tab title="Docker" >}} コンテナを使用して登録するには、次のいずれかを実行します: - 適切な設定ボリュームマウントによる有効期間の短い`gitlab-runner`コンテナを使用します: - ローカルシステムボリュームマウントの場合: ```shell docker run --rm -it -v /srv/gitlab-runner/config:/etc/gitlab-runner gitlab/gitlab-runner register ``` インストール中に`/srv/gitlab-runner/config`以外の設定ボリュームを使用した場合は、適切なボリュームでコマンドを更新します。 - Dockerボリュームマウントの場合: ```shell docker run --rm -it -v gitlab-runner-config:/etc/gitlab-runner gitlab/gitlab-runner:latest register ``` - アクティブなRunnerコンテナ内で実行可能ファイルを使用します: ```shell docker exec -it gitlab-runner gitlab-runner register ``` {{< /tab >}} {{< /tabs >}} 1. GitLabのURLを入力します: - GitLab Self-ManagedのRunnerの場合は、GitLabインスタンスのURLを使用します。たとえば、プロジェクトが`gitlab.example.com/yourname/yourproject`でホストされている場合、GitLabインスタンスのURLは`https://gitlab.example.com`です。 - GitLab.comのRunnerの場合、GitLabインスタンスのURLは`https://gitlab.com`です。 1. Runner認証トークンを入力します。 1. Runnerの説明を入力します。 1. ジョブタグをカンマで区切って入力します。 1. (オプション)Runnerのメンテナンスノートを入力します。 1. 
[executor](../executors/_index.md)のタイプを入力します。 - 異なる設定の複数のRunnerを同じホストマシンに登録するには、それぞれについて`register`コマンドを繰り返します。 - 複数のホストマシンに同じ設定を登録するには、各Runnerの登録に同じRunner認証トークンを使用します。詳細については、[Runner設定の再利用](../fleet_scaling/_index.md#reusing-a-runner-configuration)を参照してください。 [非対話モード](../commands/_index.md#non-interactive-registration)を使用して、追加の引数を使用してRunnerを登録することもできます: {{< tabs >}} {{< tab title="Linux" >}} ```shell sudo gitlab-runner register \ --non-interactive \ --url "https://gitlab.com/" \ --token "$RUNNER_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" ``` {{< /tab >}} {{< tab title="macOS" >}} ```shell gitlab-runner register \ --non-interactive \ --url "https://gitlab.com/" \ --token "$RUNNER_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" ``` {{< /tab >}} {{< tab title="Windows" >}} ```shell .\gitlab-runner.exe register \ --non-interactive \ --url "https://gitlab.com/" \ --token "$RUNNER_TOKEN" \ --executor "docker-windows" \ --docker-image mcr.microsoft.com/windows/servercore:1809_amd64 \ --description "docker-runner" ``` {{< /tab >}} {{< tab title="FreeBSD" >}} ```shell sudo -u gitlab-runner -H /usr/local/bin/gitlab-runner register --non-interactive \ --url "https://gitlab.com/" \ --token "$RUNNER_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" ``` {{< /tab >}} {{< tab title="Docker" >}} ```shell docker run --rm -v /srv/gitlab-runner/config:/etc/gitlab-runner gitlab/gitlab-runner register \ --non-interactive \ --url "https://gitlab.com/" \ --token "$RUNNER_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" ``` {{< /tab >}} {{< /tabs >}} ## Runner登録トークンで登録する(非推奨) {#register-with-a-runner-registration-token-deprecated} {{< alert type="warning" >}} Runnerの登録トークンといくつかのRunnerの設定引数は[非推奨](https://gitlab.com/gitlab-org/gitlab/-/issues/380872)になりました。これらは、GitLab 
20.0での削除が予定されています。代わりにRunner認証トークンを使用してください。詳細については、[新しいRunner登録ワークフローに移行する](https://docs.gitlab.com/ci/runners/new_creation_workflow/)を参照してください。 {{< /alert >}} 前提要件: - 管理者エリアでRunner登録トークンが[有効](https://docs.gitlab.com/administration/settings/continuous_integration/#allow-runner-registrations-tokens)になっている必要があります。 - 登録したいインスタンス、グループ、またはプロジェクトでRunner登録トークンを取得します。手順については、[manageランナー](https://docs.gitlab.com/ci/runners/runners_scope)を参照してください。 Runnerを登録すると、`config.toml`に設定が保存されます。 [Runner登録トークン](https://docs.gitlab.com/security/tokens/#runner-registration-tokens-deprecated)を使用してRunnerを登録するには: 1. registerコマンドを実行します: {{< tabs >}} {{< tab title="Linux" >}} ```shell sudo gitlab-runner register ``` プロキシの背後にいる場合は、環境変数を追加してから、登録コマンドを実行します: ```shell export HTTP_PROXY=http://yourproxyurl:3128 export HTTPS_PROXY=http://yourproxyurl:3128 sudo -E gitlab-runner register ``` {{< /tab >}} {{< tab title="macOS" >}} ```shell gitlab-runner register ``` {{< /tab >}} {{< tab title="Windows" >}} ```shell .\gitlab-runner.exe register ``` {{< /tab >}} {{< tab title="FreeBSD" >}} ```shell sudo -u gitlab-runner -H /usr/local/bin/gitlab-runner register ``` {{< /tab >}} {{< tab title="Docker" >}} インストール中に作成したコンテナを登録するため、有効期間の短い`gitlab-runner`コンテナを起動するには: - ローカルシステムボリュームマウントの場合: ```shell docker run --rm -it -v /srv/gitlab-runner/config:/etc/gitlab-runner gitlab/gitlab-runner register ``` インストール中に`/srv/gitlab-runner/config`以外の設定ボリュームを使用した場合は、適切なボリュームでコマンドを更新します。 - Dockerボリュームマウントの場合: ```shell docker run --rm -it -v gitlab-runner-config:/etc/gitlab-runner gitlab/gitlab-runner:latest register ``` {{< /tab >}} {{< /tabs >}} 1. GitLabのURLを入力します: - GitLab Self-ManagedのRunnerの場合は、GitLabインスタンスのURLを使用します。たとえば、プロジェクトが`gitlab.example.com/yourname/yourproject`でホストされている場合、GitLabインスタンスのURLは`https://gitlab.example.com`です。 - GitLab.comの場合、GitLabインスタンスのURLは`https://gitlab.com`です。 1. Runnerを登録するために取得したトークンを入力します。 1. Runnerの説明を入力します。 1. ジョブタグをカンマで区切って入力します。 1. (オプション)Runnerのメンテナンスノートを入力します。 1. 
[executor](../executors/_index.md)のタイプを入力します。 異なる設定の複数のRunnerを同じホストマシンに登録するには、それぞれについて`register`コマンドを繰り返します。 [非対話モード](../commands/_index.md#non-interactive-registration)を使用して、追加の引数を使用してRunnerを登録することもできます: {{< tabs >}} {{< tab title="Linux" >}} ```shell sudo gitlab-runner register \ --non-interactive \ --url "https://gitlab.com/" \ --registration-token "$PROJECT_REGISTRATION_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" \ --maintenance-note "Free-form maintainer notes about this runner" \ --tag-list "docker,aws" \ --run-untagged="true" \ --locked="false" \ --access-level="not_protected" ``` {{< /tab >}} {{< tab title="macOS" >}} ```shell gitlab-runner register \ --non-interactive \ --url "https://gitlab.com/" \ --registration-token "$PROJECT_REGISTRATION_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" \ --maintenance-note "Free-form maintainer notes about this runner" \ --tag-list "docker,aws" \ --run-untagged="true" \ --locked="false" \ --access-level="not_protected" ``` {{< /tab >}} {{< tab title="Windows" >}} ```shell .\gitlab-runner.exe register \ --non-interactive \ --url "https://gitlab.com/" \ --registration-token "$PROJECT_REGISTRATION_TOKEN" \ --executor "docker-windows" \ --docker-image mcr.microsoft.com/windows/servercore:1809_amd64 \ --description "docker-runner" \ --maintenance-note "Free-form maintainer notes about this runner" \ --tag-list "docker,aws" \ --run-untagged="true" \ --locked="false" \ --access-level="not_protected" ``` {{< /tab >}} {{< tab title="FreeBSD" >}} ```shell sudo -u gitlab-runner -H /usr/local/bin/gitlab-runner register --non-interactive \ --url "https://gitlab.com/" \ --registration-token "$PROJECT_REGISTRATION_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" \ --maintenance-note "Free-form maintainer notes about this runner" \ --tag-list "docker,aws" \ --run-untagged="true" \ --locked="false" \ 
--access-level="not_protected" ``` {{< /tab >}} {{< tab title="Docker" >}} ```shell docker run --rm -v /srv/gitlab-runner/config:/etc/gitlab-runner gitlab/gitlab-runner register \ --non-interactive \ --url "https://gitlab.com/" \ --registration-token "$PROJECT_REGISTRATION_TOKEN" \ --executor "docker" \ --docker-image alpine:latest \ --description "docker-runner" \ --maintenance-note "Free-form maintainer notes about this runner" \ --tag-list "docker,aws" \ --run-untagged="true" \ --locked="false" \ --access-level="not_protected" ``` {{< /tab >}} {{< /tabs >}} - `--access-level`は、[保護されたRunner](https://docs.gitlab.com/ci/runners/configure_runners/#prevent-runners-from-revealing-sensitive-information)を作成するかどうかを設定します。 - 保護されたRunnerの場合は、`--access-level="ref_protected"`パラメータを使用します。 - 保護されていないRunnerの場合は、`--access-level="not_protected"`を使用するか、値を未定義のままにします。 - `--maintenance-note`を使用すると、Runnerのメンテナンスに役立つ情報を追加できます。最大長は255文字です。 ### レガシー互換登録プロセス {#legacy-compatible-registration-process} {{< history >}} - GitLab 16.2で[導入](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/4157)されました。 {{< /history >}} Runnerの登録トークンといくつかのRunnerの設定引数は[非推奨](https://gitlab.com/gitlab-org/gitlab/-/issues/379743)になりました。これらは、GitLab 20.0での削除が予定されています。自動化ワークフローへの影響を最小限にするため、レガシーパラメータ`--registration-token`の中でRunner認証トークンが指定されている場合、`legacy-compatible registration process`がトリガーされます。 レガシー互換登録プロセスでは、次のコマンドラインパラメータは無視されます。これらのパラメータは、UIまたはAPIでRunnerが作成された場合にのみ設定可能です。 - `--locked` - `--access-level` - `--run-untagged` - `--maximum-timeout` - `--paused` - `--tag-list` - `--maintenance-note` ## 設定テンプレートを使用して登録する {#register-with-a-configuration-template} 設定テンプレートを使用すると、`register`コマンドでサポートされていない設定でRunnerを登録できます。 前提要件: - テンプレートファイルの格納場所となるボリュームは、GitLab Runnerコンテナにマウントされている必要があります。 - Runner認証トークンまたは登録トークン: - Runner認証トークンを取得します(推奨)。次のいずれかの方法があります: - 登録したいインスタンス、グループ、またはプロジェクトでRunner認証トークンを取得します。手順については、[manageランナー](https://docs.gitlab.com/ci/runners/runners_scope)を参照してください。 - 
`config.toml`ファイルの中でRunner認証トークンを見つける。Runner認証トークンのプレフィックスは`glrt-`です。 - (非推奨)インスタンス、グループ、またはプロジェクトの各RunnerのためのRunner登録トークンを取得する。手順については、[manageランナー](https://docs.gitlab.com/ci/runners/runners_scope)を参照してください。 設定テンプレートは、次の理由により`register`コマンドの一部の引数をサポートしていない自動化環境で使用できます: - 環境に基づく環境変数のサイズ制限。 - Kubernetes用のexecutorボリュームで使用できないコマンドラインオプション。 {{< alert type="warning" >}} 設定テンプレートでサポートされるのは単一の[`[[runners]]`](../configuration/advanced-configuration.md#the-runners-section)セクションだけであり、グローバルオプションはサポートされません。 {{< /alert >}} Runnerを登録するには、次のようにします: 1. `.toml`形式の設定テンプレートファイルを作成し、仕様を追加します。次に例を示します: ```toml [[runners]] [runners.kubernetes] [runners.kubernetes.volumes] [[runners.kubernetes.volumes.empty_dir]] name = "empty_dir" mount_path = "/path/to/empty_dir" medium = "Memory" ``` 1. ファイルのパスを追加します。次のいずれかを使用できます: - コマンドラインの[非対話モード](../commands/_index.md#non-interactive-registration): ```shell $ sudo gitlab-runner register \ --template-config /tmp/test-config.template.toml \ --non-interactive \ --url "https://gitlab.com" \ --token \ "# --registration-token if using the deprecated runner registration token" --name test-runner \ --executor kubernetes --host = "http://localhost:9876/" ``` - `.gitlab.yaml`ファイルの中の環境変数: ```yaml variables: TEMPLATE_CONFIG_FILE = ``` 環境変数を更新する場合、`register`コマンドでファイルパスを毎回追加する必要はありません。 Runnerを登録すると、`config.toml`内で作成された`[[runners]]`エントリと設定テンプレートの設定がマージされます: ```toml concurrent = 1 check_interval = 0 [session_server] session_timeout = 1800 [[runners]] name = "test-runner" url = "https://gitlab.com" token = "glrt-" executor = "kubernetes" [runners.kubernetes] host = "http://localhost:9876/" bearer_token_overwrite_allowed = false image = "" namespace = "" namespace_overwrite_allowed = "" privileged = false service_account_overwrite_allowed = "" pod_labels_overwrite_allowed = "" pod_annotations_overwrite_allowed = "" [runners.kubernetes.volumes] [[runners.kubernetes.volumes.empty_dir]] name = "empty_dir" mount_path = "/path/to/empty_dir" medium = "Memory" ``` 
テンプレートの設定がマージされるのは、次の場合のみです: - 空の文字列 - nullまたは存在しないエントリ - ゼロ値 コマンドライン引数と環境変数は、設定テンプレートの設定よりも優先されます。たとえば、テンプレートでは`docker`executorを指定し、コマンドラインでは`shell`を指定した場合、設定されるexecutorは`shell`になります。 ## GitLab Community Editionインテグレーションテスト用にRunnerを登録する {#register-a-runner-for-gitlab-community-edition-integration-tests} GitLab Community Editionインテグレーションをテストするには、設定テンプレートを使用して、制限付きDocker executorでRunnerを登録します。 1. [プロジェクトRunner](https://docs.gitlab.com/ci/runners/runners_scope/#create-a-project-runner-with-a-runner-authentication-token)を作成します。 1. `[[runners.docker.services]]`セクションを含むテンプレートを作成します: ```shell $ cat > /tmp/test-config.template.toml << EOF [[runners]] [runners.docker] [[runners.docker.services]] name = "mysql:latest" [[runners.docker.services]] name = "redis:latest" EOF ``` 1. Runnerを登録します: {{< tabs >}} {{< tab title="Linux" >}} ```shell sudo gitlab-runner register \ --non-interactive \ --url "https://gitlab.com" \ --token "$RUNNER_AUTHENTICATION_TOKEN" \ --template-config /tmp/test-config.template.toml \ --description "gitlab-ce-ruby-3.1" \ --executor "docker" \ --docker-image ruby:3.1 ``` {{< /tab >}} {{< tab title="macOS" >}} ```shell gitlab-runner register \ --non-interactive \ --url "https://gitlab.com" \ --token "$RUNNER_AUTHENTICATION_TOKEN" \ --template-config /tmp/test-config.template.toml \ --description "gitlab-ce-ruby-3.1" \ --executor "docker" \ --docker-image ruby:3.1 ``` {{< /tab >}} {{< tab title="Windows" >}} ```shell .\gitlab-runner.exe register \ --non-interactive \ --url "https://gitlab.com" \ --token "$RUNNER_AUTHENTICATION_TOKEN" \ --template-config /tmp/test-config.template.toml \ --description "gitlab-ce-ruby-3.1" \ --executor "docker" \ --docker-image ruby:3.1 ``` {{< /tab >}} {{< tab title="FreeBSD" >}} ```shell sudo -u gitlab-runner -H /usr/local/bin/gitlab-runner register --non-interactive \ --url "https://gitlab.com" \ --token "$RUNNER_AUTHENTICATION_TOKEN" \ --template-config /tmp/test-config.template.toml \ --description "gitlab-ce-ruby-3.1" 
\ --executor "docker" \ --docker-image ruby:3.1 ``` {{< /tab >}} {{< tab title="Docker" >}} ```shell docker run --rm -v /srv/gitlab-runner/config:/etc/gitlab-runner gitlab/gitlab-runner register \ --non-interactive \ --url "https://gitlab.com" \ --token "$RUNNER_AUTHENTICATION_TOKEN" \ --template-config /tmp/test-config.template.toml \ --description "gitlab-ce-ruby-3.1" \ --executor "docker" \ --docker-image ruby:3.1 ``` {{< /tab >}} {{< /tabs >}} その他の設定オプションについては、[高度な設定](../configuration/advanced-configuration.md)を参照してください。 ## DockerによるRunnerの登録 {#registering-runners-with-docker} DockerコンテナによるRunner登録後: - 設定が設定ボリュームに書き込まれます。たとえば、`/srv/gitlab-runner/config`などです。 - コンテナが設定ボリュームを使用してRunnerを読み込みます。 {{< alert type="note" >}} `gitlab-runner restart`がDockerコンテナ内で実行される場合、GitLab Runnerは既存のプロセスを再起動せず、新しいプロセスを開始します。設定変更を適用するには、Dockerコンテナを再起動します。 {{< /alert >}} ## トラブルシューティング {#troubleshooting} ### エラー: `Check registration token` {#error-check-registration-token} `check registration token`(登録トークンを確認してください)エラーメッセージは、登録中に入力したRunner登録トークンをGitLabインスタンスが認識しない場合に表示されます。この問題は、次のいずれかの場合に発生する可能性があります: - GitLabで、インスタンス、グループ、またはプロジェクトのRunner登録トークンが変更された。 - 正しくないRunner登録トークンが入力された。 このエラーが発生した場合は、GitLab管理者に次のことを依頼できます: - Runner登録トークンが有効であることを確認する。 - プロジェクトまたはグループでRunner登録が[許可されている](https://docs.gitlab.com/administration/settings/continuous_integration/#restrict-runner-registration-by-all-members-in-a-group)ことを確認する。 ### エラー: `410 Gone - runner registration disallowed` {#error-410-gone---runner-registration-disallowed} `410 Gone - runner registration disallowed`(Runner登録が無効です)エラーメッセージは、登録トークンによるRunner登録が無効になっている場合に表示されます。 このエラーが発生した場合は、GitLab管理者に次のことを依頼できます: - Runner登録トークンが有効であることを確認する。 - インスタンスでのRunner登録が[許可されている](https://docs.gitlab.com/administration/settings/continuous_integration/#allow-runner-registrations-tokens)ことを確認する。 - 
グループまたはプロジェクトのRunner登録トークンの場合、それぞれ対応するグループやプロジェクトでのRunner登録が[許可されている](https://docs.gitlab.com/ci/runners/runners_scope/#enable-use-of-runner-registration-tokens-in-projects-and-groups)ことを確認する。 ================================================ FILE: docs-locale/ja-jp/runner_autoscale/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runnerのオートスケール --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerのオートスケールを使用すると、パブリッククラウドインスタンスでRunnerを自動的にスケールできます。オートスケーラーを使用するようにRunnerを設定すると、クラウドインフラストラクチャ上で複数のジョブを同時に実行することで、CI/CDジョブのワークロード増加に対処できます。 パブリッククラウドインスタンスのオートスケールオプションに加えて、次のコンテナオーケストレーションソリューションを使用して、Runnerフリートをホストおよびスケールできます。 - Red Hat OpenShift Kubernetesクラスター - Kubernetesクラスター: AWS EKS、Azure、オンプレミス - AWS FargateのAmazon Elastic Container Servicesクラスター ## Runnerマネージャーを設定する {#configure-the-runner-manager} GitLab Runnerのオートスケール(Docker Machine AutoscalingソリューションとGitLab Runner Autoscalerの両方)を使用するようにRunnerマネージャーを設定する必要があります。 Runnerマネージャーは、オートスケール用に複数のRunnerを作成するRunnerの一種です。GitLabに対しジョブを継続的にポーリングし、パブリッククラウドインフラストラクチャと連携して、ジョブを実行するための新しいインスタンスを作成します。Runnerマネージャーは、GitLab Runnerがインストールされているホストマシン上で実行する必要があります。DockerとGitLab Runnerがサポートするディストリビューション(Ubuntu、Debian、CentOS、RHELなど)を選択します。 1. Runnerマネージャーをホストするインスタンスを作成します。これはスポットインスタンス(AWS)またはスポット仮想マシン(GCP、Azure)**であってはなりません**。 1. [インスタンス](../install/linux-repository.md)にGitLab Runnerをインストールします。 1. 
クラウドプロバイダーの認証情報をRunnerマネージャーのホストマシンに追加します。 {{< alert type="note" >}} コンテナ内でRunnerマネージャーをホストできます。[GitLab.comでホストされるRunner](https://docs.gitlab.com/ci/runners/)の場合、Runnerマネージャーは仮想マシンインスタンスでホストされます。 {{< /alert >}} ### GitLab Runner Docker Machine Autoscalingの認証情報の設定例 {#example-credentials-configuration-for-gitlab-runner-docker-machine-autoscaling} このスニペットは、ファイル`config.toml`の`runners.machine`セクションの中にあります。 ``` toml [runners.machine] IdleCount = 1 IdleTime = 1800 MaxBuilds = 10 MachineDriver = "amazonec2" MachineName = "gitlab-docker-machine-%s" MachineOptions = [ "amazonec2-access-key=XXXX", "amazonec2-secret-key=XXXX", "amazonec2-region=eu-central-1", "amazonec2-vpc-id=vpc-xxxxx", "amazonec2-subnet-id=subnet-xxxxx", "amazonec2-zone=x", "amazonec2-use-private-address=true", "amazonec2-security-group=xxxxx", ] ``` {{< alert type="note" >}} 認証情報ファイルはオプションです。AWS環境のRunnerマネージャーには[AWSアイデンティティおよびアクセス管理](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2_instance-profiles.html)(IAM)インスタンスプロファイルを使用できます。AWSでRunnerマネージャーをホストしない場合は、認証情報ファイルを使用できます。 {{< /alert >}} ## 耐障害性のあるデザインを実装する {#implement-a-fault-tolerant-design} 耐障害性のあるデザインを作成し、Runnerマネージャーホストの障害を防ぐには、同じRunnerタグを使用する少なくとも2つのRunnerマネージャーから始めます。 たとえばGitLab.comでは、[LinuxでホストされるRunner](https://docs.gitlab.com/ci/runners/hosted_runners/linux/)に対して複数のRunnerマネージャーが設定されています。各Runnerマネージャーにはタグ`saas-linux-small-amd64`があります。 組織のCI/CDワークロードの効率性とパフォーマンスのバランスを取るためにオートスケールパラメータを調整するときには、可観測性とRunnerフリートのメトリクスを使用します。 ## Runnerのオートスケールexecutorを設定する {#configure-runner-autoscaling-executors} Runnerマネージャーを設定したら、オートスケールに固有のexecutorを設定します: - [インスタンスExecutor](../executors/instance.md) - [Docker Autoscaling Executor](../executors/docker_autoscaler.md) - [Docker Machine Executor](../executors/docker_machine.md) {{< alert type="note" >}} Instance executorとDocker Autoscaling executorを使用してください。これらのexecutorは、Docker Machineオートスケーラーに代わるテクノロジーを構成しています。 {{< /alert >}} ================================================ FILE: 
docs-locale/ja-jp/runner_autoscale/gitlab-runner-autoscaler.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab Runnerインスタンスグループオートスケーラー --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerインスタンスグループオートスケーラーは、Docker Machineをベースとしたオートスケールテクノロジーの後継機能です。GitLab Runnerインスタンスグループのオートスケールソリューションのコンポーネントは次のとおりです: - taskscaler: 自動スケールロジック、ブックキーピングを管理し、クラウドプロバイダーのインスタンスの自動スケールグループを使用するRunnerインスタンスのフリートを作成します。 - [Fleeting](../fleet_scaling/fleeting.md): クラウドプロバイダー仮想マシンの抽象化。 - クラウドプロバイダープラグイン: ターゲットクラウドプラットフォームへのAPIコールを処理します。プラグイン開発フレームワークを使用して実装されます。 GitLab Runnerのインスタンスグループオートスケールは、次のように動作します: 1. Runnerマネージャーは、GitLabジョブを継続的にポーリングします。 1. 応答として、GitLabはジョブのペイロードをRunnerマネージャーに送信します。 1. Runnerマネージャーは、パブリッククラウドインフラストラクチャとやり取りして、ジョブを実行するための新しいインスタンスを作成します。 1. Runnerマネージャーは、これらのジョブをオートスケールプール内の利用可能なRunnerに配布します。 ![GitLab Next Runner Autoscalingの概要](img/next-runner-autoscaling-overview.png) ## Runnerマネージャーを設定する {#configure-the-runner-manager} GitLab Runnerインスタンスグループオートスケーラーを使用するには、[Runnerマネージャーを設定](_index.md#configure-the-runner-manager)する必要があります。 1. Runnerマネージャーをホストするインスタンスを作成します。これはスポットインスタンス(AWS)またはスポット仮想マシン(GCP、Azure)**であってはなりません**。 1. インスタンスに[GitLab Runnerをインストール](../install/linux-repository.md)します。 1. 
クラウドプロバイダーの認証情報をRunnerマネージャーのホストマシンに追加します。 {{< alert type="note" >}} コンテナ内でRunnerマネージャーをホストできます。GitLab.comおよびGitLab Dedicatedの[ホストされたRunner](https://docs.gitlab.com/ci/runners/)の場合、Runnerマネージャーは仮想マシンインスタンスでホストされます。 {{< /alert >}} ### GitLab Runnerインスタンスグループオートスケーラーの認証情報の設定例 {#example-credentials-configuration-for-gitlab-runner-instance-group-autoscaler} AWS環境のRunnerマネージャーには[AWSアイデンティティおよびアクセス管理](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-ec2_instance-profiles.html)(IAM)インスタンスプロファイルを使用できます。AWSでRunnerマネージャーをホストしない場合は、認証情報ファイルを使用できます。 次に例を示します: ``` toml ## credentials_file [default] aws_access_key_id=__REDACTED__ aws_secret_access_key=__REDACTED__ ``` 認証情報ファイルはオプションです。 ## サポートされているパブリッククラウドインスタンス {#supported-public-cloud-instances} パブリッククラウドプロバイダーのコンピューティングインスタンスでは、次のオートスケールオプションがサポートされています: - Amazon Web Services EC2インスタンス - Google Compute Engine - Microsoft Azure Virtual Machines これらのクラウドインスタンスは、GitLab Runner Docker Machineオートスケーラーでもサポートされています。 ## サポートされているプラットフォーム {#supported-platforms} | executor | Linux | macOS | Windows | |----------------------------|--------------------------------------|--------------------------------------|--------------------------------------| | インスタンスexecutor | {{< icon name="check-circle" >}}対応 | {{< icon name="check-circle" >}}対応 | {{< icon name="check-circle" >}}対応 | | Docker Autoscaler executor | {{< icon name="check-circle" >}}対応 | {{< icon name="dotted-circle" >}}非対応 | {{< icon name="check-circle" >}}対応 | ================================================ FILE: docs-locale/ja-jp/security/_index.md ================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: 自己管理Runnerのセキュリティ --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} 
GitLab CI/CDパイプラインは、単純または複雑なDevOps自動化タスクに使用されるワークフロー自動化エンジンです。これらのパイプラインはリモートコード実行サービスを有効にするため、セキュリティリスクを軽減するために、以下のプロセスを実装する必要があります: - テクノロジースタック全体のセキュリティを設定するための体系的なアプローチ。 - プラットフォームの設定と使用に関する継続的かつ厳格なレビュー。 自己管理Runner上でGitLab CI/CDジョブを実行する場合、コンピューティングインフラストラクチャとネットワークにセキュリティリスクが存在します。 RunnerはCI/CDジョブで定義されたコードを実行します。プロジェクトのリポジトリのデベロッパーロールを持つすべてのユーザーは、意図的であるかどうかにかかわらず、Runnerをホストする環境のセキュリティを侵害する可能性があります。 自己管理Runnerが一時的でなく、複数のプロジェクトに使用されている場合、このリスクはさらに高まります。 - 悪意のあるコードが埋め込まれたリポジトリからのジョブは、一時的でないRunnerがサービスを提供する他のリポジトリのセキュリティを侵害する可能性があります。 - executorによっては、ジョブはRunnerがホストされている仮想マシンに悪意のあるコードをインストールする可能性があります。 - 侵害された環境で実行されているジョブに公開されたシークレット変数トークン(`CI_JOB_TOKEN`を含むが、これに限定されない)が盗まれる可能性があります。 - デベロッパーロールを持つユーザーは、サブモジュールのアップストリームプロジェクトへのアクセス権を持っていなくても、プロジェクトに関連付けられたサブモジュールにアクセスできます。 ## さまざまなexecutorのセキュリティリスク {#security-risks-for-different-executors} 使用しているexecutorによっては、さまざまなセキュリティリスクに直面する可能性があります。 ### Shell executorの使用 {#usage-of-shell-executor} **`shell`executorでビルドを実行すると、Runnerホストとネットワークに高いセキュリティリスクが存在します**。ジョブはGitLab Runnerのユーザーの権限で実行され、このサーバーで実行されている他のプロジェクトからコードを盗む可能性があります。信頼できるビルドを実行する場合にのみ使用してください。 ### Docker executorの使用 {#usage-of-docker-executor} **特権のないモードで実行する場合、Dockerは安全であると見なすことができます**。このような設定をより安全にするには、`sudo`を無効にするか、`SETUID`および`SETGID`機能を削除して、ルート以外のユーザーとしてDockerコンテナ内でジョブを実行します。 よりきめ細かいアクセスレベルは、`cap_add`/`cap_drop`設定を介して、特権のないモードで設定できます。 {{< alert type="warning" >}} Dockerの特権コンテナは、ホストVMのすべてのルート機能を備えています。詳細については、[ランタイム特権とLinux機能](https://docs.docker.com/engine/containers/run/#runtime-privilege-and-linux-capabilities)に関する公式Dockerドキュメントをご覧ください {{< /alert >}} **特権モードでコンテナを実行することはお勧めしません**。 特権モードが有効になっている場合、CI/CDジョブを実行しているユーザーは、Runnerのホストシステムへの完全なルートアクセス権を取得し、ボリュームをマウントおよびデタッチするアクセスレベルを取得し、ネストされたコンテナを実行できます。 特権モードを有効にすると、すべてのコンテナのセキュリティメカニズムが効果的に無効になり、ホストが特権エスカレーションにさらされ、コンテナブレイクアウトが発生する可能性があります。 Docker Machine Executorを使用する場合は、`MaxBuilds = 1`設定を使用することを強くお勧めします。これにより、(特権モードによって導入されたセキュリティの脆弱性により侵害される可能性のある)単一のオートスケールVMが1つのジョブのみを処理するために使用されます。 ### 
`if-not-present`プルポリシーでの非公開Dockerイメージの使用 {#usage-of-private-docker-images-with-if-not-present-pull-policy} [高度な設定:プライベートコンテナレジストリの使用](../configuration/advanced-configuration.md#use-a-private-container-registry)で説明されているプライベートDockerイメージのサポートを使用する場合は、`always`を`pull_policy`値として使用する必要があります。特に、DockerまたはKubernetes executorを使用してパブリックインスタンスRunnerをホストしている場合は、`always`プルポリシーを使用する必要があります。 プルポリシーが`if-not-present`に設定されている例を考えてみましょう: 1. ユーザーAは、`registry.example.com/image/name`にプライベートイメージを持っています。 1. ユーザーAは、インスタンスRunnerでビルドを開始します: ビルドは、レジストリの認可後にレジストリ認証情報を受け取り、イメージをプルします。 1. イメージは、インスタンスRunnerのホストに保存されます。 1. ユーザーBは、`registry.example.com/image/name`のプライベートイメージにアクセスできません。 1. ユーザーBは、ユーザーAと同じインスタンスRunnerでこのイメージを使用するビルドを開始します: Runnerはイメージのローカルバージョンを見つけ、**イメージが認証情報の欠落によりプルできなかった場合でも**、それを使用します。 したがって、(プライベートとパブリックのアクセスレベルが混在する)さまざまなユーザーやさまざまなプロジェクトで使用できるRunnerをホストする場合は、`if-not-present`をプルポリシー値として使用しないでください。代わりに、以下を使用します: - `never` - ユーザーが事前にダウンロードしたイメージのみを使用するように制限する場合。 - `always` - ユーザーにあらゆるレジストリからイメージをダウンロードする可能性を与えたい場合。 `if-not-present`プルポリシーは、信頼できるビルドおよびユーザーが使用する特定のRunnerに**のみ**使用する必要があります。 詳細については、[プルポリシーのドキュメント](../executors/docker.md#configure-how-runners-pull-images)をお読みください。 ### SSH executorの使用 {#usage-of-ssh-executor} `StrictHostKeyChecking`オプションがないため、**SSH executorは、MITM攻撃対象領域(中間者攻撃対象領域)を受けやすい**。これは、将来のリリースのいずれかで修正されます。 ### Parallels executorの使用 {#usage-of-parallels-executor} **Parallels executorは、完全なシステム仮想マシンを使用し、分離された仮想マシンで実行するように設定されたVMマシンを使用するため、可能な限り最も安全なオプションです**。すべての周辺機器と共有フォルダーへのアクセスをブロックします。 ## Runnerの複製 {#cloning-a-runner} Runnerはトークンを使用してGitLabサーバーを識別します。Runnerを複製すると、複製されたRunnerがそのトークンに対して同じジョブを取得する可能性があります。これは、Runnerジョブを「盗む」ための可能な脅威ベクターです。 ## 共有環境で`GIT_STRATEGY: fetch`を使用する場合のセキュリティリスク {#security-risks-when-using-git_strategy-fetch-on-shared-environments} [`GIT_STRATEGY`](https://docs.gitlab.com/ci/runners/configure_runners/#git-strategy)を`fetch`に設定すると、RunnerはGitリポジトリのローカル実行コピーを再利用しようとします。 
ローカルバージョンを使用すると、CI/CDジョブのパフォーマンスを向上させることができます。ただし、その再利用可能なコピーへのアクセス権を持つすべてのユーザーは、他のユーザーのパイプラインで実行されるコードを追加できます。 Gitは、サブモジュール(別のリポジトリに埋め込まれたリポジトリ)の内容を親リポジトリのGit参照ログに格納します。その結果、プロジェクトのサブモジュールが最初にクローンされた後、後続のジョブは、スクリプトで`git submodule update`を実行することにより、サブモジュールのコンテンツにアクセスできます。これは、サブモジュールが削除され、ジョブを開始したユーザーがサブモジュールプロジェクトへのアクセス権を持っていない場合でも適用されます。 共有環境へのアクセス権を持つすべてのユーザーを信頼できる場合にのみ`GIT_STRATEGY: fetch`を使用してください。 ## セキュリティ強化オプション {#security-hardening-options} ### 特権付きコンテナを使用するセキュリティリスクを軽減する {#reduce-the-security-risk-of-using-privileged-containers} Dockerの`--privileged`フラグの使用を必要とするCI/CDジョブを実行する必要がある場合は、以下の手順を実行して、セキュリティリスクを軽減できます: - `--privileged`フラグが有効になっているDockerコンテナは、分離された一時的な仮想マシンでのみ実行してください。 - Dockerの`--privileged`フラグの使用を必要とするジョブを実行するための専用のRunnerを設定します。次に、これらのRunnerを保護ブランチでのみジョブを実行するように設定します。 ### ネットワークセグメンテーション {#network-segmentation} GitLab Runnerは、ユーザーが制御するスクリプトを実行するように設計されています。ジョブが悪意のあるものである場合にアタックサーフェスを削減するために、独自のネットワークセグメントで実行することを検討できます。これにより、他のインフラストラクチャおよびサービスからのネットワーク分離が提供されます。 すべてのニーズは固有ですが、クラウドプロバイダー環境の場合、これには以下が含まれる可能性があります: - 独自のネットワークセグメントでのRunner仮想マシンの設定 - インターネットからRunner仮想マシンへのSSHアクセスをブロックする - Runner仮想マシン間のトラフィックを制限する - クラウドプロバイダーメタデータエンドポイントへのアクセスをフィルタリングする {{< alert type="note" >}} すべてのRunnerは、GitLab.comまたはGitLabインスタンスへの送信ネットワーク接続を必要とします。ほとんどのジョブは、依存関係のプルなどのために、インターネットへの送信ネットワーク接続も必要とします。 {{< /alert >}} ### Runnerホストを保護する {#secure-the-runner-host} Runnerに静的ホスト(ベアメタルまたは仮想マシン)を使用している場合は、ホストオペレーティングシステムのセキュリティのベストプラクティスを実装する必要があります。 CIジョブのコンテキストで実行される悪意のあるコードはホストを侵害する可能性があるため、セキュリティプロトコルは影響を軽減するのに役立ちます。留意すべきその他のポイントとしては、攻撃者が環境内の他のエンドポイントにアクセスできるようにする可能性のあるSSHキーなどのファイルをホストシステムから保護または削除することが挙げられます。 ### 各ビルド後に`.git`フォルダーをクリーンアップする {#clean-up-the-git-folder-after-each-build} Runnerに静的ホストを使用する場合は、`FF_ENABLE_JOB_CLEANUP` [機能フラグ](../configuration/feature-flags.md)を有効にすることで、セキュリティのレイヤーを追加できます。 `FF_ENABLE_JOB_CLEANUP`を有効にすると、Runnerがホストで使用するビルドディレクトリが各ビルド後にクリーンアップされます。 ================================================ FILE: docs-locale/ja-jp/shells/_index.md 
================================================ --- stage: Verify group: Runner Core info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://handbook.gitlab.com/handbook/product/ux/technical-writing/#assignments title: GitLab RunnerでサポートされているShellの種類 --- {{< details >}} - プラン: Free、Premium、Ultimate - 提供形態: GitLab.com、GitLab Self-Managed、GitLab Dedicated {{< /details >}} GitLab Runnerは、さまざまなシステムでビルドを実行できるようにするShellスクリプトジェネレーターを実装しています。 Shellスクリプトには、ビルドのすべてのステップを実行するコマンドが含まれています: 1. `git clone` 1. ビルドキャッシュの復元 1. ビルドコマンド 1. ビルドキャッシュの更新 1. ビルドアーティファクトの生成とアップロード Shellには設定オプションはありません。[`script`の`.gitlab-ci.yml`ディレクティブ](https://docs.gitlab.com/ci/yaml/#script)で定義されたコマンドからビルドのステップを受信します。 サポートされているShellは次のとおりです: | Shell | 状態 | 説明 | |--------------|-----------------|-------------| | `bash` | 完全にサポート | Bash(Bourne Again Shell)。すべてのコマンドはBashコンテキストで実行されます(すべてのUnixシステムのデフォルト)。 | | `sh` | 完全にサポート | Sh(Bourne shell)。すべてのコマンドはShコンテキストで実行されます(すべてのUnixシステムの`bash`のフォールバック)。 | | `powershell` | 完全にサポート | PowerShellスクリプト。すべてのコマンドはPowerShell Desktopのコンテキストで実行されます。 | | `pwsh` | 完全にサポート | PowerShellスクリプト。すべてのコマンドはPowerShell Coreのコンテキストで実行されます。これは、Windowsで新しいRunnerを登録する際のデフォルトです。 | デフォルト以外の特定のShellを使用する場合は、`config.toml`ファイルで[Shellを指定する](../executors/shell.md#selecting-your-shell)必要があります。 ## Sh/Bash Shell {#shbash-shells} Sh/Bashは、すべてのUnixベースのシステムで使用されるデフォルトのShellです。`.gitlab-ci.yml`で使用されているbashスクリプトは、Shellスクリプトを次のいずれかのコマンドにパイプすることで実行されます: ```shell # This command is used if the build should be executed in context # of another user (the shell executor) cat generated-bash-script | su --shell /bin/bash --login user # This command is used if the build should be executed using # the current user, but in a login environment cat generated-bash-script | /bin/bash --login # This command is used if the build should be executed in # a Docker environment cat generated-bash-script | /bin/bash ``` ### Shellプロファイルの読み込み {#shell-profile-loading} 
特定のexecutorでは、Runnerは前述のように`--login`フラグを渡します。これによりShellプロファイルも読み込まれます。`.bashrc`、`.bash_logout`、または[その他のドットファイル](https://tldp.org/LDP/Bash-Beginners-Guide/html/sect_03_01.html#sect_03_01_02)に含まれている内容はすべてジョブで実行されます。

[`Prepare environment`ステージでジョブが失敗した](../faq/_index.md#job-failed-system-failure-preparing-environment)場合、その原因はShellプロファイル内にある可能性があります。一般的な失敗として、コンソールのクリアを試行する`.bash_logout`がある場合の失敗があります。

このエラーを解決するには、`/home/gitlab-runner/.bash_logout`を確認してください。たとえば、`.bash_logout`ファイルに次のようなスクリプトセクションがある場合は、このセクションをコメントアウトしてパイプラインを再起動します:

```shell
if [ "$SHLVL" = 1 ]; then
    [ -x /usr/bin/clear_console ] && /usr/bin/clear_console -q
fi
```

Shellプロファイルを読み込むexecutor:

- [`shell`](../executors/shell.md)
- [`parallels`](../executors/parallels.md)(*ターゲット*仮想マシンのShellプロファイルが読み込まれます)
- [`virtualbox`](../executors/virtualbox.md)(*ターゲット*仮想マシンのShellプロファイルが読み込まれます)
- [`ssh`](../executors/ssh.md)(*ターゲット*マシンのShellプロファイルが読み込まれます)

## PowerShell {#powershell}

PowerShell Desktop Editionは、GitLab Runner 12.0〜13.12を使用してWindowsに新しいRunnerを登録するときのデフォルトShellです。14.0以降では、デフォルトはPowerShell Core Editionです。

PowerShellは、別のユーザーのコンテキストでビルドを実行することをサポートしていません。

生成されたPowerShellスクリプトを実行するには、そのコンテンツをファイルに保存し、ファイル名を次のコマンドに渡します:

- PowerShell Desktop Edition:

  ```batch
  powershell -NoProfile -NonInteractive -ExecutionPolicy Bypass -Command generated-windows-powershell.ps1
  ```

- PowerShell Core Edition:

  ```batch
  pwsh -NoProfile -NonInteractive -ExecutionPolicy Bypass -Command generated-windows-powershell.ps1
  ```

PowerShellスクリプトの例を以下に示します:

```powershell
$ErrorActionPreference = "Continue" # This will be set to 'Stop' when targetting PowerShell Core

echo "Running on $([Environment]::MachineName)..."
& { $CI="true" $env:CI=$CI $CI_COMMIT_SHA="db45ad9af9d7af5e61b829442fd893d96e31250c" $env:CI_COMMIT_SHA=$CI_COMMIT_SHA $CI_COMMIT_BEFORE_SHA="d63117656af6ff57d99e50cc270f854691f335ad" $env:CI_COMMIT_BEFORE_SHA=$CI_COMMIT_BEFORE_SHA $CI_COMMIT_REF_NAME="main" $env:CI_COMMIT_REF_NAME=$CI_COMMIT_REF_NAME $CI_JOB_ID="1" $env:CI_JOB_ID=$CI_JOB_ID $CI_REPOSITORY_URL="Z:\Gitlab\tests\test" $env:CI_REPOSITORY_URL=$CI_REPOSITORY_URL $CI_PROJECT_ID="1" $env:CI_PROJECT_ID=$CI_PROJECT_ID $CI_PROJECT_DIR="Z:\Gitlab\tests\test\builds\0\project-1" $env:CI_PROJECT_DIR=$CI_PROJECT_DIR $CI_SERVER="yes" $env:CI_SERVER=$CI_SERVER $CI_SERVER_NAME="GitLab CI" $env:CI_SERVER_NAME=$CI_SERVER_NAME $CI_SERVER_VERSION="" $env:CI_SERVER_VERSION=$CI_SERVER_VERSION $CI_SERVER_REVISION="" $env:CI_SERVER_REVISION=$CI_SERVER_REVISION $GITLAB_CI="true" $env:GITLAB_CI=$GITLAB_CI $GIT_SSL_CAINFO="" New-Item -ItemType directory -Force -Path "C:\GitLab-Runner\builds\0\project-1.tmp" | out-null $GIT_SSL_CAINFO | Out-File "C:\GitLab-Runner\builds\0\project-1.tmp\GIT_SSL_CAINFO" $GIT_SSL_CAINFO="C:\GitLab-Runner\builds\0\project-1.tmp\GIT_SSL_CAINFO" $env:GIT_SSL_CAINFO=$GIT_SSL_CAINFO $CI_SERVER_TLS_CA_FILE="" New-Item -ItemType directory -Force -Path "C:\GitLab-Runner\builds\0\project-1.tmp" | out-null $CI_SERVER_TLS_CA_FILE | Out-File "C:\GitLab-Runner\builds\0\project-1.tmp\CI_SERVER_TLS_CA_FILE" $CI_SERVER_TLS_CA_FILE="C:\GitLab-Runner\builds\0\project-1.tmp\CI_SERVER_TLS_CA_FILE" $env:CI_SERVER_TLS_CA_FILE=$CI_SERVER_TLS_CA_FILE echo "Cloning repository..." 
if( (Get-Command -Name Remove-Item2 -Module NTFSSecurity -ErrorAction SilentlyContinue) -and (Test-Path "C:\GitLab-Runner\builds\0\project-1" -PathType Container) ) { Remove-Item2 -Force -Recurse "C:\GitLab-Runner\builds\0\project-1" } elseif(Test-Path "C:\GitLab-Runner\builds\0\project-1") { Remove-Item -Force -Recurse "C:\GitLab-Runner\builds\0\project-1" } & "git" "clone" "https://gitlab.com/group/project.git" "Z:\Gitlab\tests\test\builds\0\project-1" if(!$?) { Exit $LASTEXITCODE } cd "C:\GitLab-Runner\builds\0\project-1" if(!$?) { Exit $LASTEXITCODE } echo "Checking out db45ad9a as main..." & "git" "checkout" "db45ad9af9d7af5e61b829442fd893d96e31250c" if(!$?) { Exit $LASTEXITCODE } if(Test-Path "..\..\..\cache\project-1\pages\main\cache.tgz" -PathType Leaf) { echo "Restoring cache..." & "gitlab-runner-windows-amd64.exe" "extract" "--file" "..\..\..\cache\project-1\pages\main\cache.tgz" if(!$?) { Exit $LASTEXITCODE } } else { if(Test-Path "..\..\..\cache\project-1\pages\main\cache.tgz" -PathType Leaf) { echo "Restoring cache..." & "gitlab-runner-windows-amd64.exe" "extract" "--file" "..\..\..\cache\project-1\pages\main\cache.tgz" if(!$?) { Exit $LASTEXITCODE } } } } if(!$?) 
{ Exit $LASTEXITCODE } & { $CI="true" $env:CI=$CI $CI_COMMIT_SHA="db45ad9af9d7af5e61b829442fd893d96e31250c" $env:CI_COMMIT_SHA=$CI_COMMIT_SHA $CI_COMMIT_BEFORE_SHA="d63117656af6ff57d99e50cc270f854691f335ad" $env:CI_COMMIT_BEFORE_SHA=$CI_COMMIT_BEFORE_SHA $CI_COMMIT_REF_NAME="main" $env:CI_COMMIT_REF_NAME=$CI_COMMIT_REF_NAME $CI_JOB_ID="1" $env:CI_JOB_ID=$CI_JOB_ID $CI_REPOSITORY_URL="Z:\Gitlab\tests\test" $env:CI_REPOSITORY_URL=$CI_REPOSITORY_URL $CI_PROJECT_ID="1" $env:CI_PROJECT_ID=$CI_PROJECT_ID $CI_PROJECT_DIR="Z:\Gitlab\tests\test\builds\0\project-1" $env:CI_PROJECT_DIR=$CI_PROJECT_DIR $CI_SERVER="yes" $env:CI_SERVER=$CI_SERVER $CI_SERVER_NAME="GitLab CI" $env:CI_SERVER_NAME=$CI_SERVER_NAME $CI_SERVER_VERSION="" $env:CI_SERVER_VERSION=$CI_SERVER_VERSION $CI_SERVER_REVISION="" $env:CI_SERVER_REVISION=$CI_SERVER_REVISION $GITLAB_CI="true" $env:GITLAB_CI=$GITLAB_CI $GIT_SSL_CAINFO="" New-Item -ItemType directory -Force -Path "C:\GitLab-Runner\builds\0\project-1.tmp" | out-null $GIT_SSL_CAINFO | Out-File "C:\GitLab-Runner\builds\0\project-1.tmp\GIT_SSL_CAINFO" $GIT_SSL_CAINFO="C:\GitLab-Runner\builds\0\project-1.tmp\GIT_SSL_CAINFO" $env:GIT_SSL_CAINFO=$GIT_SSL_CAINFO $CI_SERVER_TLS_CA_FILE="" New-Item -ItemType directory -Force -Path "C:\GitLab-Runner\builds\0\project-1.tmp" | out-null $CI_SERVER_TLS_CA_FILE | Out-File "C:\GitLab-Runner\builds\0\project-1.tmp\CI_SERVER_TLS_CA_FILE" $CI_SERVER_TLS_CA_FILE="C:\GitLab-Runner\builds\0\project-1.tmp\CI_SERVER_TLS_CA_FILE" $env:CI_SERVER_TLS_CA_FILE=$CI_SERVER_TLS_CA_FILE cd "C:\GitLab-Runner\builds\0\project-1" if(!$?) { Exit $LASTEXITCODE } echo "`$ echo true" echo true } if(!$?) 
{ Exit $LASTEXITCODE } & { $CI="true" $env:CI=$CI $CI_COMMIT_SHA="db45ad9af9d7af5e61b829442fd893d96e31250c" $env:CI_COMMIT_SHA=$CI_COMMIT_SHA $CI_COMMIT_BEFORE_SHA="d63117656af6ff57d99e50cc270f854691f335ad" $env:CI_COMMIT_BEFORE_SHA=$CI_COMMIT_BEFORE_SHA $CI_COMMIT_REF_NAME="main" $env:CI_COMMIT_REF_NAME=$CI_COMMIT_REF_NAME $CI_JOB_ID="1" $env:CI_JOB_ID=$CI_JOB_ID $CI_REPOSITORY_URL="Z:\Gitlab\tests\test" $env:CI_REPOSITORY_URL=$CI_REPOSITORY_URL $CI_PROJECT_ID="1" $env:CI_PROJECT_ID=$CI_PROJECT_ID $CI_PROJECT_DIR="Z:\Gitlab\tests\test\builds\0\project-1" $env:CI_PROJECT_DIR=$CI_PROJECT_DIR $CI_SERVER="yes" $env:CI_SERVER=$CI_SERVER $CI_SERVER_NAME="GitLab CI" $env:CI_SERVER_NAME=$CI_SERVER_NAME $CI_SERVER_VERSION="" $env:CI_SERVER_VERSION=$CI_SERVER_VERSION $CI_SERVER_REVISION="" $env:CI_SERVER_REVISION=$CI_SERVER_REVISION $GITLAB_CI="true" $env:GITLAB_CI=$GITLAB_CI $GIT_SSL_CAINFO="" New-Item -ItemType directory -Force -Path "C:\GitLab-Runner\builds\0\project-1.tmp" | out-null $GIT_SSL_CAINFO | Out-File "C:\GitLab-Runner\builds\0\project-1.tmp\GIT_SSL_CAINFO" $GIT_SSL_CAINFO="C:\GitLab-Runner\builds\0\project-1.tmp\GIT_SSL_CAINFO" $env:GIT_SSL_CAINFO=$GIT_SSL_CAINFO $CI_SERVER_TLS_CA_FILE="" New-Item -ItemType directory -Force -Path "C:\GitLab-Runner\builds\0\project-1.tmp" | out-null $CI_SERVER_TLS_CA_FILE | Out-File "C:\GitLab-Runner\builds\0\project-1.tmp\CI_SERVER_TLS_CA_FILE" $CI_SERVER_TLS_CA_FILE="C:\GitLab-Runner\builds\0\project-1.tmp\CI_SERVER_TLS_CA_FILE" $env:CI_SERVER_TLS_CA_FILE=$CI_SERVER_TLS_CA_FILE cd "C:\GitLab-Runner\builds\0\project-1" if(!$?) { Exit $LASTEXITCODE } echo "Archiving cache..." & "gitlab-runner-windows-amd64.exe" "archive" "--file" "..\..\..\cache\project-1\pages\main\cache.tgz" "--path" "vendor" if(!$?) { Exit $LASTEXITCODE } } if(!$?) 
{ Exit $LASTEXITCODE } ``` ### Windows Batchの実行 {#running-windows-batch} PowerShellに移植されていない古いBatchスクリプトの場合は、`Start-Process "cmd.exe" "/c C:\Path\file.bat"`を使用してPowerShellからそのBatchスクリプトを実行できます。 ### PowerShellがデフォルトの場合の`CMD` Shellへのアクセス {#access-cmd-shell-when-powershell-is-the-default} [Call `CMD` From Default PowerShell in GitLab CI](https://gitlab.com/guided-explorations/microsoft/windows/call-cmd-from-powershell)プロジェクトは、`CMD` Shellへのアクセス権を取得する方法を示しています。このアプローチは、PowerShellがRunnerのデフォルトShellである場合に機能します。 ### PowerShellのサンプルの使い方を紹介するビデオチュートリアル {#video-walkthrough-of-working-powershell-examples} [Slicing and Dicing with PowerShell on GitLab CI](https://www.youtube.com/watch?v=UZvtAYwruFc)は、[PowerShell Pipelines on GitLab CI](https://gitlab.com/guided-explorations/microsoft/powershell/powershell-pipelines-on-gitlab-ci) Guided Explorationプロジェクトのチュートリアル動画です。これは以下の環境でテストされています: - [GitLab.com向けにWindows上でホストされるrunner](https://docs.gitlab.com/ci/runners/hosted_runners/windows/)のWindows PowerShellおよびPowerShell Core 7。 - [Docker-Machine Runner](../executors/docker_machine.md)を使用したLinux ContainersのPowerShell Core 7。 この例は、テスト用に自分のグループまたはインスタンスにコピーできます。他のGitLab CIパターンのデモについての詳細は、プロジェクトページをご覧ください。 ================================================ FILE: executors/abstract.go ================================================ package executors import ( "context" "os" "sync" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/buildlogger" "gitlab.com/gitlab-org/gitlab-runner/session/proxy" ) type ExecutorOptions struct { DefaultCustomBuildsDirEnabled bool DefaultSafeDirectoryCheckout bool DefaultBuildsDir string DefaultCacheDir string SharedBuildsDir bool Shell common.ShellScriptInfo ShowHostname bool } type AbstractExecutor struct { ExecutorOptions BuildLogger buildlogger.Logger Config common.RunnerConfig Build *common.Build BuildShell *common.ShellConfiguration currentStage common.ExecutorStage Context context.Context ProxyPool proxy.Pool 
stageLock sync.RWMutex } func (e *AbstractExecutor) updateShell() error { script := e.Shell() script.Build = e.Build if e.Config.Shell != "" { script.Shell = e.Config.Shell } return nil } func (e *AbstractExecutor) ExpandValue(value string) string { return e.Build.GetAllVariables().ExpandValue(value) } func (e *AbstractExecutor) generateShellConfiguration() error { info := e.Shell() info.PreGetSourcesScript = e.Config.PreGetSourcesScript info.PostGetSourcesScript = e.Config.PostGetSourcesScript info.PreBuildScript = e.Config.PreBuildScript info.PostBuildScript = e.Config.PostBuildScript shellConfiguration, err := common.GetShellConfiguration(*info) if err != nil { return err } e.BuildShell = shellConfiguration e.BuildLogger.Debugln("Shell configuration:", shellConfiguration) return nil } func (e *AbstractExecutor) startBuild() error { // Save hostname if e.ShowHostname && e.Build.Hostname == "" { e.Build.Hostname, _ = os.Hostname() } return e.Build.StartBuild( e.RootDir(), e.CacheDir(), e.CustomBuildEnabled(), e.SharedBuildsDir, e.SafeDirectoryCheckout(), ) } func (e *AbstractExecutor) RootDir() string { if e.Config.BuildsDir != "" { return e.Config.BuildsDir } return e.DefaultBuildsDir } func (e *AbstractExecutor) CacheDir() string { if e.Config.CacheDir != "" { return e.Config.CacheDir } return e.DefaultCacheDir } func (e *AbstractExecutor) CustomBuildEnabled() bool { if enabled := e.Config.CustomBuildDir.Enabled; enabled != nil { return *enabled } return e.DefaultCustomBuildsDirEnabled } func (e *AbstractExecutor) SafeDirectoryCheckout() bool { if e.Config.SafeDirectoryCheckout != nil { return *e.Config.SafeDirectoryCheckout } return e.DefaultSafeDirectoryCheckout } func (e *AbstractExecutor) Shell() *common.ShellScriptInfo { return &e.ExecutorOptions.Shell } func (e *AbstractExecutor) Prepare(options common.ExecutorPrepareOptions) error { e.PrepareConfiguration(options) return e.PrepareBuildAndShell() } func (e *AbstractExecutor) PrepareConfiguration(options 
common.ExecutorPrepareOptions) { e.SetCurrentStage(common.ExecutorStagePrepare) e.Context = options.Context e.Config = *options.Config e.Build = options.Build e.BuildLogger = options.BuildLogger e.ProxyPool = proxy.NewPool() } func (e *AbstractExecutor) PrepareBuildAndShell() error { err := e.startBuild() if err != nil { return err } err = e.updateShell() if err != nil { return err } err = e.generateShellConfiguration() if err != nil { return err } return nil } func (e *AbstractExecutor) Finish(err error) { e.SetCurrentStage(common.ExecutorStageFinish) } func (e *AbstractExecutor) Cleanup() { e.SetCurrentStage(common.ExecutorStageCleanup) } func (e *AbstractExecutor) GetCurrentStage() common.ExecutorStage { e.stageLock.RLock() defer e.stageLock.RUnlock() return e.currentStage } func (e *AbstractExecutor) SetCurrentStage(stage common.ExecutorStage) { e.stageLock.Lock() defer e.stageLock.Unlock() e.currentStage = stage } ================================================ FILE: executors/custom/api/config.go ================================================ package api // ConfigExecOutput defines the output structure of the config_exec call. // // This should be used to pass the configuration values from Custom Executor // driver to the Runner. 
type ConfigExecOutput struct {
	// Every field is a pointer with an omitempty tag: a nil field means the
	// driver omitted the key from its config_exec JSON output.
	Driver *DriverInfo `json:"driver,omitempty"`

	Hostname *string `json:"hostname,omitempty"`

	BuildsDir *string `json:"builds_dir,omitempty"`
	CacheDir  *string `json:"cache_dir,omitempty"`

	BuildsDirIsShared *bool `json:"builds_dir_is_shared,omitempty"`

	JobEnv *map[string]string `json:"job_env,omitempty"`

	Shell *string `json:"shell,omitempty"`
}

// DriverInfo wraps the information about Custom Executor driver details
// like the name or version.
type DriverInfo struct {
	Name    *string `json:"name,omitempty"`
	Version *string `json:"version,omitempty"`
}

================================================
FILE: executors/custom/api/const.go
================================================
package api

const (
	// BuildFailureExitCodeVariable is the name of the variable used to pass
	// the value of the Build failure exit code that should be returned from
	// the Custom executor driver.
	BuildFailureExitCodeVariable = "BUILD_FAILURE_EXIT_CODE"

	// SystemFailureExitCodeVariable is the name of the variable used to pass
	// the value of the System failure exit code that should be returned from
	// the Custom executor driver.
	SystemFailureExitCodeVariable = "SYSTEM_FAILURE_EXIT_CODE"

	// BuildCodeFileVariable is the name of the variable used to pass the path
	// to an optional file that the driver can use to provide a specific build
	// failure code.
	BuildCodeFileVariable = "BUILD_EXIT_CODE_FILE"

	// JobResponseFileVariable is the name of the variable used to pass the
	// path to the file that contains the JSON encoded content of the job API
	// response received from GitLab's API.
	JobResponseFileVariable = "JOB_RESPONSE_FILE"
)

================================================
FILE: executors/custom/command/command.go
================================================
package command

import (
	"bufio"
	"context"
	"fmt"
	"os"
	"os/exec"
	"strconv"
	"time"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/executors/custom/api"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/process"
)

const (
	// BuildFailureExitCode is the driver exit code interpreted as a build
	// (job) failure.
	BuildFailureExitCode = 1
	// SystemFailureExitCode is the driver exit code interpreted as a system
	// failure.
	SystemFailureExitCode = 2
)

// Command represents a single, runnable invocation of the Custom executor
// driver executable.
type Command interface {
	Run() error
}

var
newProcessKillWaiter = process.NewOSKillWait

// newCommander is a variable (rather than a direct call) so tests can swap
// in a mock process factory.
var newCommander = process.NewOSCmd

// Options carries the per-job file locations that are handed to the driver
// process through environment variables.
type Options struct {
	// JobResponseFile is the path of the file holding the JSON-encoded job
	// payload received from GitLab's API.
	JobResponseFile string

	// BuildExitCodeFile is the path the driver may write a specific build
	// failure exit code to.
	BuildExitCodeFile string
}

// command executes a single Custom executor driver invocation and translates
// its exit status into the Runner's error types.
type command struct {
	context context.Context
	cmd     process.Commander

	// waitCh receives the (possibly translated) result of cmd.Wait().
	waitCh chan error

	logger process.Logger

	gracefulKillTimeout time.Duration
	forceKillTimeout    time.Duration

	buildCodeFile string
}

// New creates a Command that executes the given driver executable with args.
// The driver inherits the Runner's environment extended with the Custom
// executor API variables; caller-supplied variables are appended last so
// they take precedence.
func New(
	ctx context.Context,
	executable string,
	args []string,
	cmdOpts process.CommandOptions,
	options Options,
) Command {
	defaults := map[string]string{
		"TMPDIR":                          cmdOpts.Dir,
		api.BuildFailureExitCodeVariable:  strconv.Itoa(BuildFailureExitCode),
		api.SystemFailureExitCodeVariable: strconv.Itoa(SystemFailureExitCode),
		api.BuildCodeFileVariable:         options.BuildExitCodeFile,
		api.JobResponseFileVariable:       options.JobResponseFile,
	}

	env := os.Environ()
	for k, v := range defaults {
		env = append(env, fmt.Sprintf("%s=%s", k, v))
	}
	cmdOpts.Env = append(env, cmdOpts.Env...)

	return &command{
		context:             ctx,
		cmd:                 newCommander(executable, args, cmdOpts),
		waitCh:              make(chan error),
		logger:              cmdOpts.Logger,
		gracefulKillTimeout: cmdOpts.GracefulKillTimeout,
		forceKillTimeout:    cmdOpts.ForceKillTimeout,
		buildCodeFile:       options.BuildExitCodeFile,
	}
}

// Run starts the driver process and blocks until it terminates or the
// command's context is cancelled; on cancellation the process is killed
// gracefully first, then forcefully.
func (c *command) Run() error {
	if err := c.cmd.Start(); err != nil {
		return fmt.Errorf("failed to start command: %w", err)
	}

	go c.waitForCommand()

	select {
	case err := <-c.waitCh:
		return err
	case <-c.context.Done():
		return newProcessKillWaiter(c.logger, c.gracefulKillTimeout, c.forceKillTimeout).
			KillAndWait(c.cmd, c.waitCh)
	}
}

// getExitCode is a seam replaced by tests to fake driver exit codes.
var getExitCode = func(err *exec.ExitError) int {
	return err.ExitCode()
}

// waitForCommand waits for the driver to exit, maps its exit code onto the
// Custom executor error contract (build failure, system failure, or unknown
// failure), and publishes the result on waitCh.
func (c *command) waitForCommand() {
	err := c.cmd.Wait()

	if eerr, ok := err.(*exec.ExitError); ok {
		switch code := getExitCode(eerr); {
		case code == BuildFailureExitCode:
			err = c.parseBuildFailure(eerr)
		case code != SystemFailureExitCode:
			// A system failure (code == SystemFailureExitCode) is passed
			// through as the raw *exec.ExitError; anything else is unknown.
			err = &ErrUnknownFailure{Inner: eerr, ExitCode: code}
		}
	}

	c.waitCh <- err
}

// parseBuildFailure turns a build-failure exit into a *common.BuildError,
// honouring an optional exit-code file written by the driver.
func (c *command) parseBuildFailure(eerr *exec.ExitError) error {
	file, err := os.Open(c.buildCodeFile)
	if err != nil {
		// The driver did not generate a file at the prescribed location, so
		// revert to the default BuildError and exit code.
		return &common.BuildError{Inner: eerr, ExitCode: BuildFailureExitCode}
	}
	defer file.Close()

	// Only the first line of the file is meaningful; any additional content
	// is ignored.
	var codeStr string
	scanner := bufio.NewScanner(file)
	scanner.Split(bufio.ScanLines)
	if scanner.Scan() {
		codeStr = scanner.Text()
	}

	bErrCode, err := strconv.Atoi(codeStr)
	if err != nil {
		// The file exists but does not hold a valid integer.
		return &ErrUnknownFailure{Inner: eerr, ExitCode: SystemFailureExitCode}
	}

	// Surface the exit code read from the file instead of the raw process
	// exit status, so the job log reports the real failure reason.
	return &common.BuildError{Inner: fmt.Errorf("exit status %s", codeStr), ExitCode: bErrCode}
}

================================================
FILE: executors/custom/command/command_test.go
================================================
//go:build !integration

package command

import (
	"context"
	"errors"
	"os"
	"os/exec"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/process"
)

// newCommand constructs a Command whose process-creation and kill-wait
// factories are replaced with mocks; the original factories are restored via
// t.Cleanup when the test finishes.
func newCommand(
	ctx context.Context,
	t *testing.T,
	executable string,
	cmdOpts process.CommandOptions,
	options Options,
) (*process.MockCommander, *process.MockKillWaiter, Command) {
	commanderMock := process.NewMockCommander(t)
	processKillWaiterMock := process.NewMockKillWaiter(t)

	oldNewCmd := newCommander
	oldNewProcessKillWaiter := newProcessKillWaiter
	t.Cleanup(func() {
		newCommander = oldNewCmd
		newProcessKillWaiter = oldNewProcessKillWaiter
	})

	newCommander = func(string, []string, process.CommandOptions) process.Commander {
		return commanderMock
	}

	newProcessKillWaiter = func(process.Logger, time.Duration, time.Duration) process.KillWaiter {
		return processKillWaiterMock
	}

	c := New(ctx, executable, []string{}, cmdOpts, options)

	return commanderMock, processKillWaiterMock, c
}

// TestCommand_Run table-tests Run() against the exit-code contract: start
// failures, build/system/unknown exit codes, context cancellation, and the
// optional build exit-code file.
func TestCommand_Run(t *testing.T) {
	testErr := errors.New("test error")

	tests := map[string]struct {
		cmdStartErr       error
		cmdWaitErr        error
		getExitCode       func(err *exec.ExitError) int
		contextClosed     bool
		process           *os.Process
		expectedError     string
		expectedErrorType interface{}
		expectedExitCode  int
		options           Options
	}{
		"error on cmd start()": {
			cmdStartErr:   errors.New("test-error"),
			expectedError: "failed to start command: test-error",
		},
		"command ends with a build failure": {
			cmdWaitErr:        &exec.ExitError{ProcessState: &os.ProcessState{}},
			getExitCode:       func(err *exec.ExitError) int { return BuildFailureExitCode },
			expectedError:     "exit status 0",
			expectedErrorType: &common.BuildError{},
			expectedExitCode:  BuildFailureExitCode,
		},
		"command ends with a system failure": {
			cmdWaitErr:        &exec.ExitError{ProcessState: &os.ProcessState{}},
			getExitCode:       func(err *exec.ExitError) int { return SystemFailureExitCode },
			expectedError:     "exit status 0",
			expectedErrorType: &exec.ExitError{},
		},
		"command ends with a unknown failure": {
			cmdWaitErr:  &exec.ExitError{ProcessState: &os.ProcessState{}},
			getExitCode: func(err *exec.ExitError) int { return 255 },
			expectedError: "unknown Custom executor executable exit code 255; " +
				"executable execution terminated with: exit status 0",
			expectedErrorType: &ErrUnknownFailure{},
		},
		"command times out": {
			contextClosed: true,
			process:       &os.Process{Pid: 1234},
			expectedError: testErr.Error(),
		},
		"command ends with invalid build failure file": {
			cmdWaitErr:  &exec.ExitError{ProcessState: &os.ProcessState{}},
			getExitCode: func(err *exec.ExitError) int { return BuildFailureExitCode },
			expectedError: "unknown Custom executor executable exit code 2; " +
				"executable execution terminated with: exit status 0",
			expectedErrorType: &ErrUnknownFailure{},
			options: func() Options {
				filename := t.TempDir() + "/invalid"
				err := os.WriteFile(filename, []byte("invalid"), 0o600)
				require.NoError(t, err)

				return Options{BuildExitCodeFile: filename}
			}(),
		},
		"command ends with build failure file": {
			cmdWaitErr:        &exec.ExitError{ProcessState: &os.ProcessState{}},
			getExitCode:       func(err *exec.ExitError) int { return BuildFailureExitCode },
			expectedError:     "exit status 42",
			expectedErrorType: &common.BuildError{},
			expectedExitCode:  42,
			options: func() Options {
				filename := t.TempDir() + "/valid"
				err := os.WriteFile(filename, []byte("42"), 0o600)
				require.NoError(t, err)

				return Options{BuildExitCodeFile: filename}
			}(),
		},
		"additional information ignored": {
			cmdWaitErr:        &exec.ExitError{ProcessState: &os.ProcessState{}},
			getExitCode:       func(err *exec.ExitError) int { return BuildFailureExitCode },
			expectedError:     "exit status 42",
			expectedErrorType: &common.BuildError{},
			expectedExitCode:  42,
			options: func() Options {
				filename := t.TempDir() + "/valid"
				err := os.WriteFile(filename, []byte("42\n\nTesting..."), 0o600)
				require.NoError(t, err)

				return Options{BuildExitCodeFile: filename}
			}(),
		},
	}

	for testName, tt := range tests {
		tt := tt
		t.Run(testName, func(t *testing.T) {
			ctx, ctxCancel := context.WithCancel(t.Context())
			defer ctxCancel()

			cmdOpts := process.CommandOptions{
				Logger:              process.NewMockLogger(t),
				GracefulKillTimeout: 100 * time.Millisecond,
				ForceKillTimeout:    100 * time.Millisecond,
			}
			commanderMock, processKillWaiterMock, c := newCommand(ctx, t, "exec", cmdOpts, tt.options)

			commanderMock.On("Start").
				Return(tt.cmdStartErr)
			commanderMock.On("Wait").
				Return(func() error {
					<-time.After(500 * time.Millisecond)
					return tt.cmdWaitErr
				}).
				Maybe()

			// Swap the exit-code seam for this case only.
			if tt.getExitCode != nil {
				oldGetExitCode := getExitCode
				defer func() { getExitCode = oldGetExitCode }()
				getExitCode = tt.getExitCode
			}

			if tt.contextClosed {
				ctxCancel()

				processKillWaiterMock.
					On("KillAndWait", commanderMock, mock.Anything).
					Return(testErr).
					Once()
			}

			err := c.Run()
			if tt.expectedError == "" {
				assert.NoError(t, err)
				return
			}

			assert.EqualError(t, err, tt.expectedError)
			if tt.expectedErrorType != nil {
				assert.IsType(t, tt.expectedErrorType, err)
			}

			if tt.expectedExitCode != 0 {
				var buildError *common.BuildError
				if errors.As(err, &buildError) {
					assert.Equal(t, tt.expectedExitCode, buildError.ExitCode)
				}
			}
		})
	}
}

================================================
FILE: executors/custom/command/errors.go
================================================
package command

import (
	"fmt"
)

// ErrUnknownFailure is returned when the Custom executor driver terminates
// with an exit code that is neither BuildFailureExitCode nor
// SystemFailureExitCode.
type ErrUnknownFailure struct {
	// Inner is the original error returned by the process wait.
	Inner error
	// ExitCode is the unrecognized exit code the driver terminated with.
	ExitCode int
}

// Error implements the error interface.
func (e *ErrUnknownFailure) Error() string {
	return fmt.Sprintf(
		"unknown Custom executor executable exit code %d; executable execution terminated with: %v",
		e.ExitCode,
		e.Inner,
	)
}

================================================
FILE: executors/custom/command/mocks.go
================================================
// Code generated by mockery; DO NOT EDIT.
// github.com/vektra/mockery
// template: testify

package command

import (
	mock "github.com/stretchr/testify/mock"
)

// NewMockCommand creates a new instance of MockCommand. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewMockCommand(t interface {
	mock.TestingT
	Cleanup(func())
}) *MockCommand {
	mock := &MockCommand{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

// MockCommand is an autogenerated mock type for the Command type
type MockCommand struct {
	mock.Mock
}

type MockCommand_Expecter struct {
	mock *mock.Mock
}

func (_m *MockCommand) EXPECT() *MockCommand_Expecter {
	return &MockCommand_Expecter{mock: &_m.Mock}
}

// Run provides a mock function for the type MockCommand
func (_mock *MockCommand) Run() error {
	ret := _mock.Called()

	if len(ret) == 0 {
		panic("no return value specified for Run")
	}

	var r0 error
	if returnFunc, ok := ret.Get(0).(func() error); ok {
		r0 = returnFunc()
	} else {
		r0 = ret.Error(0)
	}
	return r0
}

// MockCommand_Run_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Run'
type MockCommand_Run_Call struct {
	*mock.Call
}

// Run is a helper method to define mock.On call
func (_e *MockCommand_Expecter) Run() *MockCommand_Run_Call {
	return &MockCommand_Run_Call{Call: _e.mock.On("Run")}
}

func (_c *MockCommand_Run_Call) Run(run func()) *MockCommand_Run_Call {
	_c.Call.Run(func(args mock.Arguments) {
		run()
	})
	return _c
}

func (_c *MockCommand_Run_Call) Return(err error) *MockCommand_Run_Call {
	_c.Call.Return(err)
	return _c
}

func (_c *MockCommand_Run_Call) RunAndReturn(run func() error) *MockCommand_Run_Call {
	_c.Call.Return(run)
	return _c
}

================================================
FILE: executors/custom/config.go
================================================
package custom

import (
	"time"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/process"
)

// config wraps common.CustomConfig and resolves its optional,
// seconds-based timeout settings into time.Duration values, applying
// package defaults when a setting is unset or non-positive.
type config struct {
	*common.CustomConfig
}

// GetConfigExecTimeout returns the timeout for the config_exec script,
// defaulting to defaultConfigExecTimeout.
func (c *config) GetConfigExecTimeout() time.Duration {
	return getDuration(c.ConfigExecTimeout, defaultConfigExecTimeout)
}

// GetPrepareExecTimeout returns the timeout for the prepare_exec script,
// defaulting to defaultPrepareExecTimeout.
func (c *config) GetPrepareExecTimeout() time.Duration {
	return getDuration(c.PrepareExecTimeout, defaultPrepareExecTimeout)
}

// GetCleanupScriptTimeout returns the timeout for the cleanup_exec script,
// defaulting to defaultCleanupExecTimeout.
func (c *config) GetCleanupScriptTimeout() time.Duration {
	return getDuration(c.CleanupExecTimeout, defaultCleanupExecTimeout)
}

// GetGracefulKillTimeout returns how long to wait for a graceful process
// shutdown, defaulting to process.GracefulTimeout.
func (c *config) GetGracefulKillTimeout() time.Duration {
	return getDuration(c.GracefulKillTimeout, process.GracefulTimeout)
}

// GetForceKillTimeout returns how long to wait after a forceful kill,
// defaulting to process.KillTimeout.
func (c *config) GetForceKillTimeout() time.Duration {
	return getDuration(c.ForceKillTimeout, process.KillTimeout)
}

// getDuration converts an optional number of seconds into a time.Duration.
// A nil or non-positive source yields defaultValue.
func getDuration(source *int, defaultValue time.Duration) time.Duration {
	if source == nil {
		return defaultValue
	}

	timeout := *source
	if timeout <= 0 {
		return defaultValue
	}

	return time.Duration(timeout) * time.Second
}

================================================
FILE: executors/custom/config_test.go
================================================
//go:build !integration

package custom

import (
	"testing"
	"time"

	"github.com/stretchr/testify/assert"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/process"
)

// getDurationTestCase pairs an optional source value (seconds) with the
// duration the getter is expected to return.
type getDurationTestCase struct {
	source        *int
	expectedValue time.Duration
}

// testGetDuration runs the shared table of timeout cases (unset, negative,
// positive) against the provided assertion callback.
func testGetDuration(t *testing.T, defaultValue time.Duration, assert func(*testing.T, getDurationTestCase)) {
	tests := map[string]getDurationTestCase{
		"source undefined": {
			expectedValue: defaultValue,
		},
		"source value lower than zero": {
			source:        func() *int { i := -10; return &i }(),
			expectedValue: defaultValue,
		},
		"source value greater than zero": {
			source:        func() *int { i := 10; return &i }(),
			expectedValue: time.Duration(10) * time.Second,
		},
	}

	for testName, tt := range tests {
		t.Run(testName, func(t *testing.T) {
			assert(t, tt)
		})
	}
}

func TestConfig_GetConfigExecTimeout(t *testing.T) {
	testGetDuration(t, defaultConfigExecTimeout, func(t *testing.T, tt getDurationTestCase) {
		c := &config{
			CustomConfig: &common.CustomConfig{
				ConfigExecTimeout: tt.source,
			},
		}

		assert.Equal(t, tt.expectedValue, c.GetConfigExecTimeout())
	})
}

func TestConfig_GetPrepareExecTimeout(t *testing.T) {
	testGetDuration(t, defaultPrepareExecTimeout, func(t *testing.T, tt getDurationTestCase) {
		c :=
&config{
			CustomConfig: &common.CustomConfig{
				PrepareExecTimeout: tt.source,
			},
		}

		assert.Equal(t, tt.expectedValue, c.GetPrepareExecTimeout())
	})
}

func TestConfig_GetCleanupExecTimeout(t *testing.T) {
	testGetDuration(t, defaultCleanupExecTimeout, func(t *testing.T, tt getDurationTestCase) {
		c := &config{
			CustomConfig: &common.CustomConfig{
				CleanupExecTimeout: tt.source,
			},
		}

		assert.Equal(t, tt.expectedValue, c.GetCleanupScriptTimeout())
	})
}

func TestConfig_GetTerminateTimeout(t *testing.T) {
	testGetDuration(t, process.GracefulTimeout, func(t *testing.T, tt getDurationTestCase) {
		c := &config{
			CustomConfig: &common.CustomConfig{
				GracefulKillTimeout: tt.source,
			},
		}

		assert.Equal(t, tt.expectedValue, c.GetGracefulKillTimeout())
	})
}

func TestConfig_GetForceKillTimeout(t *testing.T) {
	testGetDuration(t, process.KillTimeout, func(t *testing.T, tt getDurationTestCase) {
		c := &config{
			CustomConfig: &common.CustomConfig{
				ForceKillTimeout: tt.source,
			},
		}

		assert.Equal(t, tt.expectedValue, c.GetForceKillTimeout())
	})
}

================================================
FILE: executors/custom/consts.go
================================================
package custom

import "time"

// Default timeout for the config_exec lifecycle script; used when the
// corresponding *_exec_timeout setting is unset or non-positive.
const defaultConfigExecTimeout = time.Hour

// Default timeout for the prepare_exec lifecycle script.
const defaultPrepareExecTimeout = time.Hour

// Default timeout for the cleanup_exec lifecycle script.
const defaultCleanupExecTimeout = time.Hour

================================================
FILE: executors/custom/custom.go
================================================
package custom

import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"os"
	"path/filepath"

	"github.com/sirupsen/logrus"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger"
	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/gitlab-runner/executors"
	"gitlab.com/gitlab-org/gitlab-runner/executors/custom/api"
	"gitlab.com/gitlab-org/gitlab-runner/executors/custom/command"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/process"
)

// commandOutputs groups the writers that receive a driver command's
// stdout and stderr.
type commandOutputs struct {
	stdout io.WriteCloser
	stderr io.WriteCloser
}

// Close closes both output streams, joining any errors.
func (c *commandOutputs) Close() error {
	return errors.Join(c.stdout.Close(), c.stderr.Close())
}

// prepareCommandOpts bundles what is needed to build a driver command:
// the executable, its arguments, and the output streams.
type prepareCommandOpts struct {
	executable string
	args       []string
	out        commandOutputs
}

// ConfigExecOutput wraps the JSON document produced by the config_exec
// script.
type ConfigExecOutput struct {
	api.ConfigExecOutput
}

// jsonService is the serialized form of a job service entry, exposed to
// drivers via the CI_JOB_SERVICES variable.
type jsonService struct {
	Name       string   `json:"name"`
	Alias      string   `json:"alias"`
	Entrypoint []string `json:"entrypoint"`
	Command    []string `json:"command"`
}

// InjectInto applies each non-nil field of the config_exec output to the
// executor and its build/runner configuration.
func (c *ConfigExecOutput) InjectInto(executor *executor) {
	if c.Hostname != nil {
		executor.Build.Hostname = *c.Hostname
	}

	if c.BuildsDir != nil {
		executor.Config.BuildsDir = *c.BuildsDir
	}

	if c.CacheDir != nil {
		executor.Config.CacheDir = *c.CacheDir
	}

	if c.BuildsDirIsShared != nil {
		executor.SharedBuildsDir = *c.BuildsDirIsShared
	}

	executor.driverInfo = c.Driver

	if c.JobEnv != nil {
		executor.jobEnv = *c.JobEnv
	}

	if c.Shell != nil {
		executor.Config.Shell = *c.Shell
	}
}

// executor implements the Custom executor. It delegates the config,
// prepare, run and cleanup stages to user-provided executables configured
// in the [runners.custom] section.
type executor struct {
	executors.AbstractExecutor

	config *config

	// tempDir is a scratch directory removed during Cleanup.
	tempDir string

	// jobResponseFile is the JSON-encoded job response handed to the driver.
	jobResponseFile string
	// buildExitCodeFile is where the driver reports the build's exit code.
	buildExitCodeFile string

	// driverInfo is the optional driver name/version reported by config_exec.
	driverInfo *api.DriverInfo
	// jobEnv holds extra environment variables reported by config_exec.
	jobEnv map[string]string
}

// Prepare validates the configuration, creates the scratch directory and
// job response file, runs the optional config_exec and prepare_exec
// scripts, and sets up the build shell.
func (e *executor) Prepare(options common.ExecutorPrepareOptions) error {
	e.AbstractExecutor.PrepareConfiguration(options)

	err := e.prepareConfig()
	if err != nil {
		return err
	}

	e.tempDir, err = os.MkdirTemp("", "custom-executor")
	if err != nil {
		return err
	}

	e.jobResponseFile, err = e.createJobResponseFile()
	if err != nil {
		return err
	}

	e.buildExitCodeFile = filepath.Join(e.tempDir, "build_exit_code")

	err = e.dynamicConfig()
	if err != nil {
		return err
	}

	e.logStartupMessage()

	err = e.AbstractExecutor.PrepareBuildAndShell()
	if err != nil {
		return err
	}

	// nothing to do, as there's no prepare_script
	if e.config.PrepareExec == "" {
		return nil
	}

	ctx, cancelFunc := context.WithTimeout(e.Context, e.config.GetPrepareExecTimeout())
	defer cancelFunc()

	opts := prepareCommandOpts{
		executable: e.config.PrepareExec,
		args:       e.config.PrepareArgs,
		out: commandOutputs{
			stdout: e.BuildLogger.Stream(buildlogger.StreamExecutorLevel, buildlogger.Stdout),
			stderr: e.BuildLogger.Stream(buildlogger.StreamExecutorLevel, buildlogger.Stderr),
		},
	}
	defer opts.out.Close()

	return e.prepareCommand(ctx, opts).Run()
}

// prepareConfig validates the Custom executor configuration, requiring at
// least RunExec to be set.
func (e *executor) prepareConfig() error {
	if e.Config.Custom == nil {
		return common.MakeBuildError("custom executor not configured")
	}

	e.config = &config{
		CustomConfig: e.Config.Custom,
	}

	if e.config.RunExec == "" {
		return common.MakeBuildError("custom executor is missing RunExec")
	}

	return nil
}

// createJobResponseFile writes the job response as JSON into the scratch
// directory and returns the file's path.
func (e *executor) createJobResponseFile() (string, error) {
	responseFile := filepath.Join(e.tempDir, "response.json")

	file, err := os.OpenFile(responseFile, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0o600)
	if err != nil {
		return "", fmt.Errorf("creating job response file %q: %w", responseFile, err)
	}
	defer func() { _ = file.Close() }()

	encoder := json.NewEncoder(file)
	err = encoder.Encode(e.Build.Job)
	if err != nil {
		return "", fmt.Errorf("encoding job response file: %w", err)
	}

	return responseFile, nil
}

// dynamicConfig runs the optional config_exec script, capturing its stdout,
// and merges the resulting JSON (if any) into the executor configuration.
func (e *executor) dynamicConfig() error {
	if e.config.ConfigExec == "" {
		return nil
	}

	ctx, cancelFunc := context.WithTimeout(e.Context, e.config.GetConfigExecTimeout())
	defer cancelFunc()

	buf := bytes.NewBuffer(nil)
	opts := prepareCommandOpts{
		executable: e.config.ConfigExec,
		args:       e.config.ConfigArgs,
		out: commandOutputs{
			stdout: buildlogger.NewNopCloser(buf),
			stderr: e.BuildLogger.Stream(buildlogger.StreamExecutorLevel, buildlogger.Stderr),
		},
	}
	defer opts.out.Close()

	// Force refresh of all build variables for the upcoming command, ensuring
	// that the up-to-date environment variables are provided to the ConfigExec script.
	e.Build.RefreshAllVariables()

	err := e.prepareCommand(ctx, opts).Run()
	if err != nil {
		return err
	}

	// An empty stdout is valid: the script may choose not to override anything.
	jsonConfig := buf.Bytes()
	if len(jsonConfig) < 1 {
		return nil
	}

	config := new(ConfigExecOutput)

	err = json.Unmarshal(jsonConfig, config)
	if err != nil {
		return fmt.Errorf("error while parsing JSON output: %w", err)
	}

	config.InjectInto(e)

	return nil
}

// logStartupMessage prints the "Using Custom executor..." banner, including
// the driver name and version when config_exec reported them.
func (e *executor) logStartupMessage() {
	const usageLine = "Using Custom executor"

	info := e.driverInfo
	if info == nil || info.Name == nil {
		e.BuildLogger.Println(fmt.Sprintf("%s...", usageLine))
		return
	}

	if info.Version == nil {
		e.BuildLogger.Println(fmt.Sprintf("%s with driver %s...", usageLine, *info.Name))
		return
	}

	e.BuildLogger.Println(fmt.Sprintf("%s with driver %s %s...", usageLine, *info.Name, *info.Version))
}

// commandFactory creates driver commands; it is a variable so tests can
// substitute a mock implementation.
var commandFactory = command.New

// prepareCommand assembles the process options and environment (job_env
// first, then CUSTOM_ENV_*-prefixed build variables) for a driver command
// and constructs it via commandFactory.
func (e *executor) prepareCommand(ctx context.Context, opts prepareCommandOpts) command.Command {
	logger := common.NewProcessLoggerAdapter(e.BuildLogger)

	cmdOpts := process.CommandOptions{
		Dir:                             e.tempDir,
		Env:                             make([]string, 0),
		Stdout:                          opts.out.stdout,
		Stderr:                          opts.out.stderr,
		Logger:                          logger,
		GracefulKillTimeout:             e.config.GetGracefulKillTimeout(),
		ForceKillTimeout:                e.config.GetForceKillTimeout(),
		UseWindowsLegacyProcessStrategy: e.Build.IsFeatureFlagOn(featureflags.UseWindowsLegacyProcessStrategy),
		UseWindowsJobObject:             e.Build.IsFeatureFlagOn(featureflags.UseWindowsJobObject),
	}

	// Append job_env defined variable first to avoid overwriting any CI/CD or predefined variables.
	for k, v := range e.jobEnv {
		cmdOpts.Env = append(cmdOpts.Env, fmt.Sprintf("%s=%s", k, v))
	}

	variables := append(e.Build.GetAllVariables(), e.getCIJobServicesEnv())
	for _, variable := range variables {
		cmdOpts.Env = append(cmdOpts.Env, fmt.Sprintf("CUSTOM_ENV_%s=%s", variable.Key, variable.Value))
	}

	options := command.Options{
		JobResponseFile:   e.jobResponseFile,
		BuildExitCodeFile: e.buildExitCodeFile,
	}

	return commandFactory(ctx, opts.executable, opts.args, cmdOpts, options)
}

// getCIJobServicesEnv serializes the job's services into the
// CI_JOB_SERVICES variable; the value is empty when there are no services.
func (e *executor) getCIJobServicesEnv() spec.Variable {
	if len(e.Build.Services) == 0 {
		return spec.Variable{Key: "CI_JOB_SERVICES"}
	}

	var services []jsonService
	for _, service := range e.Build.Services {
		services = append(services, jsonService{
			Name: service.Name,
			// Use the first alias, or "" when the service defines none.
			Alias:      append(service.Aliases(), "")[0],
			Entrypoint: service.Entrypoint,
			Command:    service.Command,
		})
	}

	// Best-effort serialization: on failure we warn and emit an empty value.
	servicesSerialized, err := json.Marshal(services)
	if err != nil {
		e.BuildLogger.Warningln("Unable to serialize CI_JOB_SERVICES json:", err)
	}

	return spec.Variable{
		Key:   "CI_JOB_SERVICES",
		Value: string(servicesSerialized),
	}
}

// Run writes the stage script to disk and executes the run_exec driver
// executable with the script path and stage name as arguments.
func (e *executor) Run(cmd common.ExecutorCommand) error {
	scriptDir, err := os.MkdirTemp(e.tempDir, "script")
	if err != nil {
		return err
	}

	scriptName := "script"
	if e.BuildShell.Extension != "" {
		scriptName += "." + e.BuildShell.Extension
	}

	scriptFile := filepath.Join(scriptDir, scriptName)
	err = os.WriteFile(scriptFile, []byte(cmd.Script), 0o700)
	if err != nil {
		return err
	}

	// TODO: Remove this translation - https://gitlab.com/groups/gitlab-org/-/epics/6112
	stage := cmd.Stage
	if stage == "step_script" {
		e.BuildLogger.Warningln("Starting with version 17.0 the 'build_script' stage " +
			"will be replaced with 'step_script': https://gitlab.com/groups/gitlab-org/-/epics/6112")
		stage = "build_script"
	}

	args := append(e.config.RunArgs, scriptFile, string(stage)) //nolint:gocritic

	opts := prepareCommandOpts{
		executable: e.config.RunExec,
		args:       args,
		out: commandOutputs{
			stdout: e.BuildLogger.Stream(buildlogger.StreamWorkLevel, buildlogger.Stdout),
			stderr: e.BuildLogger.Stream(buildlogger.StreamWorkLevel, buildlogger.Stderr),
		},
	}
	defer opts.out.Close()

	return e.prepareCommand(cmd.Context, opts).Run()
}

// Cleanup runs the optional cleanup_exec script (with its own context, as
// the job context may already be cancelled) and removes the scratch
// directory.
func (e *executor) Cleanup() {
	e.AbstractExecutor.Cleanup()

	err := e.prepareConfig()
	if err != nil {
		e.BuildLogger.Warningln(err)

		// at this moment we don't care about the errors
		return
	}

	defer func() { _ = os.RemoveAll(e.tempDir) }()

	// nothing to do, as there's no cleanup_script
	if e.config.CleanupExec == "" {
		return
	}

	ctx, cancelFunc := context.WithTimeout(context.Background(), e.config.GetCleanupScriptTimeout())
	defer cancelFunc()

	stdoutLogger := e.BuildLogger.WithFields(logrus.Fields{"cleanup_std": "out"})
	stderrLogger := e.BuildLogger.WithFields(logrus.Fields{"cleanup_std": "err"})

	opts := prepareCommandOpts{
		executable: e.config.CleanupExec,
		args:       e.config.CleanupArgs,
		out: commandOutputs{
			stdout: stdoutLogger.WriterLevel(logrus.DebugLevel),
			stderr: stderrLogger.WriterLevel(logrus.WarnLevel),
		},
	}
	defer opts.out.Close()

	err = e.prepareCommand(ctx, opts).Run()
	if err != nil {
		e.BuildLogger.Warningln("Cleanup script failed:", err)
	}
}

// NewProvider returns the executor provider for the Custom executor,
// wiring the default shell and feature flags.
func NewProvider(runnerCommandPath string) common.ExecutorProvider {
	options := executors.ExecutorOptions{
		DefaultCustomBuildsDirEnabled: false,
		DefaultSafeDirectoryCheckout:  false,
		Shell: common.ShellScriptInfo{
			Shell:         common.GetDefaultShell(),
			Type:          common.NormalShell,
			RunnerCommand: runnerCommandPath,
		},
		ShowHostname: false,
	}

	creator := func() common.Executor {
		return &executor{
			AbstractExecutor: executors.AbstractExecutor{
				ExecutorOptions: options,
			},
		}
	}

	featuresUpdater := func(features *common.FeaturesInfo) {
		features.Variables = true
		features.Shared = true
	}

	return executors.DefaultExecutorProvider{
		Creator:          creator,
		FeaturesUpdater:  featuresUpdater,
		DefaultShellName: options.Shell.Shell,
	}
}

================================================
FILE: executors/custom/custom_test.go
================================================
//go:build !integration

package custom

import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"runtime"
	"strings"
	"testing"

	"github.com/sirupsen/logrus"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/common/buildlogger"
	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/gitlab-runner/executors/custom/command"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/process"
)

// executorTestCase describes one Custom executor scenario: the runner
// configuration, the mocked driver command's behavior, optional pre-test
// adjustments, and the assertions to run afterwards.
type executorTestCase struct {
	config common.RunnerConfig

	commandStdoutContent string
	commandStderrContent string
	commandErr           error

	doNotMockCommandFactory bool

	adjustExecutor func(t *testing.T, e *executor)
	adjustOptions  func(t *testing.T, options common.ExecutorPrepareOptions)

	assertBuild          func(t *testing.T, b *common.Build)
	assertCommandFactory func(
		t *testing.T,
		tt executorTestCase,
		ctx context.Context,
		executable string,
		args []string,
		cmdOpts process.CommandOptions,
		options command.Options,
	)
	assertOutput   func(t *testing.T, output string)
	assertExecutor func(t *testing.T, e *executor)

	expectedError string
}

// getRunnerConfig builds a minimal runner configuration, optionally with a
// Custom executor section.
func getRunnerConfig(custom *common.CustomConfig) common.RunnerConfig {
	rc := common.RunnerConfig{
		RunnerCredentials: common.RunnerCredentials{
			Token: "RuNnErToKeN",
		},
		RunnerSettings: common.RunnerSettings{
			BuildsDir: "/builds",
			CacheDir:  "/cache",
			Shell:     "bash",
		},
	}

	if custom != nil {
		rc.Custom = custom
	}

	return rc
}

// prepareExecutorForCleanup builds an executor whose fields are populated
// the way Cleanup expects them, without calling Prepare.
func prepareExecutorForCleanup(t *testing.T, tt executorTestCase) (*executor, *bytes.Buffer) {
	e, options, out := prepareExecutor(t, tt)

	e.Config = *options.Config
	e.Build = options.Build
	e.BuildLogger = options.BuildLogger

	return e, out
}

// prepareExecutor creates a fresh executor and its prepare options; all
// build log output is captured into the returned buffer.
func prepareExecutor(t *testing.T, tt executorTestCase) (*executor, common.ExecutorPrepareOptions, *bytes.Buffer) {
	out := bytes.NewBuffer([]byte{})

	successfulBuild, err := common.GetSuccessfulBuild()
	require.NoError(t, err)

	successfulBuild.ID = jobID()

	trace := common.NewMockJobTrace(t)
	trace.On("Write", mock.Anything).
		Run(func(args mock.Arguments) {
			_, err := io.Copy(out, bytes.NewReader(args.Get(0).([]byte)))
			require.NoError(t, err)
		}).
		Return(0, nil).
		Maybe()
	trace.On("IsStdout").
		Return(false).
		Maybe()

	options := common.ExecutorPrepareOptions{
		Build: &common.Build{
			Job:    successfulBuild,
			Runner: &tt.config,
		},
		Config:      &tt.config,
		Context:     t.Context(),
		BuildLogger: buildlogger.New(trace, logrus.WithFields(logrus.Fields{}), buildlogger.Options{}),
	}

	e := new(executor)

	return e, options, out
}

// currentJobID issues unique build IDs so per-project paths differ between cases.
var currentJobID = int64(0)

func jobID() int64 {
	i := currentJobID
	currentJobID++

	return i
}

// assertOutput runs the test case's build-log assertion, if one is set.
func assertOutput(t *testing.T, tt executorTestCase, out *bytes.Buffer) {
	if tt.assertOutput == nil {
		return
	}

	tt.assertOutput(t, out.String())
}

// mockCommandFactory swaps commandFactory for a mock command for the
// duration of the test, wiring in the case's stdout/stderr content and
// command error; restored via t.Cleanup.
func mockCommandFactory(t *testing.T, tt executorTestCase) {
	if tt.doNotMockCommandFactory {
		return
	}

	outputs := commandOutputs{
		stdout: nil,
		stderr: nil,
	}

	cmd := command.NewMockCommand(t)
	cmd.On("Run").
		Run(func(_ mock.Arguments) {
			if outputs.stdout != nil {
				defer outputs.stdout.Close()
			}
			if outputs.stderr != nil {
				defer outputs.stderr.Close()
			}

			if tt.commandStdoutContent != "" && outputs.stdout != nil {
				_, err := fmt.Fprintln(outputs.stdout, tt.commandStdoutContent)
				require.NoError(t, err, "Unexpected error on mocking command output to stdout")
			}

			if tt.commandStderrContent != "" && outputs.stderr != nil {
				_, err := fmt.Fprintln(outputs.stderr, tt.commandStderrContent)
				require.NoError(t, err, "Unexpected error on mocking command output to stderr")
			}
		}).
		Return(tt.commandErr)

	oldFactory := commandFactory
	commandFactory = func(
		ctx context.Context,
		executable string,
		args []string,
		cmdOpts process.CommandOptions,
		options command.Options,
	) command.Command {
		if tt.assertCommandFactory != nil {
			tt.assertCommandFactory(t, tt, ctx, executable, args, cmdOpts, options)
		}

		outputs.stdout = buildlogger.NewNopCloser(cmdOpts.Stdout)
		outputs.stderr = buildlogger.NewNopCloser(cmdOpts.Stderr)

		return cmd
	}
	t.Cleanup(func() {
		commandFactory = oldFactory
	})
}

func TestExecutor_Prepare(t *testing.T) {
	tests := map[string]executorTestCase{
		"AbstractExecutor.Prepare failure": {
			config:                  common.RunnerConfig{},
			doNotMockCommandFactory: true,
			expectedError:           "custom executor not configured",
		},
		"custom executor not set": {
			config:                  getRunnerConfig(nil),
			doNotMockCommandFactory: true,
			expectedError:           "custom executor not configured",
		},
		"custom executor set without RunExec": {
			config:                  getRunnerConfig(&common.CustomConfig{}),
			doNotMockCommandFactory: true,
			expectedError:           "custom executor is missing RunExec",
		},
		"custom executor set": {
			config: getRunnerConfig(&common.CustomConfig{
				RunExec: "bash",
			}),
			doNotMockCommandFactory: true,
			assertOutput: func(t *testing.T, output string) {
				assert.Contains(t, output, "Using Custom executor...")
			},
		},
		"custom executor set with ConfigExec with error": {
			config: getRunnerConfig(&common.CustomConfig{
				RunExec:    "bash",
				ConfigExec: "echo",
				ConfigArgs:
[]string{"test"}, }), commandErr: errors.New("test-error"), assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.ConfigExec, executable) assert.Equal(t, tt.config.Custom.ConfigArgs, args) }, assertOutput: func(t *testing.T, output string) { assert.NotContains(t, output, "Using Custom executor...") }, expectedError: "test-error", }, "custom executor set with ConfigExec with invalid JSON": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", ConfigExec: "echo", }), commandStdoutContent: "abcd", commandErr: nil, assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.ConfigExec, executable) }, assertOutput: func(t *testing.T, output string) { assert.NotContains(t, output, "Using Custom executor...") }, expectedError: "error while parsing JSON output: invalid character 'a' looking for beginning of value", }, "custom executor set with ConfigExec with empty JSON": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", ConfigExec: "echo", }), commandStdoutContent: "", commandErr: nil, assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.ConfigExec, executable) }, assertOutput: func(t *testing.T, output string) { assert.Contains(t, output, "Using Custom executor...") }, assertBuild: func(t *testing.T, b *common.Build) { assert.Equal(t, "/builds/project-0", b.BuildDir) assert.Equal(t, "/cache/project-0", b.CacheDir) }, assertExecutor: func(t *testing.T, e *executor) { assert.Nil(t, e.jobEnv) }, }, "custom executor set with ConfigExec with undefined builds_dir": { config: 
getRunnerConfig(&common.CustomConfig{ RunExec: "bash", ConfigExec: "echo", }), commandStdoutContent: `{"builds_dir":""}`, commandErr: nil, assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.ConfigExec, executable) }, assertOutput: func(t *testing.T, output string) { assert.Contains(t, output, "Using Custom executor...") }, expectedError: "the builds_dir is not configured", }, "custom executor set with ConfigExec and driver info missing name": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", ConfigExec: "echo", }), commandStdoutContent: `{ "driver": { "version": "v0.0.1" } }`, commandErr: nil, assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.ConfigExec, executable) }, assertOutput: func(t *testing.T, output string) { assert.Contains(t, output, "Using Custom executor...") }, }, "custom executor set with ConfigExec and driver info missing version": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", ConfigExec: "echo", }), commandStdoutContent: `{ "driver": { "name": "test driver" } }`, commandErr: nil, assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.ConfigExec, executable) }, assertOutput: func(t *testing.T, output string) { assert.Contains(t, output, "Using Custom executor with driver test driver...") }, }, "custom executor set with ConfigExec": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", ConfigExec: "echo", }), commandStdoutContent: `{ "hostname": "custom-hostname", "builds_dir": "/some/build/directory", "cache_dir": 
"/some/cache/directory", "builds_dir_is_shared":true, "driver": { "name": "test driver", "version": "v0.0.1" }, "shell": "powershell" }`, commandErr: nil, assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.ConfigExec, executable) }, assertOutput: func(t *testing.T, output string) { assert.Contains(t, output, "Using Custom executor with driver test driver v0.0.1...") }, assertBuild: func(t *testing.T, b *common.Build) { assert.Equal(t, "custom-hostname", b.Hostname) assert.Equal(t, "/some/build/directory/RuNnErToK/0/project-0", b.BuildDir) assert.Equal(t, "/some/cache/directory/project-0", b.CacheDir) }, assertExecutor: func(t *testing.T, e *executor) { assert.Equal(t, "powershell", e.Shell().Shell) }, }, "custom executor set with PrepareExec": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", PrepareExec: "echo", PrepareArgs: []string{"test"}, }), assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.PrepareExec, executable) assert.Equal(t, tt.config.Custom.PrepareArgs, args) }, assertOutput: func(t *testing.T, output string) { assert.Contains(t, output, "Using Custom executor...") }, }, "custom executor set with PrepareExec with error": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", PrepareExec: "echo", PrepareArgs: []string{"test"}, }), commandErr: errors.New("test-error"), assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.PrepareExec, executable) assert.Equal(t, tt.config.Custom.PrepareArgs, args) }, assertOutput: func(t *testing.T, output string) { 
assert.Contains(t, output, "Using Custom executor...") }, expectedError: "test-error", }, "custom executor set with valid job_env": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", ConfigExec: "echo", }), commandStdoutContent: `{ "builds_dir": "/some/build/directory", "job_env": { "FOO": "Hello", "BAR": "World" } }`, commandErr: nil, assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.ConfigExec, executable) }, assertBuild: func(t *testing.T, b *common.Build) { assert.Equal(t, "/some/build/directory/project-0", b.BuildDir) }, assertExecutor: func(t *testing.T, e *executor) { assert.Len(t, e.jobEnv, 2) require.Contains(t, e.jobEnv, "FOO") assert.Equal(t, "Hello", e.jobEnv["FOO"]) require.Contains(t, e.jobEnv, "BAR") assert.Equal(t, "World", e.jobEnv["BAR"]) }, }, "custom executor set with valid job_env, verify variable order and prefix": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "run-executable", ConfigExec: "config-executable", PrepareExec: "prepare-executable", PrepareArgs: []string{"test"}, }), commandStdoutContent: `{ "builds_dir": "/some/build/directory", "job_env": { "FOO": "Hello" } }`, commandErr: nil, assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { if executable != "prepare-executable" { return } require.True(t, len(cmdOpts.Env) >= 2, "cmdOpts.Env must contain 2 elements or more") assert.Equal(t, "FOO=Hello", cmdOpts.Env[0], "first env var must be FOO") assert.True( t, strings.HasPrefix(cmdOpts.Env[1], "CUSTOM_ENV_"), "must be followed by CUSTOM_ENV_* variables", ) }, }, "job response file specified in file": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "run-executable", ConfigExec: "config-executable", PrepareExec: 
"prepare-executable", PrepareArgs: []string{"test"}, }), commandStdoutContent: `{ "builds_dir": "/some/build/directory" }`, commandErr: nil, assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.NotEmpty(t, options.JobResponseFile) }, }, "custom executor variable reset before ConfigExec": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", ConfigExec: "echo", }), adjustOptions: func(t *testing.T, options common.ExecutorPrepareOptions) { // Running this will set b.allVariables (common/build.go) before test. _ = options.Build.GetAllVariables() options.Build.RunnerID = 1 options.Build.ProjectRunnerID = 1 }, commandErr: nil, assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Contains(t, cmdOpts.Env, "CUSTOM_ENV_CI_CONCURRENT_PROJECT_ID=1") assert.Contains(t, cmdOpts.Env, "CUSTOM_ENV_CI_CONCURRENT_ID=1") }, }, } for testName, tt := range tests { t.Run(testName, func(t *testing.T) { mockCommandFactory(t, tt) e, options, out := prepareExecutor(t, tt) if tt.adjustOptions != nil { tt.adjustOptions(t, options) } err := e.Prepare(options) assertOutput(t, tt, out) if tt.assertBuild != nil { tt.assertBuild(t, e.Build) } if tt.assertExecutor != nil { tt.assertExecutor(t, e) } if tt.expectedError == "" { assert.NoError(t, err) return } assert.EqualError(t, err, tt.expectedError) }) } } func TestExecutor_Cleanup(t *testing.T) { tests := map[string]executorTestCase{ "custom executor not set": { config: getRunnerConfig(nil), assertOutput: func(t *testing.T, output string) { assert.Contains(t, output, "custom executor not configured") }, doNotMockCommandFactory: true, }, "custom executor set without RunExec": { config: getRunnerConfig(&common.CustomConfig{}), assertOutput: func(t *testing.T, output string) { 
assert.Contains(t, output, "custom executor is missing RunExec") }, doNotMockCommandFactory: true, }, "custom executor set": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", }), doNotMockCommandFactory: true, }, "custom executor set with CleanupExec": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", CleanupExec: "echo", CleanupArgs: []string{"test"}, }), assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.CleanupExec, executable) assert.Equal(t, tt.config.Custom.CleanupArgs, args) }, assertOutput: func(t *testing.T, output string) { assert.NotContains(t, output, "WARNING: Cleanup script failed:") }, }, "custom executor set with CleanupExec with error": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", CleanupExec: "unknown", }), commandStdoutContent: "some output message in commands output", commandStderrContent: "some error message in commands output", commandErr: errors.New("test-error"), assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.CleanupExec, executable) }, assertOutput: func(t *testing.T, output string) { assert.Contains(t, output, "WARNING: Cleanup script failed: test-error") }, }, "custom executor set with valid job_env, verify variable order and prefix": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", CleanupExec: "echo", CleanupArgs: []string{"test"}, }), adjustExecutor: func(t *testing.T, e *executor) { e.jobEnv = map[string]string{"FOO": "Hello"} }, assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { require.True(t, len(cmdOpts.Env) >= 2, 
"cmdOpts.Env must contain 2 elements or more") assert.Equal(t, "FOO=Hello", cmdOpts.Env[0], "first env var must be FOO") assert.True( t, strings.HasPrefix(cmdOpts.Env[1], "CUSTOM_ENV_"), "must be followed by CUSTOM_ENV_* variables", ) }, }, } for testName, tt := range tests { t.Run(testName, func(t *testing.T) { mockCommandFactory(t, tt) e, out := prepareExecutorForCleanup(t, tt) if tt.adjustExecutor != nil { tt.adjustExecutor(t, e) } e.Cleanup() assertOutput(t, tt, out) }) } } func TestExecutor_Run(t *testing.T) { tests := map[string]executorTestCase{ "Run fails on tempdir operations": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", }), doNotMockCommandFactory: true, adjustExecutor: func(t *testing.T, e *executor) { curDir, err := os.Getwd() require.NoError(t, err) e.tempDir = filepath.Join(curDir, "unknown") }, expectedError: func() string { if runtime.GOOS == "windows" { return "The system cannot find the file specified" } return "no such file or directory" }(), }, "Run executes job": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", }), assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.RunExec, executable) assert.Len(t, args, 2) assert.Equal(t, "build_script", args[1]) }, }, "Run executes job with error": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", CleanupExec: "unknown", }), commandErr: errors.New("test-error"), assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { assert.Equal(t, tt.config.Custom.RunExec, executable) }, expectedError: "test-error", }, "custom executor set with valid job_env, verify variable order and prefix": { config: getRunnerConfig(&common.CustomConfig{ RunExec: "bash", }), adjustExecutor: func(t 
*testing.T, e *executor) { e.jobEnv = map[string]string{"FOO": "Hello"} }, assertCommandFactory: func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { require.True(t, len(cmdOpts.Env) >= 2, "cmdOpts.Env must contain 2 elements or more") assert.Equal(t, "FOO=Hello", cmdOpts.Env[0], "first env var must be FOO") assert.True( t, strings.HasPrefix(cmdOpts.Env[1], "CUSTOM_ENV_"), "must be followed by CUSTOM_ENV_* variables", ) }, }, } for testName, tt := range tests { t.Run(testName, func(t *testing.T) { mockCommandFactory(t, tt) e, options, out := prepareExecutor(t, tt) err := e.Prepare(options) require.NoError(t, err) if tt.adjustExecutor != nil { tt.adjustExecutor(t, e) } err = e.Run(common.ExecutorCommand{ Context: t.Context(), Stage: "step_script", }) assertOutput(t, tt, out) if tt.expectedError == "" { assert.NoError(t, err) return } require.Error(t, err) assert.Contains(t, err.Error(), tt.expectedError) }) } } func TestExecutor_Env(t *testing.T) { ciJobImageEnv := "CUSTOM_ENV_CI_JOB_IMAGE" runnerConfig := getRunnerConfig(&common.CustomConfig{ RunExec: "bash", PrepareExec: "echo", CleanupExec: "bash", }) assertCommandFactory := func(expectedImageName string) func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { return func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { for _, env := range cmdOpts.Env { pair := strings.Split(env, "=") if pair[0] == ciJobImageEnv { assert.Equal(t, expectedImageName, pair[1]) break } } } } adjustExecutorFactory := func(imageName string) func(t *testing.T, e *executor) { return func(t *testing.T, e *executor) { // the build is assumed to be non-nil across the executor codebase e.Build.Image = spec.Image{Name: imageName} } } tests 
:= map[string]executorTestCase{ "custom executor set " + ciJobImageEnv: { config: runnerConfig, adjustExecutor: adjustExecutorFactory("test_image"), assertCommandFactory: assertCommandFactory("test_image"), }, "custom executor set empty " + ciJobImageEnv: { config: runnerConfig, adjustExecutor: adjustExecutorFactory(""), assertCommandFactory: assertCommandFactory(""), }, "custom executor set expanded " + ciJobImageEnv: { config: runnerConfig, adjustExecutor: func(t *testing.T, e *executor) { e.Build.Variables = append(e.Build.Variables, spec.Variable{ Key: "to_expand", Value: "expanded", }) adjustExecutorFactory("image:$to_expand")(t, e) }, assertCommandFactory: assertCommandFactory("image:expanded"), }, "custom executor set no variable to expand " + ciJobImageEnv: { config: runnerConfig, adjustExecutor: adjustExecutorFactory("image:$nothing_to_expand"), assertCommandFactory: assertCommandFactory("image:"), }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { mockCommandFactory(t, tt) e, options, _ := prepareExecutor(t, tt) e.Config = *options.Config e.Build = options.Build e.BuildLogger = options.BuildLogger if tt.adjustExecutor != nil { tt.adjustExecutor(t, e) } err := e.Prepare(options) assert.NoError(t, err) err = e.Run(common.ExecutorCommand{ Context: t.Context(), }) assert.NoError(t, err) e.Cleanup() }) } } func TestExecutor_ServicesEnv(t *testing.T) { const CIJobServicesEnv = "CUSTOM_ENV_CI_JOB_SERVICES" runnerConfig := getRunnerConfig(&common.CustomConfig{ RunExec: "bash", PrepareExec: "echo", CleanupExec: "bash", }) adjustExecutorServices := func(services spec.Services) func(t *testing.T, e *executor) { return func(t *testing.T, e *executor) { e.Build.Services = services } } assertEnvValue := func(expectedServices []jsonService) func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { return func( t *testing.T, tt executorTestCase, ctx 
context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { for _, env := range cmdOpts.Env { pair := strings.Split(env, "=") if pair[0] == CIJobServicesEnv { expectedServicesSerialized, _ := json.Marshal(expectedServices) assert.Equal(t, string(expectedServicesSerialized), pair[1]) break } } } } assertEmptyEnv := func() func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { return func( t *testing.T, tt executorTestCase, ctx context.Context, executable string, args []string, cmdOpts process.CommandOptions, options command.Options, ) { for _, env := range cmdOpts.Env { pair := strings.Split(env, "=") if pair[0] == CIJobServicesEnv { assert.Equal(t, "", pair[1]) break } } } } tests := map[string]executorTestCase{ "returns only name when service name is the only definition": { config: runnerConfig, adjustExecutor: adjustExecutorServices(spec.Services{ { Name: "ruby:latest", }, }), assertCommandFactory: assertEnvValue( []jsonService{ { Name: "ruby:latest", Alias: "", Entrypoint: nil, Command: nil, }, }, ), }, "returns full service definition": { config: runnerConfig, adjustExecutor: adjustExecutorServices(spec.Services{ { Name: "ruby:latest", Alias: "henk-ruby", Entrypoint: []string{"path", "to", "entrypoint"}, Command: []string{"path", "to", "command"}, }, }), assertCommandFactory: assertEnvValue( []jsonService{ { Name: "ruby:latest", Alias: "henk-ruby", Entrypoint: []string{"path", "to", "entrypoint"}, Command: []string{"path", "to", "command"}, }, }, ), }, "returns both simple and full service definitions": { config: runnerConfig, adjustExecutor: adjustExecutorServices(spec.Services{ { Name: "python:latest", Alias: "henk-python", Entrypoint: []string{"entrypoint.sh"}, Command: []string{"command --test"}, }, { Name: "python:alpine", }, }), assertCommandFactory: assertEnvValue( []jsonService{ { Name: 
"python:latest", Alias: "henk-python", Entrypoint: []string{"entrypoint.sh"}, Command: []string{"command --test"}, }, { Name: "python:alpine", Alias: "", Entrypoint: nil, Command: nil, }, }, ), }, "does not create env CI_JOB_SERVICES": { config: runnerConfig, adjustExecutor: adjustExecutorServices(spec.Services{}), assertCommandFactory: assertEmptyEnv(), }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { mockCommandFactory(t, tt) e, options, _ := prepareExecutor(t, tt) e.Config = *options.Config e.Build = options.Build e.BuildLogger = options.BuildLogger if tt.adjustExecutor != nil { tt.adjustExecutor(t, e) } err := e.Prepare(options) assert.NoError(t, err) err = e.Run(common.ExecutorCommand{ Context: t.Context(), }) assert.NoError(t, err) e.Cleanup() }) } } ================================================ FILE: executors/custom/integration_test.go ================================================ //go:build integration package custom_test import ( "fmt" "os" "os/exec" "path/filepath" "runtime" "strings" "testing" "time" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/buildtest" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/executors/custom" "gitlab.com/gitlab-org/gitlab-runner/executors/custom/command" "gitlab.com/gitlab-org/gitlab-runner/helpers" "gitlab.com/gitlab-org/gitlab-runner/session" "gitlab.com/gitlab-org/gitlab-runner/shells/shellstest" ) var ( testExecutorFile string testJobInfo = spec.JobInfo{ Name: "test job", Stage: "test", ProjectID: 0, ProjectName: "test project", } ) const integrationTestCustomExecutor = "custom-integration-test" var runnerPath string func TestMain(m *testing.M) { code := 1 defer func() { os.Exit(code) }() fmt.Println("Compiling test executor") targetDir, err := os.MkdirTemp("", "test_executor") if err != nil { panic("Error on preparing tmp directory for test 
executor binary") } defer os.RemoveAll(targetDir) testExecutorFile = filepath.Join(targetDir, "main") testExecutorFile = buildtest.MustBuildBinary("testdata/test_executor/main.go", testExecutorFile) runnerPath = buildtest.MustBuildBinary("../..", filepath.Join(targetDir, "gitlab-runner-integration")) code = m.Run() } func newBuild(t *testing.T, jobResponse spec.Job, shell string) *common.Build { dir := t.TempDir() t.Log("Build directory:", dir) jobResponse.JobInfo = testJobInfo build := &common.Build{ Job: jobResponse, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ BuildsDir: filepath.Join(dir, "builds"), CacheDir: filepath.Join(dir, "cache"), Executor: integrationTestCustomExecutor, Shell: shell, Custom: &common.CustomConfig{ ConfigExec: testExecutorFile, ConfigArgs: []string{shell, "config"}, PrepareExec: testExecutorFile, PrepareArgs: []string{shell, "prepare"}, RunExec: testExecutorFile, RunArgs: []string{shell, "run"}, CleanupExec: testExecutorFile, CleanupArgs: []string{shell, "cleanup"}, GracefulKillTimeout: timeoutInSeconds(10 * time.Second), ForceKillTimeout: timeoutInSeconds(10 * time.Second), }, }, }, ExecutorProvider: custom.NewProvider(runnerPath), SystemInterrupt: make(chan os.Signal, 1), Session: &session.Session{ DisconnectCh: make(chan error), TimeoutCh: make(chan error), }, } return build } func timeoutInSeconds(duration time.Duration) *int { seconds := duration.Seconds() secondsInInt := int(seconds) return &secondsInInt } func TestBuildSuccess(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) err = buildtest.RunBuild(t, build) assert.NoError(t, err) }) } func TestBuildScriptSections(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { if shell == "pwsh" || shell == "powershell" { // support for pwsh and powershell tracked in 
https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28119 t.Skip("pwsh, powershell not supported") } successfulBuild, err := common.GetSuccessfulMultilineCommandBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) require.NoError(t, err) buildtest.RunBuildWithSections(t, build) }) } func TestBuildSuccessRawVariable(t *testing.T) { tests := map[string]struct { command string }{ "bash": { command: "echo $TEST", }, "powershell": { command: "echo $env:TEST", }, "pwsh": { command: "echo $env:TEST", }, } shellstest.OnEachShell(t, func(t *testing.T, shell string) { test, ok := tests[shell] if !ok { t.Skip() } successfulBuild, err := common.GetRemoteBuildResponse(test.command) require.NoError(t, err) build := newBuild(t, successfulBuild, shell) value := "$VARIABLE$WITH$DOLLARS$$" build.Variables = append(build.Variables, spec.Variable{ Key: "TEST", Value: value, Raw: true, }) out, err := buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, value) }) } func TestBuildBuildFailure(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) build.Variables = append(build.Variables, spec.Variable{ Key: "IS_BUILD_ERROR", Value: "true", Public: true, }) err = buildtest.RunBuild(t, build) assert.Error(t, err) var buildErr *common.BuildError assert.ErrorAs(t, err, &buildErr) assert.Equal(t, command.BuildFailureExitCode, buildErr.ExitCode) }) } func TestBuildSystemFailure(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) build.Variables = append(build.Variables, spec.Variable{ Key: "IS_SYSTEM_ERROR", Value: "true", Public: true, }) err = buildtest.RunBuild(t, build) assert.Error(t, err) var exitError *exec.ExitError assert.ErrorAs(t, err, 
&exitError) t.Log(err) }) } func TestBuildUnknownFailure(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) build.Variables = append(build.Variables, spec.Variable{ Key: "IS_UNKNOWN_ERROR", Value: "true", Public: true, }) err = buildtest.RunBuild(t, build) assert.Error(t, err) var errUnknownFailure *command.ErrUnknownFailure assert.ErrorAs(t, err, &errUnknownFailure) assert.Equal(t, 255, errUnknownFailure.ExitCode) }) } func TestBuildCancel(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { build := newBuild(t, spec.Job{}, shell) buildtest.RunBuildWithCancel(t, build.Runner, setupExecutor) }) } func TestBuildMasking(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { build := newBuild(t, spec.Job{}, shell) buildtest.RunBuildWithMasking(t, build.Runner, setupExecutor) }) } func TestBuildWithGitStrategyCloneWithoutLFS(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) build.Runner.PreGetSourcesScript = "echo pre-clone-script" build.Runner.PostGetSourcesScript = "echo post-clone-script" build.Variables = append(build.Variables, spec.Variable{Key: "GIT_STRATEGY", Value: "clone"}) out, err := buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, "Created fresh repository") out, err = buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, "Created fresh repository") assert.Regexp(t, "Checking out [a-f0-9]+ as", out) assert.Contains(t, out, "pre-clone-script") assert.Contains(t, out, "post-clone-script") }) } func TestBuildWithGitStrategyCloneNoCheckoutWithoutLFS(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := 
common.GetSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) build.Runner.PreGetSourcesScript = "echo pre-clone-script" build.Runner.PostGetSourcesScript = "echo post-clone-script" build.Variables = append( build.Variables, spec.Variable{Key: "GIT_STRATEGY", Value: "clone"}, spec.Variable{Key: "GIT_CHECKOUT", Value: "false"}, ) out, err := buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, "Created fresh repository") out, err = buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, "Created fresh repository") assert.Contains(t, out, "Skipping Git checkout") assert.Contains(t, out, "pre-clone-script") assert.Contains(t, out, "post-clone-script") }) } func TestBuildWithGitSubmoduleStrategyRecursiveAndGitStrategyNone(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) build.Variables = append( build.Variables, spec.Variable{Key: "GIT_STRATEGY", Value: "none"}, spec.Variable{Key: "GIT_SUBMODULE_STRATEGY", Value: "recursive"}, ) out, err := buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.NotContains(t, out, "Created fresh repository") assert.NotContains(t, out, "Fetching changes") assert.Contains(t, out, "Skipping Git repository setup") assert.NotContains(t, out, "Updating/initializing submodules...") assert.NotContains(t, out, "Updating/initializing submodules recursively...") assert.Contains(t, out, "Skipping Git submodules setup") }) } func TestBuildWithGitSubmoduleStrategyRecursiveAndGitStrategyEmpty(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) build.Variables = append( build.Variables, spec.Variable{Key: "GIT_STRATEGY", Value: "empty"}, 
spec.Variable{Key: "GIT_SUBMODULE_STRATEGY", Value: "recursive"}, ) out, err := buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, "Skipping Git repository setup and creating an empty build directory") assert.Contains(t, out, "Skipping Git submodules setup") assert.NotContains(t, out, "Created fresh repository") assert.NotContains(t, out, "Fetching changes") assert.NotContains(t, out, "Updating/initializing submodules...") assert.NotContains(t, out, "Updating/initializing submodules recursively...") }) } func TestBuildWithoutDebugTrace(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) // The default build shouldn't have debug tracing enabled out, err := buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.NotRegexp(t, `[^$] echo Hello World`, out) }) } func TestBuildWithDebugTrace(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) build.Variables = append(build.Variables, spec.Variable{Key: "CI_DEBUG_TRACE", Value: "true"}) out, err := buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Regexp(t, `(>|[^$] )echo Hello World`, out) }) } func TestBuildMultilineCommand(t *testing.T) { buildGenerators := map[string]func() (spec.Job, error){ "bash": common.GetMultilineBashBuild, "powershell": common.GetMultilineBashBuildPowerShell, "pwsh": common.GetMultilineBashBuildPowerShell, } shellstest.OnEachShell(t, func(t *testing.T, shell string) { buildGenerator, ok := buildGenerators[shell] require.Truef(t, ok, "Missing build generator for shell %q", shell) multilineBuild, err := buildGenerator() require.NoError(t, err) build := newBuild(t, multilineBuild, shell) // The default build shouldn't have debug 
tracing enabled out, err := buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.NotContains(t, out, "echo") assert.Contains(t, out, "Hello World") assert.Contains(t, out, "collapsed multi-line command") }) } func TestBuildWithGoodGitSSLCAInfo(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetRemoteGitLabComTLSBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) build.Runner.URL = "https://gitlab.com" out, err := buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, "Created fresh repository") assert.Contains(t, out, "Updating/initializing submodules") }) } // TestBuildWithGitSSLAndStrategyFetch describes issue https://gitlab.com/gitlab-org/gitlab-runner/issues/2991 func TestBuildWithGitSSLAndStrategyFetch(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetRemoteGitLabComTLSBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) build.Runner.PreGetSourcesScript = "echo pre-clone-script" build.Runner.PostGetSourcesScript = "echo post-clone-script" build.Variables = append(build.Variables, spec.Variable{Key: "GIT_STRATEGY", Value: "fetch"}) out, err := buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, "Created fresh repository") assert.Regexp(t, "Checking out [a-f0-9]+ as", out) out, err = buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, "Fetching changes") assert.Regexp(t, "Checking out [a-f0-9]+ as", out) assert.Contains(t, out, "pre-clone-script") assert.Contains(t, out, "post-clone-script") }) } func TestBuildChangesBranchesWhenFetchingRepo(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetRemoteSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) build.Variables = 
append(build.Variables, spec.Variable{Key: "GIT_STRATEGY", Value: "fetch"}) out, err := buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, "Created fresh repository") // Another build using the same repo but different branch. build.GitInfo = common.GetLFSGitInfo(build.GitInfo.RepoURL) out, err = buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, "Checking out c8f2a61d as detached HEAD (ref is add-lfs-object)...") }) } func TestBuildPowerShellCatchesExceptions(t *testing.T) { tests := map[string]struct { cleanGitConfig *bool expectFreshRepoMessage bool }{ "no git cleanup": { expectFreshRepoMessage: true, }, "git cleanup explicitly enabled": { cleanGitConfig: &[]bool{true}[0], expectFreshRepoMessage: true, }, "git cleanup explicitly disabled": { cleanGitConfig: &[]bool{false}[0], expectFreshRepoMessage: false, }, } for _, shell := range []string{"powershell", "pwsh"} { t.Run(shell, func(t *testing.T) { for name, test := range tests { t.Run(name, func(t *testing.T) { helpers.SkipIntegrationTests(t, shell) successfulBuild, err := common.GetRemoteSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) build.Variables = append( build.Variables, spec.Variable{Key: "ErrorActionPreference", Value: "Stop"}, spec.Variable{Key: "GIT_STRATEGY", Value: "fetch"}, ) build.Runner.RunnerSettings.CleanGitConfig = test.cleanGitConfig checkFreshRepoMessage := assert.NotContains if test.expectFreshRepoMessage { checkFreshRepoMessage = assert.Contains } out, err := buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) assert.Contains(t, out, "Created fresh repository") out, err = buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) checkFreshRepoMessage(t, out, "Created fresh repository") assert.Regexp(t, "Checking out [a-f0-9]+ as", out) build.Variables = append( build.Variables, spec.Variable{Key: "ErrorActionPreference", Value: "Continue"}, ) out, 
err = buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) checkFreshRepoMessage(t, out, "Created fresh repository") assert.Regexp(t, "Checking out [a-f0-9]+ as", out) build.Variables = append( build.Variables, spec.Variable{Key: "ErrorActionPreference", Value: "SilentlyContinue"}, ) out, err = buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) checkFreshRepoMessage(t, out, "Created fresh repository") assert.Regexp(t, "Checking out [a-f0-9]+ as", out) }) } }) } } func TestBuildOnCustomDirectory(t *testing.T) { commands := map[string]string{ "bash": "pwd", "powershell": "pwd", "pwsh": "pwd", } tests := map[string]bool{ "custom directory defined": true, "custom directory not defined": false, } shellstest.OnEachShell(t, func(t *testing.T, shell string) { for testName, tt := range tests { t.Run(testName, func(t *testing.T) { cmd, ok := commands[shell] require.Truef(t, ok, "Missing command for shell %q", shell) tempDir := os.TempDir() dir := filepath.Join(tempDir, "custom", "directory") expectedDirectory := filepath.Join(dir, "0") // On Windows we don't check for the full path because Go can sometimes produce // a Windows short path and the shell a full path, resulting in a mismatch. 
if runtime.GOOS == "windows" { expectedDirectory = strings.TrimPrefix(expectedDirectory, tempDir) } successfulBuild, err := common.GetSuccessfulBuild() require.NoError(t, err) successfulBuild.Steps[0].Script = spec.StepScript{cmd} build := newBuild(t, successfulBuild, shell) if tt { build.Variables = append(build.Variables, spec.Variable{ Key: "IS_RUN_ON_CUSTOM_DIR", Value: dir, Public: true, }) } out, err := buildtest.RunBuildReturningOutput(t, build) assert.NoError(t, err) if tt { assert.Contains(t, out, expectedDirectory) } else { assert.NotContains(t, out, expectedDirectory) } }) } }) } func TestBuildLogLimitExceeded(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { build := newBuild(t, spec.Job{}, shell) buildtest.RunBuildWithJobOutputLimitExceeded(t, build.Runner, setupExecutor) }) } func TestBuildWithAccessToJobResponseFile(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) output, err := buildtest.RunBuildReturningOutput(t, build) require.NoError(t, err) assert.Contains(t, output, "job ID => 0") assert.Contains(t, output, fmt.Sprintf("job name => %s", testJobInfo.Name)) assert.Contains(t, output, fmt.Sprintf("job stage => %s", testJobInfo.Stage)) assert.Contains(t, output, fmt.Sprintf("job project ID => %d", testJobInfo.ProjectID)) assert.Contains(t, output, fmt.Sprintf("job project name => %s", testJobInfo.ProjectName)) }) } func TestCleanupProjectGitClone(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetSuccessfulBuild() require.NoError(t, err) build := newBuild(t, successfulBuild, shell) buildtest.RunBuildWithCleanupGitClone(t, build) }) } func TestCleanupProjectGitFetch(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { untrackedFilename := "untracked" successfulBuild, err := 
common.GetRemoteBuildResponse( buildtest.GetNewUntrackedFileIntoSubmodulesCommands(untrackedFilename, "", "")..., ) require.NoError(t, err) build := newBuild(t, successfulBuild, shell) buildtest.RunBuildWithCleanupGitFetch(t, build, untrackedFilename) }) } func TestCleanupProjectGitSubmoduleNormal(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { untrackedFile := "untracked" untrackedSubmoduleFile := "untracked_submodule" successfulBuild, err := common.GetRemoteBuildResponse( buildtest.GetNewUntrackedFileIntoSubmodulesCommands(untrackedFile, untrackedSubmoduleFile, "")..., ) require.NoError(t, err) build := newBuild(t, successfulBuild, shell) buildtest.RunBuildWithCleanupNormalSubmoduleStrategy(t, build, untrackedFile, untrackedSubmoduleFile) }) } func TestCleanupProjectGitSubmoduleRecursive(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { untrackedFile := "untracked" untrackedSubmoduleFile := "untracked_submodule" untrackedSubSubmoduleFile := "untracked_submodule_submodule" successfulBuild, err := common.GetRemoteBuildResponse( buildtest.GetNewUntrackedFileIntoSubmodulesCommands( untrackedFile, untrackedSubmoduleFile, untrackedSubSubmoduleFile, )..., ) require.NoError(t, err) build := newBuild(t, successfulBuild, shell) buildtest.RunBuildWithCleanupRecursiveSubmoduleStrategy(t, build, untrackedFile, untrackedSubmoduleFile, untrackedSubSubmoduleFile) }) } func setupExecutor(t *testing.T, build *common.Build) { build.ExecutorProvider = custom.NewProvider(runnerPath) } ================================================ FILE: executors/custom/terminal.go ================================================ //go:build !windows package custom import ( "errors" terminalsession "gitlab.com/gitlab-org/gitlab-runner/session/terminal" ) func (e *executor) TerminalConnect() (terminalsession.Conn, error) { return nil, errors.New("not yet supported") } ================================================ FILE: 
executors/custom/terminal_test.go ================================================ //go:build !integration && !windows package custom import ( "testing" "github.com/stretchr/testify/assert" ) func TestExecutor_Connect(t *testing.T) { e := new(executor) connection, err := e.TerminalConnect() assert.Nil(t, connection) assert.EqualError(t, err, "not yet supported") } ================================================ FILE: executors/custom/testdata/test_executor/.gitignore ================================================ main main.exe ================================================ FILE: executors/custom/testdata/test_executor/main.go ================================================ package main import ( "bytes" "encoding/json" "fmt" "os" "os/exec" "path/filepath" "strconv" "gitlab.com/gitlab-org/gitlab-runner/executors/custom/api" ) const ( isBuildError = "CUSTOM_ENV_IS_BUILD_ERROR" isSystemError = "CUSTOM_ENV_IS_SYSTEM_ERROR" isUnknownError = "CUSTOM_ENV_IS_UNKNOWN_ERROR" isRunOnCustomDir = "CUSTOM_ENV_IS_RUN_ON_CUSTOM_DIR" ) const ( stageConfig = "config" stagePrepare = "prepare" stageRun = "run" stageCleanup = "cleanup" ) var knownBuildStages = map[string]struct{}{ "prepare_script": {}, "get_sources": {}, "restore_cache": {}, "download_artifacts": {}, "build_script": {}, "after_script": {}, "archive_cache": {}, "archive_cache_on_failure": {}, "upload_artifacts_on_success": {}, "upload_artifacts_on_failure": {}, "cleanup_file_variables": {}, } func setBuildFailure(msg string, args ...interface{}) { fmt.Println("setting build failure") setFailure(api.BuildFailureExitCodeVariable, msg, args...) } func setSystemFailure(msg string, args ...interface{}) { fmt.Println("setting system failure") setFailure(api.SystemFailureExitCodeVariable, msg, args...) } func setFailure(failureType string, msg string, args ...interface{}) { fmt.Println() fmt.Printf(msg, args...) 
fmt.Println() exitCode := os.Getenv(failureType) code, err := strconv.Atoi(exitCode) if err != nil { panic(fmt.Sprintf("Error while parsing the variable: %v", err)) } fmt.Printf("Exitting with code %d\n", code) os.Exit(code) } func printJobResponseDetails() { type fakeJobInfo struct { Name string `json:"name"` Stage string `json:"stage"` ProjectID int `json:"project_id"` ProjectName string `json:"project_name"` } type fakeJobResponse struct { ID int `json:"id"` JobInfo fakeJobInfo `json:"job_info"` } jobResponseFile := os.Getenv(api.JobResponseFileVariable) file, err := os.Open(jobResponseFile) if err != nil { panic(fmt.Sprintf("Error while opening job response file %q: %v", jobResponseFile, err)) } defer func() { _ = file.Close() }() var jobResponse fakeJobResponse decoder := json.NewDecoder(file) err = decoder.Decode(&jobResponse) if err != nil { panic(fmt.Sprintf("Error while decoding job response file %q: %v", jobResponseFile, err)) } fmt.Println("Reading job response data:") fmt.Printf("job ID => %d\n", jobResponse.ID) fmt.Printf("job name => %s\n", jobResponse.JobInfo.Name) fmt.Printf("job stage => %s\n", jobResponse.JobInfo.Stage) fmt.Printf("job project ID => %d\n", jobResponse.JobInfo.ProjectID) fmt.Printf("job project name => %s\n", jobResponse.JobInfo.ProjectName) fmt.Println() } type stageFunc func(shell string, args []string) func main() { defer func() { r := recover() if r == nil { return } setSystemFailure("Executor panicked with: %v", r) }() shell := os.Args[1] stage := os.Args[2] var args []string if len(os.Args) > 3 { args = os.Args[3:] } stages := map[string]stageFunc{ stageConfig: config, stagePrepare: prepare, stageRun: run, stageCleanup: cleanup, } stageFn, ok := stages[stage] if !ok { setSystemFailure("Unknown stage %q", stage) } _, _ = fmt.Fprintf(os.Stderr, "Custom Executor binary - %q stage\n", stage) _, _ = fmt.Fprintf(os.Stderr, "Mocking execution of: %v\n", args) _, _ = fmt.Fprintln(os.Stderr) stageFn(shell, args) } func config(shell 
string, args []string) { customDir := os.Getenv(isRunOnCustomDir) if customDir == "" { return } concurrentID := os.Getenv("CUSTOM_ENV_CI_CONCURRENT_PROJECT_ID") projectSlug := os.Getenv("CUSTOM_ENV_CI_PROJECT_PATH_SLUG") dir := filepath.Join(customDir, concurrentID, projectSlug) type output struct { BuildsDir string `json:"builds_dir"` } jsonOutput, err := json.Marshal(output{BuildsDir: dir}) if err != nil { panic(fmt.Errorf("error while creating JSON output: %w", err)) } fmt.Print(string(jsonOutput)) } func prepare(shell string, args []string) { fmt.Println("PREPARE doesn't accept any arguments. It just does its job") fmt.Println() printJobResponseDetails() } func run(shell string, args []string) { fmt.Println("RUN accepts two arguments: the path to the script to execute and the stage of the job") fmt.Println() mockError() if len(args) < 1 { setSystemFailure("Missing script for the run stage") } output := bytes.NewBuffer(nil) cmd := createCommand(shell, args[0], args[1]) cmd.Stdout = output cmd.Stderr = output fmt.Printf("Executing: %#v\n\n", cmd) err := cmd.Run() if err != nil { setBuildFailure("Job script exited with: %v", err) } fmt.Printf(">>>>>>>>>>\n%s\n<<<<<<<<<<\n\n", output.String()) } func mockError() { if len(os.Getenv(isBuildError)) > 0 { // It's a build error. For example: user used an invalid // command in his script which ends with an error thrown // from the underlying shell. setBuildFailure("mocked build failure") } if len(os.Getenv(isSystemError)) > 0 { // It's a system error. For example: the Custom Executor // script implements a libvirt executor and before executing // the job it needs to prepare the VM. But the preparation // failed. setSystemFailure("mocked system failure") } if len(os.Getenv(isUnknownError)) > 0 { // This situation should not happen. Custom Executor script // should define the type of failure and return either "build // failure" or "system failure", using the error code values // provided by dedicated variables. 
		fmt.Println("mocked system failure")
		os.Exit(255)
	}
}

// createCommand builds the exec.Cmd used to execute a build-stage script
// with the requested shell. It exits with a system failure for an unknown
// build stage and panics (caught by main's recover) for an unknown shell.
func createCommand(shell string, script string, stage string) *exec.Cmd {
	if _, ok := knownBuildStages[stage]; !ok {
		// setSystemFailure terminates the process via os.Exit.
		setSystemFailure("Unknown build stage %q", stage)
	}

	shellConfigs := map[string]struct {
		command string
		args    []string
	}{
		"bash": {
			command: "bash",
			args:    []string{},
		},
		"powershell": {
			command: "powershell",
			args:    []string{"-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command"},
		},
		"pwsh": {
			command: "pwsh",
			args:    []string{"-NoProfile", "-NonInteractive", "-ExecutionPolicy", "Bypass", "-Command"},
		},
	}

	shellConfig, ok := shellConfigs[shell]
	if !ok {
		panic(fmt.Sprintf("Unknown shell %q", shell))
	}

	// shellConfigs is rebuilt on every call, so appending to its args
	// slice cannot alias state across invocations.
	args := append(shellConfig.args, script)

	return exec.Command(shellConfig.command, args...)
}

// cleanup mocks the "cleanup" stage; it takes no arguments and only prints
// an informational message.
func cleanup(shell string, args []string) {
	fmt.Println("CLEANUP doesn't accept any arguments. It just does its job")
	fmt.Println()
}

================================================
FILE: executors/default_executor_provider.go
================================================
package executors

import (
	"errors"

	"gitlab.com/gitlab-org/gitlab-runner/common"
)

// DefaultExecutorProvider is a common.ExecutorProvider implementation built
// from plain callbacks: Creator constructs executors, FeaturesUpdater and
// ConfigUpdater fill in feature/config information, and DefaultShellName is
// the shell reported by GetDefaultShell.
type DefaultExecutorProvider struct {
	Creator          func() common.Executor
	FeaturesUpdater  func(features *common.FeaturesInfo)
	ConfigUpdater    func(input *common.RunnerConfig, output *common.ConfigInfo)
	DefaultShellName string
}

// CanCreate reports whether a Creator callback was configured.
func (e DefaultExecutorProvider) CanCreate() bool {
	return e.Creator != nil
}

// Create returns a new executor from the Creator callback, or nil when no
// callback was configured.
func (e DefaultExecutorProvider) Create() common.Executor {
	if e.Creator == nil {
		return nil
	}
	return e.Creator()
}

// Acquire is a no-op: this provider has no per-job executor data to reserve.
func (e DefaultExecutorProvider) Acquire(config *common.RunnerConfig) (common.ExecutorData, error) {
	return nil, nil
}

// Release is the no-op counterpart to Acquire.
func (e DefaultExecutorProvider) Release(config *common.RunnerConfig, data common.ExecutorData) {}

// GetFeatures delegates to the FeaturesUpdater callback; it returns an error
// when no callback was configured.
func (e DefaultExecutorProvider) GetFeatures(features *common.FeaturesInfo) error {
	if e.FeaturesUpdater == nil {
		return errors.New("cannot evaluate features")
	}

	e.FeaturesUpdater(features)
	return nil
}

func (e
DefaultExecutorProvider) GetConfigInfo(input *common.RunnerConfig, output *common.ConfigInfo) { if e.ConfigUpdater == nil { return } e.ConfigUpdater(input, output) } func (e DefaultExecutorProvider) GetDefaultShell() string { return e.DefaultShellName } ================================================ FILE: executors/docker/autoscaler/autoscaler.go ================================================ package autoscaler import ( "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/executors/internal/autoscaler" ) func NewProvider(dockerProvider common.ExecutorProvider) common.ExecutorProvider { return autoscaler.New( dockerProvider, autoscaler.Config{MapJobImageToVMImage: false}, ) } ================================================ FILE: executors/docker/autoscaler/autoscaler_integration_test.go ================================================ //go:build integration package autoscaler_test import ( "context" "math/rand" "os" "strconv" "testing" "github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/filters" "github.com/docker/docker/api/types/network" "github.com/docker/docker/client" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/buildtest" "gitlab.com/gitlab-org/gitlab-runner/common/spec" docker_executor "gitlab.com/gitlab-org/gitlab-runner/executors/docker" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/autoscaler" "gitlab.com/gitlab-org/gitlab-runner/helpers" "gitlab.com/gitlab-org/gitlab-runner/helpers/docker" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" "gitlab.com/gitlab-org/gitlab-runner/helpers/ssh" "gitlab.com/gitlab-org/gitlab-runner/shells/shellstest" ) func newRunnerConfig(t *testing.T, shell string) *common.RunnerConfig { helpers.SkipIntegrationTests(t, "fleeting-plugin-static", "--version") // In theory, pwsh 
should work if getImage() is upgraded to use the alpine powershell image, // however, in practice, we get errors in CI with the pwsh helper image selected. // TODO: fix this for pwsh when using pwsh helper image if shell == "pwsh" || shell == "powershell" { t.Skip() } dir := t.TempDir() t.Log("Build directory:", dir) srv, err := ssh.NewStubServer("root", "password") require.NoError(t, err) t.Cleanup(func() { require.NoError(t, srv.Stop()) }) srv.ExecuteLocal = true image := getImage() return &common.RunnerConfig{ RunnerCredentials: common.RunnerCredentials{ Token: "runner-token", }, RunnerSettings: common.RunnerSettings{ BuildsDir: dir, Executor: "docker-autoscaler", Shell: shell, Cache: &cacheconfig.Config{}, Docker: &common.DockerConfig{ Image: image, }, Autoscaler: &common.AutoscalerConfig{ MaxUseCount: 1, CapacityPerInstance: 1, MaxInstances: 1, Plugin: "fleeting-plugin-static", PluginConfig: common.AutoscalerSettingsMap{ "instances": map[string]map[string]string{ "local": { "username": srv.User, "password": srv.Password, "timeout": "1m", "external_addr": srv.Host() + ":" + srv.Port(), "internal_addr": srv.Host() + ":" + srv.Port(), }, }, }, }, }, } } func setupAcquireBuild(t *testing.T, build *common.Build) { dockerProvider := docker_executor.NewProvider() provider := autoscaler.NewProvider(dockerProvider) data, err := provider.Acquire(build.Runner) require.NoError(t, err) build.ExecutorData = data build.ExecutorProvider = provider t.Cleanup(func() { provider.Release(build.Runner, build.ExecutorData) if shutdownable, ok := provider.(common.ManagedExecutorProvider); ok { shutdownable.Shutdown(context.Background(), nil) } }) } func TestBuildSuccess(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetRemoteSuccessfulBuild() require.NoError(t, err) build := &common.Build{ Job: successfulBuild, Runner: newRunnerConfig(t, shell), } setupAcquireBuild(t, build) require.NoError(t, buildtest.RunBuild(t, build)) 
}) } func TestBuildTimeout(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetRemoteSuccessfulBuild() require.NoError(t, err) successfulBuild.Steps[0].Script = []string{"sleep 60"} successfulBuild.RunnerInfo.Timeout = 15 build := &common.Build{ Job: successfulBuild, Runner: newRunnerConfig(t, shell), } setupAcquireBuild(t, build) runnerID := rand.Intn(999999999) build.ProjectRunnerID = runnerID build.Variables = append(successfulBuild.Variables, spec.Variable{ Key: featureflags.NetworkPerBuild, Value: "true", }) // run a job that times out err = buildtest.RunBuild(t, build) require.ErrorContains(t, err, "execution took longer than 15s seconds") // new docker client client, err := docker.New(docker.Credentials{}) require.NoError(t, err, "creating docker client") defer client.Close() nameFilter := filters.Arg("name", "-"+strconv.Itoa(runnerID)+"-") // ensure no build/predefine containers for this job were left behind containers, err := client.ContainerList(context.Background(), container.ListOptions{ Filters: filters.NewArgs(nameFilter), }) require.NoError(t, err) assert.Empty(t, containers) // ensure no networks for this job were left behind networks, err := client.NetworkList(context.Background(), network.ListOptions{ Filters: filters.NewArgs(nameFilter), }) require.NoError(t, err) assert.Empty(t, networks) // ensure no volumes for this job were left behind // unfortunately there isn't an API to list volumes... }) } func TestBuildSuccessUsingDockerHost(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetRemoteSuccessfulBuild() require.NoError(t, err) build := &common.Build{ Job: successfulBuild, Runner: newRunnerConfig(t, shell), } // explicitly set the docker host, which will override the use of connecting // via "dial-stdio" to ensure this method of connecting is still possible. 
if host := os.Getenv("DOCKER_HOST"); host != "" { build.Runner.Docker.Host = host } else { build.Runner.Docker.Host = client.DefaultDockerHost } setupAcquireBuild(t, build) require.NoError(t, buildtest.RunBuild(t, build)) }) } func TestBuildSuccessUsingDockerHostLegacyTunnel(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { successfulBuild, err := common.GetRemoteSuccessfulBuild() require.NoError(t, err) successfulBuild.Variables = append(successfulBuild.Variables, spec.Variable{ Key: featureflags.UseDockerAutoscalerDialStdio, Value: "false", }) build := &common.Build{ Job: successfulBuild, Runner: newRunnerConfig(t, shell), } setupAcquireBuild(t, build) require.NoError(t, buildtest.RunBuild(t, build)) }) } func TestBuildCancel(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { buildtest.RunBuildWithCancel(t, newRunnerConfig(t, shell), setupAcquireBuild) }) } func TestBuildMasking(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { buildtest.RunBuildWithMasking(t, newRunnerConfig(t, shell), setupAcquireBuild) }) } func TestBuildExpandedFileVariable(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { buildtest.RunBuildWithExpandedFileVariable(t, newRunnerConfig(t, shell), setupAcquireBuild) }) } ================================================ FILE: executors/docker/autoscaler/autoscaler_integration_unix_test.go ================================================ //go:build integration && !windows package autoscaler_test import "gitlab.com/gitlab-org/gitlab-runner/common" func getImage() string { return common.TestAlpineImage } ================================================ FILE: executors/docker/autoscaler/autoscaler_integration_windows_test.go ================================================ //go:build integration && windows package autoscaler_test import ( "fmt" syswindows "golang.org/x/sys/windows" "gitlab.com/gitlab-org/gitlab-runner/common" 
"gitlab.com/gitlab-org/gitlab-runner/helpers/container/windows" ) func getImage() string { v := syswindows.RtlGetVersion() windowsVersion := fmt.Sprintf("%v.%v.%v", v.MajorVersion, v.MinorVersion, v.BuildNumber) windowsVersion, _ = windows.Version(windowsVersion) return fmt.Sprintf(common.TestWindowsImage, "ltsc"+windowsVersion) } ================================================ FILE: executors/docker/config_updater.go ================================================ package docker import ( "strings" "gitlab.com/gitlab-org/gitlab-runner/common" ) func configUpdater(input *common.RunnerConfig, output *common.ConfigInfo) { if input.RunnerSettings.Docker != nil { output.Gpus = strings.Trim(input.RunnerSettings.Docker.Gpus, " ") } } ================================================ FILE: executors/docker/config_updater_test.go ================================================ //go:build !integration package docker import ( "strings" "testing" "github.com/stretchr/testify/assert" "gitlab.com/gitlab-org/gitlab-runner/common" ) func TestDockerConfigUpdate(t *testing.T) { testCases := map[string]struct { gpus string }{ "gpus set to all": { gpus: "all", }, "gpus with trailing space": { gpus: " ", }, } for tn, tc := range testCases { t.Run(tn, func(t *testing.T) { config := common.RunnerConfig{ RunnerSettings: common.RunnerSettings{Docker: &common.DockerConfig{Gpus: tc.gpus}}, } info := common.ConfigInfo{} configUpdater(&config, &info) assert.Equal(t, strings.Trim(tc.gpus, " "), info.Gpus) }) } } ================================================ FILE: executors/docker/consts.go ================================================ package docker import "time" const dockerCleanupTimeout = 5 * time.Minute const waitForContainerTimeout = 15 * time.Second const osTypeLinux = "linux" const osTypeWindows = "windows" const osTypeFreeBSD = "freebsd" ================================================ FILE: executors/docker/docker.go ================================================ package 
docker import ( "bytes" "compress/gzip" "context" "encoding/base64" "encoding/json" "errors" "fmt" "io" "net" "os" "sort" "strconv" "strings" "sync" "time" "github.com/bmatcuk/doublestar/v4" "github.com/containerd/errdefs" "github.com/docker/cli/opts" "github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/image" "github.com/docker/docker/api/types/network" "github.com/docker/docker/api/types/system" "github.com/docker/docker/client" "github.com/docker/docker/pkg/stdcopy" "github.com/hashicorp/go-version" v1 "github.com/opencontainers/image-spec/specs-go/v1" "github.com/sirupsen/logrus" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/buildlogger" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/executors" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/exec" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/labels" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/networks" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/prebuilt" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/pull" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/volumes" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/volumes/parser" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/volumes/permission" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/wait" "gitlab.com/gitlab-org/gitlab-runner/helpers" "gitlab.com/gitlab-org/gitlab-runner/helpers/container/helperimage" "gitlab.com/gitlab-org/gitlab-runner/helpers/docker" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" "gitlab.com/gitlab-org/gitlab-runner/helpers/limitwriter" "gitlab.com/gitlab-org/gitlab-runner/shells" ) const ( ExecutorStagePrepare common.ExecutorStage = "docker_prepare" ExecutorStageRun common.ExecutorStage = "docker_run" ExecutorStageCleanup common.ExecutorStage = "docker_cleanup" 
ExecutorStageBootstrap common.ExecutorStage = "docker_bootstrap" ExecutorStageCreatingBuildVolumes common.ExecutorStage = "docker_creating_build_volumes" ExecutorStageCreatingServices common.ExecutorStage = "docker_creating_services" ExecutorStageCreatingUserVolumes common.ExecutorStage = "docker_creating_user_volumes" ExecutorStagePullingImage common.ExecutorStage = "docker_pulling_image" ServiceLogOutputLimit = 64 * 1024 labelServiceType = "service" labelWaitType = "wait" // internalFakeTunnelHostname is an internal hostname we provide the Docker client // when we provide a tunnelled dialer implementation. Because we're overriding // the dialer, this domain should never be used by the client, but we use the // reserved TLD ".invalid" for safety. internalFakeTunnelHostname = "http://internal.tunnel.invalid" // runnerJobVarsNames is the name used to identify the all the job variables names. // It is used to allow step-runner to filter these variables once the gRPC service is started runnerJobVarsNames = "RUNNER_JOB_VAR_NAMES" ) var neverRestartPolicy = container.RestartPolicy{Name: "no"} var ( errVolumesManagerUndefined = errors.New("volumesManager is undefined") errNetworksManagerUndefined = errors.New("networksManager is undefined") ) type executor struct { executors.AbstractExecutor volumeParser parser.Parser newVolumePermissionSetter func() (permission.Setter, error) info system.Info serverAPIVersion *version.Version waiter wait.KillWaiter temporary []string // IDs of containers that should be removed buildContainerID string services []*serviceInfo // links used to use docker 'links' feature, which tied containers together // so that their hosts would resolve. // // This feature is now deprecated, but we emulate it using ExtraHosts, and // therefore links is now an array of ":" that // is provided to every container. 
links []string devices []container.DeviceMapping deviceRequests []container.DeviceRequest helperImageInfo helperimage.Info volumesManager volumes.Manager networksManager networks.Manager labeler labels.Labeler pullManager pull.Manager networkMode container.NetworkMode projectUniqRandomizedName string dockerConn *dockerConnection dockerConnector dockerConnector logConfig container.LogConfig } type dockerConnector func(ctx context.Context, options common.ExecutorPrepareOptions, executor *executor) error func (dc dockerConnector) Connect(ctx context.Context, options common.ExecutorPrepareOptions, executor *executor) error { if dc == nil { dc = connectDocker } return dc(ctx, options, executor) } type dockerTunnel struct { client executors.Client opts []client.Opt creds docker.Credentials } // newDockerTunnel returns a new dockerTunnel instance. IF the specified common.ExecutorData is of type executors.Environment, // this indicates we will be connecting to a remote docker daemon instance and should tunnel docker commands though a // executors.Client instance. In this case, the returned dockerTunnel will include a valid and initialized executors.Client // instance, with corresponding []client.Opt and docker.Credentials to initialize a docker.Client. // // Otherwise the returned dockerTunnel will have a nil executor.Client and []client.Opt, and a default docker.Credentials. func newDockerTunnel( ctx context.Context, options common.ExecutorPrepareOptions, build *common.Build, creds docker.Credentials, env common.ExecutorData, logger buildlogger.Logger, ) (*dockerTunnel, error) { if environment, ok := env.(executors.Environment); ok { tc, err := environment.Prepare(ctx, logger, options) if err != nil { return nil, fmt.Errorf("preparing environment: %w", err) } // We tunnel the docker connection for remote environments. 
// // To do this, we create a new dial context for Docker's client, whilst // also overridding the daemon hostname it would typically use (if it were to use // its own dialer). scheme, dialer, err := environmentDialContext(ctx, tc, creds.Host, build.IsFeatureFlagOn(featureflags.UseDockerAutoscalerDialStdio)) if err != nil { return nil, fmt.Errorf("creating env dialer: %w", err) } // If the scheme (docker uses it to define the protocol used) is "npipe" or "unix", we // need to use a "fake" host, otherwise when dialing from Linux to Windows or vice-versa // docker will complain because it doesn't think Linux can support "npipe" and doesn't // think Windows can support "unix". switch scheme { case "unix", "npipe", "dial-stdio": creds.Host = internalFakeTunnelHostname } return &dockerTunnel{ client: tc, opts: []client.Opt{client.WithDialContext(dialer)}, creds: creds, }, nil } return &dockerTunnel{client: nil, opts: nil, creds: creds}, nil } type dockerConnection struct { docker.Client tunnelClient executors.Client cancel func() } func (dc *dockerConnection) Close() error { if dc == nil { return nil } var err error if dc.Client != nil { err = dc.Client.Close() dc.Client = nil } if dc.tunnelClient != nil { err = errors.Join(err, dc.tunnelClient.Close()) dc.tunnelClient = nil } if dc.cancel != nil { dc.cancel() dc.cancel = nil } return err } // newDockerConnection returns a new dockerConnection instance using the executor.Client instance and connection info // embedded in the dockerTunnel instance returned by the factory function. If we're connecting to the local docker // daemon, the executor.Client instance will be nil (and that's OK). func newDockerConnection(dockerTunnel *dockerTunnel, cancel func()) (*dockerConnection, error) { dockerClient, err := docker.New(dockerTunnel.creds, dockerTunnel.opts...) 
if err != nil { return nil, fmt.Errorf("creating docker client: %w", err) } return &dockerConnection{Client: dockerClient, tunnelClient: dockerTunnel.client, cancel: cancel}, nil } // createDockerConnection creates a connection to a potentially remote docker daemon. The connection is encapsulated in // a dockerConnection object which includes a docker.Client instance and, if connecting to a remote docker daemon, an // executors.Client instance. // // Note that in the case of a remote docker daemon, we want to maintain a long-lived connection for the duration of the // job (including during the Cleanup stage). To achieve this, we don't want the context to be cancelled when the job is // cancelled or times out, so we create a new context here with a timeout of job-timeout + dockerCleanupTimeout. This // fixes https://gitlab.com/gitlab-org/gitlab-runner/-/issues/38725. func createDockerConnection(ctx context.Context, opts common.ExecutorPrepareOptions, e *executor) (*dockerConnection, error) { deadline, hasDeadline := ctx.Deadline() if !hasDeadline { deadline = time.Now().Add(e.Build.GetBuildTimeout()) } ctx, cancel := context.WithDeadline(context.Background(), deadline.Add(dockerCleanupTimeout)) dockerTunnel, err := newDockerTunnel( ctx, opts, e.Build, e.Config.Docker.Credentials, e.Build.ExecutorData, e.BuildLogger) if err != nil { cancel() return nil, fmt.Errorf("creating docker tunnel: %w", err) } return newDockerConnection(dockerTunnel, cancel) } var version1_44 = version.Must(version.NewVersion("1.44")) func (e *executor) getServiceVariables(serviceDefinition spec.Image) []string { variables := e.Build.GetAllVariables().PublicOrInternal() variables = append(variables, serviceDefinition.Variables...) 
return variables.Expand().StringList() } func (e *executor) expandAndGetDockerImage( imageName string, allowedImages []string, dockerOptions spec.ImageDockerOptions, imagePullPolicies []common.DockerPullPolicy, ) (*image.InspectResponse, error) { imageName, err := e.expandImageName(imageName, allowedImages) if err != nil { return nil, err } dockerOptions = dockerOptions.Expand(e.Build.GetAllVariables()) image, err := e.pullManager.GetDockerImage(imageName, dockerOptions, imagePullPolicies) if err != nil { return nil, err } return image, nil } func (e *executor) getHelperImage() (*image.InspectResponse, error) { if imageNameFromConfig := e.ExpandValue(e.Config.Docker.HelperImage); imageNameFromConfig != "" { e.BuildLogger.Debugln( "Pull configured helper_image for predefined container instead of import bundled image", imageNameFromConfig, "...", ) e.BuildLogger.Println("Using helper image: ", imageNameFromConfig, " (overridden, default would be ", e.helperImageInfo, ")") return e.pullManager.GetDockerImage(imageNameFromConfig, spec.ImageDockerOptions{}, nil) } e.BuildLogger.Debugln(fmt.Sprintf("Looking for prebuilt image %s...", e.helperImageInfo)) image, _, err := e.dockerConn.ImageInspectWithRaw(e.Context, e.helperImageInfo.String()) if err == nil { return &image, nil } // Try to load prebuilt image from local filesystem loadedImage := e.getLocalHelperImage() if loadedImage != nil { return loadedImage, nil } e.BuildLogger.Println("Using helper image: ", e.helperImageInfo.String()) // Fall back to getting image from registry e.BuildLogger.Debugln(fmt.Sprintf("Loading image form registry: %s", e.helperImageInfo)) return e.pullManager.GetDockerImage(e.helperImageInfo.String(), spec.ImageDockerOptions{}, nil) } func (e *executor) getLocalHelperImage() *image.InspectResponse { if e.helperImageInfo.Prebuilt == "" { return nil } image, err := prebuilt.Get(e.Context, e.dockerConn, e.helperImageInfo) if err != nil { e.BuildLogger.Debugln("Failed to load prebuilt:", err) } 
	return image
}

// getBuildImage expands and pulls the image configured for the build
// container, honoring the job's docker executor options and pull policies.
func (e *executor) getBuildImage() (*image.InspectResponse, error) {
	imageName, err := e.expandImageName(e.Build.Image.Name, []string{})
	if err != nil {
		return nil, err
	}

	dockerOptions := e.Build.Image.ExecutorOptions.Docker.Expand(e.Build.GetAllVariables())
	imagePullPolicies := e.Build.Image.PullPolicies

	// Fetch image
	image, err := e.pullManager.GetDockerImage(imageName, dockerOptions, imagePullPolicies)
	if err != nil {
		return nil, err
	}

	return image, nil
}

// fakeContainer builds a minimal container.Summary carrying only an ID and
// names.
func fakeContainer(id string, names ...string) *container.Summary {
	return &container.Summary{ID: id, Names: names}
}

// parseDeviceString parses a docker-style device specification of the form
// PathOnHost[:PathInContainer[:CgroupPermissions]]. Missing parts default to
// the host path and "rwm" permissions, matching 'docker run' behavior; more
// than three colon-separated parts is an error.
func (e *executor) parseDeviceString(deviceString string) (device container.DeviceMapping, err error) {
	// Split the device string PathOnHost[:PathInContainer[:CgroupPermissions]]
	parts := strings.Split(deviceString, ":")

	if len(parts) > 3 {
		return device, fmt.Errorf("too many colons")
	}

	device.PathOnHost = parts[0]

	// Optional container path
	if len(parts) >= 2 {
		device.PathInContainer = parts[1]
	} else {
		// default: device at same path in container
		device.PathInContainer = device.PathOnHost
	}

	// Optional permissions
	if len(parts) >= 3 {
		device.CgroupPermissions = parts[2]
	} else {
		// default: rwm, just like 'docker run'
		device.CgroupPermissions = "rwm"
	}

	return device, err
}

// bindDevices parses the device strings configured for the build container
// into e.devices.
func (e *executor) bindDevices() (err error) {
	e.devices, err = e.bindContainerDevices(e.Config.Docker.Devices)
	return err
}

// bindContainerDevices parses a list of device strings into device mappings,
// failing on the first malformed entry.
func (e *executor) bindContainerDevices(devices []string) ([]container.DeviceMapping, error) {
	mapping := []container.DeviceMapping{}

	for _, deviceString := range devices {
		device, err := e.parseDeviceString(deviceString)
		if err != nil {
			return nil, fmt.Errorf("failed to parse device string %q: %w", deviceString, err)
		}

		mapping = append(mapping, device)
	}

	return mapping, nil
}

// bindDeviceRequests parses the configured GPU string into e.deviceRequests.
func (e *executor) bindDeviceRequests() (err error) {
	e.deviceRequests, err = e.bindContainerDeviceRequests(e.Config.Docker.Gpus)
	return err
}

func (e *executor) bindContainerDeviceRequests(gpus string)
([]container.DeviceRequest, error) { if strings.TrimSpace(gpus) == "" { return nil, nil } var gpuOpts opts.GpuOpts err := gpuOpts.Set(gpus) if err != nil { return nil, fmt.Errorf("parsing gpus string %q: %w", gpus, err) } return gpuOpts.Value(), nil } func isInAllowedPrivilegedImages(image string, allowedPrivilegedImages []string) bool { if len(allowedPrivilegedImages) == 0 { return true } for _, allowedImage := range allowedPrivilegedImages { ok, _ := doublestar.Match(allowedImage, image) if ok { return true } } return false } func (e *executor) isInPrivilegedServiceList(serviceDefinition spec.Image) bool { return isInAllowedPrivilegedImages(serviceDefinition.Name, e.Config.Docker.AllowedPrivilegedServices) } func (e *executor) createService( serviceIndex int, service, version, image string, definition spec.Image, linkNames []string, ) (*serviceInfo, error) { if service == "" { return nil, common.MakeBuildError("invalid service image name: %s", definition.Name) } if e.volumesManager == nil { return nil, errVolumesManagerUndefined } var serviceName string if strings.HasPrefix(version, "@sha256") { serviceName = fmt.Sprintf("%s%s...", service, version) // service@digest } else { serviceName = fmt.Sprintf("%s:%s...", service, version) // service:version } dockerOptions := definition.ExecutorOptions.Docker.Expand(e.Build.GetAllVariables()) e.BuildLogger.Println("Starting service", serviceName) serviceImage, err := e.pullManager.GetDockerImage(image, dockerOptions, definition.PullPolicies) if err != nil { return nil, err } serviceSlug := strings.ReplaceAll(service, "/", "__") containerName := e.makeContainerName(fmt.Sprintf("%s-%d", serviceSlug, serviceIndex)) // this will fail potentially some builds if there's name collision _ = e.removeContainer(e.Context, containerName) config := e.createServiceContainerConfig(service, version, serviceImage.ID, definition) devices, err := e.getServicesDevices(image) if err != nil { return nil, err } deviceRequests, err := 
e.getServicesDeviceRequests()
	if err != nil {
		return nil, err
	}

	hostConfig, err := e.createHostConfigForService(e.isInPrivilegedServiceList(definition), devices, deviceRequests)
	if err != nil {
		return nil, err
	}

	platform := platformForImage(serviceImage, definition.ExecutorOptions)
	networkConfig := e.networkConfig(linkNames)

	e.BuildLogger.Debugln("Creating service container", containerName, "...")
	resp, err := e.dockerConn.ContainerCreate(e.Context, config, hostConfig, networkConfig, platform, containerName)
	if err != nil {
		return nil, err
	}

	e.BuildLogger.Debugln(fmt.Sprintf("Starting service container %s (%s)...", containerName, resp.ID))
	err = e.dockerConn.ContainerStart(e.Context, resp.ID, container.StartOptions{})
	if err != nil {
		// The container was created but failed to start: track it so
		// Cleanup() removes it later.
		e.temporary = append(e.temporary, resp.ID)
		return nil, err
	}

	ip, ports, err := e.getContainerIPAndExposedPorts(resp.ID)
	if err != nil {
		return nil, fmt.Errorf("getting exposed ports: %w", err)
	}

	return &serviceInfo{
		ID:    resp.ID,
		Name:  containerName,
		IP:    ip,
		Ports: ports,
	}, nil
}

// platformForImage builds a platform spec from an inspected image, but only
// when the user explicitly requested a platform via the executor options;
// otherwise it returns nil so Docker picks the native platform.
func platformForImage(image *image.InspectResponse, opts spec.ImageExecutorOptions) *v1.Platform {
	if image == nil || opts.Docker.Platform == "" {
		return nil
	}

	return &v1.Platform{
		Architecture: image.Architecture,
		OS:           image.Os,
		OSVersion:    image.OsVersion,
		Variant:      image.Variant,
	}
}

// processSecurityOpt processes security options and converts seccomp profile
// paths to inline JSON. Options that are not seccomp, are already inline JSON
// (start with "{"), or use the special values "unconfined"/"builtin" are
// passed through unchanged.
func (e *executor) processSecurityOpt(securityOpts []string) ([]string, error) {
	if len(securityOpts) == 0 {
		return securityOpts, nil
	}

	processed := make([]string, 0, len(securityOpts))
	for _, opt := range securityOpts {
		key, value, ok := strings.Cut(opt, "=")

		// Check if this is a seccomp option with a file path
		if ok && key == "seccomp" && !strings.HasPrefix(value, "{") && value != "unconfined" && value != "builtin" {
			// Read the seccomp profile from file
			profileJSON, err := os.ReadFile(value)
			if err != nil {
				return nil, fmt.Errorf("failed to read seccomp profile from %s: %w", value, err)
			}
			if !json.Valid(profileJSON) {
				return nil, fmt.Errorf("seccomp profile %s is not valid JSON", value)
			}
			// Create inline seccomp option with the file contents
			processed = append(processed, fmt.Sprintf("seccomp=%s", profileJSON))
			e.BuildLogger.Debugln("Loaded seccomp profile from", value)
		} else {
			// Pass through non-seccomp options or inline seccomp profiles as-is
			processed = append(processed, opt)
		}
	}

	return processed, nil
}

// createHostConfigForService builds the docker HostConfig used for service
// containers. A service runs privileged only when both the configuration
// (ServicesPrivileged, falling back to Privileged) and the per-image allow
// list (imageIsPrivileged) permit it.
func (e *executor) createHostConfigForService(imageIsPrivileged bool, devices []container.DeviceMapping, deviceRequests []container.DeviceRequest) (*container.HostConfig, error) {
	nanoCPUs, err := e.Config.Docker.GetServiceNanoCPUs()
	if err != nil {
		return nil, fmt.Errorf("service nano cpus: %w", err)
	}

	privileged := e.Config.Docker.Privileged
	if e.Config.Docker.ServicesPrivileged != nil {
		privileged = *e.Config.Docker.ServicesPrivileged
	}
	privileged = privileged && imageIsPrivileged

	var useInit *bool
	if e.Build.IsFeatureFlagOn(featureflags.UseInitWithDockerExecutor) {
		yes := true
		useInit = &yes
	}

	// Process security options to handle seccomp profile paths
	servicesSecurityOpt, err := e.processSecurityOpt(e.Config.Docker.ServicesSecurityOpt)
	if err != nil {
		return nil, fmt.Errorf("processing services security options: %w", err)
	}

	return &container.HostConfig{
		Resources: container.Resources{
			Memory:            e.Config.Docker.GetServiceMemory(),
			MemorySwap:        e.Config.Docker.GetServiceMemorySwap(),
			MemoryReservation: e.Config.Docker.GetServiceMemoryReservation(),
			CgroupParent:      e.getServiceCgroupParent(),
			CpusetCpus:        e.Config.Docker.ServiceCPUSetCPUs,
			CPUShares:         e.Config.Docker.ServiceCPUShares,
			NanoCPUs:          nanoCPUs,
			Devices:           devices,
			DeviceRequests:    deviceRequests,
		},
		DNS:           e.Config.Docker.DNS,
		DNSSearch:     e.Config.Docker.DNSSearch,
		RestartPolicy: neverRestartPolicy,
		ExtraHosts:    e.Config.Docker.ExtraHosts,
		Privileged:    privileged,
		SecurityOpt:   servicesSecurityOpt,
		Runtime:       e.Config.Docker.Runtime,
		UsernsMode:    container.UsernsMode(e.Config.Docker.UsernsMode),
		NetworkMode:   e.networkMode,
		Binds:         e.volumesManager.Binds(),
		ShmSize:       e.Config.Docker.ShmSize,
		Tmpfs:         e.Config.Docker.ServicesTmpfs,
		LogConfig:     e.logConfig,
		Init:          useInit,
	}, nil
}

// createServiceContainerConfig builds the docker container.Config for a
// service container: labels, environment, command/entrypoint overrides and
// the (expanded) user requested via executor options.
func (e *executor) createServiceContainerConfig(
	service, version, serviceImageID string,
	definition spec.Image,
) *container.Config {
	labels := e.prepareContainerLabels(map[string]string{
		"type":            labelServiceType,
		"service":         service,
		"service.version": version,
	})

	// NOTE: the follow is for backwards-compatibility.
	// See https://gitlab.com/gitlab-org/gitlab-runner/-/issues/39048
	// It adds the labels from the configuration with the gitlab-runner prefix.
	// The SSoT for the dockerLabelPrefix is the labels package, but lets avoid
	// exporting it or providing helper functions to add it.
	// The code below is an EXCEPTION and should be removed asap.
	const dockerLabelPrefix = "com.gitlab.gitlab-runner"
	for k, v := range e.Config.Docker.ContainerLabels {
		labels[fmt.Sprintf("%s.%s", dockerLabelPrefix, k)] = e.Build.Variables.ExpandValue(v)
	}

	config := &container.Config{
		Image:  serviceImageID,
		Labels: labels,
		Env:    e.getServiceVariables(definition),
	}

	if len(definition.Command) > 0 {
		config.Cmd = definition.Command
	}
	config.Entrypoint = e.overwriteEntrypoint(&definition)
	config.User = string(definition.ExecutorOptions.Docker.Expand(e.Build.GetAllVariables()).User)

	return config
}

// getServicesDevices returns the host device mappings whose configured image
// glob (ServicesDevices key) matches the given service image name.
func (e *executor) getServicesDevices(image string) ([]container.DeviceMapping, error) {
	var devices []container.DeviceMapping

	for imageGlob, deviceStrings := range e.Config.Docker.ServicesDevices {
		ok, err := doublestar.Match(imageGlob, image)
		if err != nil {
			return nil, fmt.Errorf("invalid service device image pattern: %s: %w", imageGlob, err)
		}
		if !ok {
			continue
		}

		dvs, err := e.bindContainerDevices(deviceStrings)
		if err != nil {
			return nil, err
		}
		devices = append(devices, dvs...)
	}

	return devices, nil
}

// getServicesDeviceRequests converts the configured service GPU string into
// docker device requests.
func (e *executor) getServicesDeviceRequests() ([]container.DeviceRequest, error) {
	return e.bindContainerDeviceRequests(e.Config.Docker.ServiceGpus)
}

// networkConfig builds the network configuration for a container, attaching
// the configured mac-address and — on user-defined networks — the given
// aliases.
func (e *executor) networkConfig(aliases []string) *network.NetworkingConfig {
	// setting a container's mac-address changed in API version 1.44
	if e.serverAPIVersion.LessThan(version1_44) {
		return e.networkConfigLegacy(aliases)
	}

	nm := string(e.networkMode)
	nc := network.NetworkingConfig{}

	if nm == "" {
		// docker defaults to using "bridge" network driver if none was specified.
		nc.EndpointsConfig = map[string]*network.EndpointSettings{
			network.NetworkDefault: {MacAddress: e.Config.Docker.MacAddress},
		}
		return &nc
	}

	nc.EndpointsConfig = map[string]*network.EndpointSettings{
		nm: {MacAddress: e.Config.Docker.MacAddress},
	}
	if e.networkMode.IsUserDefined() {
		nc.EndpointsConfig[nm].Aliases = aliases
	}

	return &nc
}

// Setting a container's mac-address changed in API version 1.44. This is the original/legacy/pre-1.44 way to set
// mac-address.
func (e *executor) networkConfigLegacy(aliases []string) *network.NetworkingConfig {
	if e.networkMode.UserDefined() == "" {
		return &network.NetworkingConfig{}
	}

	return &network.NetworkingConfig{
		EndpointsConfig: map[string]*network.EndpointSettings{
			e.networkMode.UserDefined(): {Aliases: aliases},
		},
	}
}

// getProjectUniqRandomizedName lazily computes and caches a per-execution
// unique name: the project-unique name plus a random 8-byte UUID suffix.
// NOTE(review): the UUID generation error is deliberately ignored here —
// presumably GenerateRandomUUID cannot fail in practice; verify.
func (e *executor) getProjectUniqRandomizedName() string {
	if e.projectUniqRandomizedName == "" {
		uuid, _ := helpers.GenerateRandomUUID(8)
		e.projectUniqRandomizedName = fmt.Sprintf("%s-%s", e.Build.ProjectUniqueName(), uuid)
	}

	return e.projectUniqRandomizedName
}

// Build and predefined container names are comprised of:
// - A runner project scoped ID (runner--project--concurrent-)
// - A unique randomized ID for each execution
// - The container's type (build, predefined, step-runner)
//
// For example: runner-linux-project-123-concurrent-2-0a1b2c3d-predefined
//
// A container of the same type is created _once_ per execution and re-used.
func (e *executor) makeContainerName(suffix string) string {
	return e.getProjectUniqRandomizedName() + "-" + suffix
}

// createBuildNetwork creates (or resolves) the network the build will run on
// and records the resulting network mode on the executor.
func (e *executor) createBuildNetwork() error {
	if e.networksManager == nil {
		return errNetworksManagerUndefined
	}

	networkMode, err := e.networksManager.Create(e.Context, e.Config.Docker.NetworkMode, e.Config.Docker.EnableIPv6)
	if err != nil {
		return err
	}

	e.networkMode = networkMode

	return nil
}

// cleanupNetwork removes any containers still attached to the per-build
// network and then removes the network itself. Inspect failures are logged
// and swallowed so cleanup is best-effort.
func (e *executor) cleanupNetwork(ctx context.Context) error {
	if e.networksManager == nil {
		return errNetworksManagerUndefined
	}

	// Nothing to do when the build is not on a user-defined network.
	if e.networkMode.UserDefined() == "" {
		return nil
	}

	inspectResponse, err := e.networksManager.Inspect(ctx)
	if err != nil {
		e.BuildLogger.Errorln("network inspect returned error ", err)
		return nil
	}

	for id := range inspectResponse.Containers {
		e.BuildLogger.Debugln("Removing Container", id, "...")
		err = e.removeContainer(ctx, id)
		if err != nil {
			e.BuildLogger.Errorln("remove container returned error ", err)
		}
	}

	return e.networksManager.Cleanup(ctx)
}

// isInPrivilegedImageList reports whether the given image is allowed to run
// privileged per the AllowedPrivilegedImages configuration.
func (e *executor) isInPrivilegedImageList(imageDefinition spec.Image) bool {
	return isInAllowedPrivilegedImages(imageDefinition.Name, e.Config.Docker.AllowedPrivilegedImages)
}

// containerConfigurator produces the three docker configuration objects
// needed to create a container.
type containerConfigurator interface {
	ContainerConfig(image *image.InspectResponse) (*container.Config, error)
	HostConfig() (*container.HostConfig, error)
	NetworkConfig(aliases []string) *network.NetworkingConfig
}

// defaultContainerConfigurator delegates configuration to the executor's
// create* helpers for a given container type and image definition.
type defaultContainerConfigurator struct {
	e                     *executor
	containerType         string
	imageDefinition       spec.Image
	cmd                   []string
	allowedInternalImages []string
}

var _ containerConfigurator = &defaultContainerConfigurator{}

func newDefaultContainerConfigurator(
	e *executor,
	containerType string,
	imageDefinition spec.Image,
	cmd, allowedInternalImages []string,
) *defaultContainerConfigurator {
	return &defaultContainerConfigurator{
		e:                     e,
		containerType:         containerType,
		imageDefinition:       imageDefinition,
		cmd:                   cmd,
		allowedInternalImages: allowedInternalImages,
	}
}

// ContainerConfig builds the container configuration, defaulting the hostname
// to the project-unique name when none is configured.
func (c *defaultContainerConfigurator) ContainerConfig(image *image.InspectResponse) (*container.Config, error) {
	hostname := c.e.Config.Docker.Hostname
	if hostname == "" {
		hostname = c.e.Build.ProjectUniqueName()
	}

	return c.e.createContainerConfig(
		c.containerType,
		c.imageDefinition,
		image,
		hostname,
		c.cmd,
	)
}

func (c *defaultContainerConfigurator) HostConfig() (*container.HostConfig, error) {
	return c.e.createHostConfig(
		c.containerType == buildContainerType,
		c.e.isInPrivilegedImageList(c.imageDefinition),
	)
}

func (c *defaultContainerConfigurator) NetworkConfig(aliases []string) *network.NetworkingConfig {
	return c.e.networkConfig(aliases)
}

// createContainer pulls/resolves the image, creates the container via the
// provided configurator, tracks it for cleanup, and returns its inspect data.
func (e *executor) createContainer(
	containerType string,
	imageDefinition spec.Image,
	allowedInternalImages []string,
	cfgTor containerConfigurator,
) (*container.InspectResponse, error) {
	if e.volumesManager == nil {
		return nil, errVolumesManagerUndefined
	}

	image, err := e.expandAndGetDockerImage(
		imageDefinition.Name,
		allowedInternalImages,
		imageDefinition.ExecutorOptions.Docker,
		imageDefinition.PullPolicies,
	)
	if err != nil {
		return nil, err
	}

	containerName := e.makeContainerName(containerType)

	config, err := cfgTor.ContainerConfig(image)
	if err != nil {
		return nil, fmt.Errorf("failed to create container configuration: %w", err)
	}

	hostConfig, err := cfgTor.HostConfig()
	if err != nil {
		return nil, err
	}

	networkConfig := cfgTor.NetworkConfig([]string{"build", containerName})

	var platform *v1.Platform
	// predefined/helper container always uses native platform
	if containerType == buildContainerType {
		platform = platformForImage(image, imageDefinition.ExecutorOptions)
	}

	// this will fail potentially some builds if there's name collision
	_ = e.removeContainer(e.Context, containerName)

	e.BuildLogger.Debugln("Creating container", containerName, "...")
	resp, err := e.dockerConn.ContainerCreate(e.Context, config, hostConfig, networkConfig, platform, containerName)
	// Track the container for cleanup even when creation reported an error,
	// as long as an ID was returned.
	if resp.ID != "" {
		e.temporary = append(e.temporary, resp.ID)
		if containerType == buildContainerType {
			e.buildContainerID = resp.ID
		}
	}
	if err != nil {
		return nil, err
	}

	inspect, err := e.dockerConn.ContainerInspect(e.Context, resp.ID)
	return &inspect, err
}

// createContainerConfig builds the container.Config for build/predefined
// containers: attached stdio for script streaming, labels, env, entrypoint
// override and — for build containers — the native-steps wrapper and the
// (validated) container user.
func (e *executor) createContainerConfig(
	containerType string,
	imageDefinition spec.Image,
	image *image.InspectResponse,
	hostname string,
	cmd []string,
) (*container.Config, error) {
	labels := e.prepareContainerLabels(map[string]string{"type": containerType})

	jobVars, err := e.prepareContainerEnvVariables()
	if err != nil {
		return nil, fmt.Errorf("setting job variables: %w", err)
	}

	config := &container.Config{
		Image:        image.ID,
		Hostname:     hostname,
		Cmd:          cmd,
		Labels:       labels,
		Tty:          false,
		AttachStdin:  true,
		AttachStdout: true,
		AttachStderr: true,
		OpenStdin:    true,
		StdinOnce:    true,
		Entrypoint:   e.overwriteEntrypoint(&imageDefinition),
		Env:          jobVars.StringList(),
	}

	//nolint:nestif
	if containerType == buildContainerType {
		if e.Build.UseNativeSteps() {
			config.Cmd = append([]string{bootstrappedBinary, "steps", "serve"}, config.Cmd...)
			// Environment variables interferes with steps. Given this situation, when
			// native steps are enabled, we no longer add the env vars to the container.
			config.Env = nil
		}

		// user config should only be set in build containers
		if user, err := e.getBuildContainerUser(imageDefinition); err != nil {
			return nil, err
		} else {
			config.User = user
		}
	}

	// setting a container's mac-address changed in API version 1.44
	if e.serverAPIVersion.LessThan(version1_44) {
		//nolint:staticcheck
		config.MacAddress = e.Config.Docker.MacAddress
	}

	return config, nil
}

// prepareContainerEnvVariables prepares the environment variables for the build container.
// When native steps are enabled, it compresses the list of job variable names and adds them
// to the environment as RUNNER_JOB_VAR_NAMES. This allows step-runner to identify and filter
// out job variables from the OS environment, preventing environment variable size limit issues.
//
// The variable names are gzip-compressed to minimize the size of the RUNNER_JOB_VAR_NAMES
// environment variable itself, which is important on systems with strict environment limits
// (particularly Windows).
//
// For non-native step builds, the function returns the variables unchanged since step-runner
// filtering is not needed.
func (e *executor) prepareContainerEnvVariables() (spec.Variables, error) {
	vars := e.Build.GetAllVariables()
	if !e.Build.UseNativeSteps() {
		return vars, nil
	}

	names := vars.GetAllVariableNames()
	compressedVarNames, err := gzipString(names)
	if err != nil {
		return nil, fmt.Errorf("job variables names compression failed: %w", err)
	}

	// Copy before appending so the cached variable list is not mutated.
	v := append([]spec.Variable{}, vars...)
	v = append(v, spec.Variable{
		Key:   runnerJobVarsNames,
		Value: compressedVarNames,
	})

	return v, nil
}

// gzipString compresses a string and returns the compressed string,
// base64 (standard) encoded.
func gzipString(src string) (string, error) {
	var b bytes.Buffer
	gz := gzip.NewWriter(&b)
	if _, err := gz.Write([]byte(src)); err != nil {
		return "", fmt.Errorf("writing to gzip writer: %w", err)
	}
	if err := gz.Close(); err != nil {
		return "", fmt.Errorf("closing gzip writer: %w", err)
	}

	return base64.StdEncoding.EncodeToString(b.Bytes()), nil
}

// getBuildContainerUser resolves the user for the build container and
// validates it against the AllowedUsers configuration.
func (e *executor) getBuildContainerUser(imageDefinition spec.Image) (string, error) {
	// runner config takes precedence
	user := e.Config.Docker.User
	if user == "" {
		user = string(imageDefinition.ExecutorOptions.Docker.Expand(e.Build.GetAllVariables()).User)
	}

	if !e.Config.Docker.IsUserAllowed(user) {
		return "", fmt.Errorf("user %q is not an allowed user: %v", user, e.Config.Docker.AllowedUsers)
	}

	return user, nil
}

// getCgroupParent returns the cgroup parent for build containers
func (e *executor) getCgroupParent() string {
	if path := e.Config.GetSlotCgroupPath(e.Build.ExecutorData); path != "" {
		return path
	}

	return e.Config.Docker.CgroupParent
}

// getServiceCgroupParent returns the cgroup parent for service containers
func (e *executor) getServiceCgroupParent() string {
	if path := e.Config.GetServiceSlotCgroupPath(e.Build.ExecutorData); path != "" {
		return path
	}

	return e.Config.Docker.ServiceCgroupParent
}

// createHostConfig builds the docker HostConfig for build/predefined
// containers from the runner configuration. A container runs privileged only
// when both the configuration and the per-image allow list permit it; init is
// only enabled for the build container and only behind its feature flag.
func (e *executor) createHostConfig(isBuildContainer, imageIsPrivileged bool) (*container.HostConfig, error) {
	nanoCPUs, err := e.Config.Docker.GetNanoCPUs()
	if err != nil {
		return nil, err
	}

	isolation := container.Isolation(e.Config.Docker.Isolation)
	if !isolation.IsValid() {
		return nil, fmt.Errorf("the isolation value %q is not valid. "+
			"the valid values are: 'process', 'hyperv', 'default' and an empty string", isolation)
	}

	ulimits, err := e.Config.Docker.GetUlimits()
	if err != nil {
		return nil, err
	}

	var useInit *bool
	if isBuildContainer && e.Build.IsFeatureFlagOn(featureflags.UseInitWithDockerExecutor) {
		yes := true
		useInit = &yes
	}

	// Process security options to handle seccomp profile paths
	securityOpt, err := e.processSecurityOpt(e.Config.Docker.SecurityOpt)
	if err != nil {
		return nil, fmt.Errorf("processing security options: %w", err)
	}

	return &container.HostConfig{
		Resources: container.Resources{
			Memory:            e.Config.Docker.GetMemory(),
			MemorySwap:        e.Config.Docker.GetMemorySwap(),
			MemoryReservation: e.Config.Docker.GetMemoryReservation(),
			CgroupParent:      e.getCgroupParent(),
			CpusetCpus:        e.Config.Docker.CPUSetCPUs,
			CpusetMems:        e.Config.Docker.CPUSetMems,
			CPUShares:         e.Config.Docker.CPUShares,
			NanoCPUs:          nanoCPUs,
			Devices:           e.devices,
			DeviceRequests:    e.deviceRequests,
			OomKillDisable:    e.Config.Docker.GetOomKillDisable(),
			DeviceCgroupRules: e.Config.Docker.DeviceCgroupRules,
			Ulimits:           ulimits,
		},
		DNS:           e.Config.Docker.DNS,
		DNSSearch:     e.Config.Docker.DNSSearch,
		Runtime:       e.Config.Docker.Runtime,
		Privileged:    e.Config.Docker.Privileged && imageIsPrivileged,
		GroupAdd:      e.Config.Docker.GroupAdd,
		UsernsMode:    container.UsernsMode(e.Config.Docker.UsernsMode),
		CapAdd:        e.Config.Docker.CapAdd,
		CapDrop:       e.Config.Docker.CapDrop,
		SecurityOpt:   securityOpt,
		RestartPolicy: neverRestartPolicy,
		ExtraHosts:    append(e.Config.Docker.ExtraHosts, e.links...),
		NetworkMode:   e.networkMode,
		IpcMode:       container.IpcMode(e.Config.Docker.IpcMode),
		Links:         e.Config.Docker.Links,
		Binds:         e.volumesManager.Binds(),
		OomScoreAdj:   e.Config.Docker.OomScoreAdjust,
		ShmSize:       e.Config.Docker.ShmSize,
		Isolation:     isolation,
		VolumeDriver:  e.Config.Docker.VolumeDriver,
		VolumesFrom:   e.Config.Docker.VolumesFrom,
		LogConfig:     e.logConfig,
		Tmpfs:         e.Config.Docker.Tmpfs,
		Sysctls:       e.Config.Docker.SysCtls,
		Init:          useInit,
	}, nil
}

// startAndWatchContainer attaches the given input to the container, streams
// its stdout/stderr to the build log, and waits for it to finish. For the
// Linux build container it installs a graceful-exit hook that SIGTERMs the
// container's processes first.
func (e *executor) startAndWatchContainer(ctx context.Context, id string, input io.Reader) error {
	dockerExec := exec.NewDocker(e.Context, e.dockerConn, e.waiter, e.Build.Log())

	stdout := e.BuildLogger.Stream(buildlogger.StreamWorkLevel, buildlogger.Stdout)
	defer stdout.Close()

	stderr := e.BuildLogger.Stream(buildlogger.StreamWorkLevel, buildlogger.Stderr)
	defer stderr.Close()

	streams := exec.IOStreams{
		Stdin:  input,
		Stdout: stdout,
		Stderr: stderr,
	}

	var gracefulExitFunc wait.GracefulExitFunc
	if id == e.buildContainerID && e.helperImageInfo.OSType != helperimage.OSTypeWindows {
		// send SIGTERM to all processes in the build container.
		gracefulExitFunc = e.sendSIGTERMToContainerProcs
	}

	err := dockerExec.Exec(ctx, id, streams, gracefulExitFunc)

	// if the context is canceled we attempt to remove the container,
	// as Exec making calls such as ContainerAttach that are canceled
	// can leave the container in a state that cannot easily be recovered
	// from.
	if ctx.Err() != nil {
		_ = e.removeContainer(e.Context, id)
	}

	return err
}

// removeContainer force-removes a container (and, unless VolumeKeep is set,
// its volumes) after disconnecting it from all networks. A not-found error is
// treated as success.
func (e *executor) removeContainer(ctx context.Context, id string) error {
	e.BuildLogger.Debugln("Removing container", id)

	e.disconnectNetwork(ctx, id)

	options := container.RemoveOptions{
		RemoveVolumes: !e.Config.Docker.VolumeKeep,
		Force:         true,
	}

	err := e.dockerConn.ContainerRemove(ctx, id, options)
	if docker.IsErrNotFound(err) {
		return nil
	}
	if err != nil {
		e.BuildLogger.Debugln("Removing container", id, "finished with error", err)
		return fmt.Errorf("removing container: %w", err)
	}

	e.BuildLogger.Debugln("Removed container", id)
	return nil
}

// disconnectNetwork best-effort disconnects the container from every network
// it is attached to; failures are only logged.
func (e *executor) disconnectNetwork(ctx context.Context, id string) {
	e.BuildLogger.Debugln("Disconnecting container", id, "from networks")

	netList, err := e.dockerConn.NetworkList(ctx, network.ListOptions{})
	if err != nil {
		e.BuildLogger.Debugln("Can't get network list. ListNetworks exited with", err)
		return
	}

	for _, network := range netList {
		for _, pluggedContainer := range network.Containers {
			if id == pluggedContainer.Name {
				err = e.dockerConn.NetworkDisconnect(ctx, network.ID, id, true)
				if err != nil {
					e.BuildLogger.Warningln(
						"Can't disconnect possibly zombie container",
						pluggedContainer.Name,
						"from network", network.Name, "->", err,
					)
				} else {
					e.BuildLogger.Warningln(
						"Possibly zombie container",
						pluggedContainer.Name, "is disconnected from network", network.Name,
					)
				}
				break
			}
		}
	}
}

// verifyAllowedImage checks the image against the allowed/internal image
// lists via the common verification helper.
func (e *executor) verifyAllowedImage(image, optionName string, allowedImages, internalImages []string) error {
	options := common.VerifyAllowedImageOptions{
		Image:          image,
		OptionName:     optionName,
		AllowedImages:  allowedImages,
		InternalImages: internalImages,
	}
	return common.VerifyAllowedImage(options, e.BuildLogger)
}

// expandImageName expands variables in the requested image name and verifies
// it against the allow lists; when no image is requested it falls back to the
// configured default image (which is always implicitly allowed).
func (e *executor) expandImageName(imageName string, allowedInternalImages []string) (string, error) {
	defaultDockerImage := e.ExpandValue(e.Config.Docker.Image)
	if imageName != "" {
		image := e.ExpandValue(imageName)
		allowedInternalImages = append(allowedInternalImages, defaultDockerImage)
		err := e.verifyAllowedImage(image, "images", e.Config.Docker.AllowedImages, allowedInternalImages)
		if err != nil {
			return "", err
		}
		return image, nil
	}

	if defaultDockerImage == "" {
		return "", errors.New("no Docker image specified to run the build in")
	}

	e.BuildLogger.
		WithFields(logrus.Fields{
			"executor": "docker",
			"image":    defaultDockerImage,
		}).
		Infoln("Using default image")

	return defaultDockerImage, nil
}

// overwriteEntrypoint returns the job-defined entrypoint unless entrypoint
// overwriting is disabled in the runner configuration.
func (e *executor) overwriteEntrypoint(image *spec.Image) []string {
	if len(image.Entrypoint) > 0 {
		if !e.Config.Docker.DisableEntrypointOverwrite {
			return image.Entrypoint
		}
		e.BuildLogger.Warningln("Entrypoint override disabled")
	}

	return nil
}

// connectDocker (re)establishes the docker connection, queries daemon info
// and server version, validates the daemon OS type, and stores the
// connection, info, parsed API version and kill-waiter on the executor.
func connectDocker(ctx context.Context, options common.ExecutorPrepareOptions, e *executor) error {
	// Close any previous connection before creating a new one.
	_ = e.dockerConn.Close()

	dockerConnection, err := createDockerConnection(ctx, options, e)
	if err != nil {
		return fmt.Errorf("creating docker connection: %w", err)
	}

	info, err := dockerConnection.Info(ctx)
	if err != nil {
		return fmt.Errorf("getting docker info: %w", err)
	}

	serverVersion, err := dockerConnection.ServerVersion(ctx)
	if err != nil {
		return fmt.Errorf("getting server version info: %w", err)
	}

	serverAPIVersion, err := version.NewVersion(serverVersion.APIVersion)
	if err != nil {
		return fmt.Errorf("parsing server API version %q: %w", serverVersion.APIVersion, err)
	}

	if err := validateOSType(info); err != nil {
		return err
	}

	e.BuildLogger.Debugln(fmt.Sprintf(
		"Connected to docker daemon (client version: %s, server version: %s, api version: %s, kernel: %s, os: %s/%s)",
		dockerConnection.ClientVersion(), info.ServerVersion, serverVersion.APIVersion, info.KernelVersion, info.OSType, info.Architecture,
	))

	e.dockerConn = dockerConnection
	e.info = info
	e.serverAPIVersion = serverAPIVersion
	e.waiter = wait.NewDockerKillWaiter(dockerConnection)

	return nil
}

type contextDialerFunc = func(ctx context.Context, network, addr string) (net.Conn, error)

// environmentDialContext returns the scheme and dialer used to reach the
// docker daemon of an autoscaler-provisioned environment. The host is
// resolved from the argument, then DOCKER_HOST, then the client default.
func environmentDialContext(
	ctx context.Context,
	executorClient executors.Client,
	host string,
	useDockerAutoscalerDialStdio bool,
) (string, contextDialerFunc, error) {
	systemHost := host == ""
	if host == "" {
		host = os.Getenv("DOCKER_HOST")
	}
	if host == "" {
		host = client.DefaultDockerHost
	}

	u, err := client.ParseHostURL(host)
	if err != nil {
		return "", nil, fmt.Errorf("parsing docker host: %w", err)
	}

	if !useDockerAutoscalerDialStdio {
		return u.Scheme, func(ctx context.Context, network, addr string) (net.Conn, error) {
			conn, err := executorClient.Dial(u.Scheme, u.Host)
			if err != nil {
				return nil, fmt.Errorf("dialing environment connection: %w", err)
			}
			return conn, nil
		}, nil
	}

	return "dial-stdio", func(_ context.Context, network, addr string) (net.Conn, error) {
		// DialRun doesn't want just a context for dialing, but one for a long-lived connection, including cleanup.
		// We don't want this context to be cancelled when the job is cancelled or times out since that would prevent
		// cleanup.

		// if the host was explicit, we try to use this even with dial-stdio
		cmd := fmt.Sprintf("docker -H %s system dial-stdio", host)
		// rather than use this system's host, we use the remote system's default
		if systemHost {
			cmd = "docker system dial-stdio"
		}

		return executorClient.DialRun(ctx, cmd)
	}, nil
}

// validateOSType checks that the OS type reported by the docker daemon's
// info response is one we support.
func validateOSType(info system.Info) error {
	switch info.OSType {
	case osTypeLinux, osTypeWindows, osTypeFreeBSD:
		return nil
	}

	return fmt.Errorf("unsupported os type: %s", info.OSType)
}

// createDependencies runs the ordered list of setup steps a build needs
// (labels, networks, devices, volumes, bootstrap, services), stopping at the
// first failure. Order matters: e.g. the network must exist before services
// are created.
func (e *executor) createDependencies() error {
	createDependenciesStrategy := []func() error{
		e.createLabeler,
		e.createNetworksManager,
		e.createBuildNetwork,
		e.createPullManager,
		e.bindDevices,
		e.bindDeviceRequests,
		e.createVolumesManager,
		e.createVolumes,
		e.createBuildVolume,
		e.bootstrap,
		e.createServices,
	}

	for _, setup := range createDependenciesStrategy {
		err := setup()
		if err != nil {
			return err
		}
	}

	return nil
}

// createVolumes creates every user-defined volume from the runner
// configuration.
func (e *executor) createVolumes() error {
	e.SetCurrentStage(ExecutorStageCreatingUserVolumes)
	e.BuildLogger.Debugln("Creating user-defined volumes...")

	if e.volumesManager == nil {
		return errVolumesManagerUndefined
	}

	for _, volume := range e.Config.Docker.Volumes {
		err := e.volumesManager.Create(e.Context, volume)
		if err != nil {
			return err
		}
	}

	return nil
}

// createBuildVolume creates the volume backing the build directory: a
// persistent volume for the fetch git strategy, a temporary one otherwise.
// An already-defined volume is not an error.
func (e *executor) createBuildVolume() error {
	e.SetCurrentStage(ExecutorStageCreatingBuildVolumes)
	e.BuildLogger.Debugln("Creating build volume...")

	if e.volumesManager == nil {
		return errVolumesManagerUndefined
	}

	jobsDir := e.Build.RootDir

	var err error

	if e.Build.GetGitStrategy() == common.GitFetch {
		err = e.volumesManager.Create(e.Context, jobsDir)
		if err == nil {
			return nil
		}
	} else {
		err = e.volumesManager.CreateTemporary(e.Context, jobsDir)
	}

	if err != nil {
		var volDefinedErr *volumes.ErrVolumeAlreadyDefined
		if !errors.As(err, &volDefinedErr) {
			return err
		}
	}

	return nil
}

// Prepare validates the docker configuration, connects to the daemon,
// resolves the helper image, prepares the build directory and shell, and
// creates all build dependencies (networks, volumes, services, ...).
func (e *executor) Prepare(options common.ExecutorPrepareOptions) error {
	e.SetCurrentStage(ExecutorStagePrepare)

	if options.Config.Docker == nil {
		return errors.New("missing docker configuration")
	}

	e.AbstractExecutor.PrepareConfiguration(options)

	var err error
	e.logConfig, err = options.Config.Docker.GetLogConfig()
	if err != nil {
		return &common.BuildError{
			Inner:         fmt.Errorf("creating docker log configuration: %w", err),
			FailureReason: common.RunnerSystemFailure,
		}
	}

	err = e.dockerConnector.Connect(e.Context, options, e)
	if err != nil {
		return err
	}

	e.helperImageInfo, err = e.prepareHelperImage()
	if err != nil {
		return err
	}

	// setup default executor options based on OS type
	e.setupDefaultExecutorOptions(e.helperImageInfo.OSType)

	err = e.prepareBuildsDir(options)
	if err != nil {
		return err
	}

	err = e.AbstractExecutor.PrepareBuildAndShell()
	if err != nil {
		return err
	}

	if e.BuildShell.PassFile {
		return errors.New("docker doesn't support shells that require script file")
	}

	imageName, err := e.expandImageName(e.Build.Image.Name, []string{})
	if err != nil {
		return err
	}

	e.BuildLogger.Println("Using Docker executor with image", imageName, "...")

	if e.Config.Docker.VolumeKeep {
		e.BuildLogger.Warningln("volume_keep is enabled: Docker volumes will not be removed after job completion and may accumulate on disk")
	}

	err = e.createDependencies()
	if err != nil {
		return err
	}

	return nil
}

// setupDefaultExecutorOptions sets OS-dependent defaults (build/cache dirs,
// shell, runner command, volume parser and permission setter). Existing
// parser/setter values are kept, which allows tests to inject their own.
func (e *executor) setupDefaultExecutorOptions(os string) {
	switch os {
	case helperimage.OSTypeWindows:
		e.DefaultBuildsDir = `C:\builds`
		e.DefaultCacheDir = `C:\cache`

		e.ExecutorOptions.Shell.Shell = shells.SNPowershell
		e.ExecutorOptions.Shell.RunnerCommand = "gitlab-runner-helper"

		if e.volumeParser == nil {
			e.volumeParser = parser.NewWindowsParser(e.ExpandValue)
		}
		if e.newVolumePermissionSetter == nil {
			e.newVolumePermissionSetter = func() (permission.Setter, error) {
				return permission.NewDockerWindowsSetter(), nil
			}
		}

	default:
		e.DefaultBuildsDir = `/builds`
		e.DefaultCacheDir = `/cache`

		e.ExecutorOptions.Shell.Shell = "bash"
		e.ExecutorOptions.Shell.RunnerCommand = "/usr/bin/gitlab-runner-helper"

		if e.volumeParser == nil {
			e.volumeParser = parser.NewLinuxParser(e.ExpandValue)
		}
		if e.newVolumePermissionSetter == nil {
			e.newVolumePermissionSetter = func() (permission.Setter, error) {
				helperImage, err := e.getHelperImage()
				if err != nil {
					return nil, err
				}
				return permission.NewDockerLinuxSetter(e.dockerConn, e.Build.Log(), helperImage), nil
			}
		}
	}
}

// prepareHelperImage resolves the helper image matching the daemon's
// OS/architecture/kernel and the runner configuration.
func (e *executor) prepareHelperImage() (helperimage.Info, error) {
	return helperimage.Get(common.AppVersion.Version, helperimage.Config{
		OSType:        e.info.OSType,
		Architecture:  e.info.Architecture,
		KernelVersion: e.info.KernelVersion,
		Shell:         e.Config.Shell,
		Flavor:        e.ExpandValue(e.Config.Docker.HelperImageFlavor),
		ProxyExec:     e.Config.IsProxyExec(),
		Concrete:      e.Build.IsFeatureFlagOn(featureflags.UseConcrete),
	})
}

// prepareBuildsDir marks the builds directory as shared when it is backed by
// a host-mounted volume.
func (e *executor) prepareBuildsDir(options common.ExecutorPrepareOptions) error {
	if e.volumeParser == nil {
		return common.MakeBuildError("missing volume parser")
	}

	isHostMounted, err := volumes.IsHostMountedVolume(e.volumeParser, e.RootDir(), options.Config.Docker.Volumes...)
	if err != nil {
		return &common.BuildError{Inner: err}
	}

	// We need to set proper value for e.SharedBuildsDir because
	// it's required to properly start the job, what is done inside of
	// e.AbstractExecutor.Prepare()
	// And a started job is required for Volumes Manager to work, so it's
	// done before the manager is even created.
	if isHostMounted {
		e.SharedBuildsDir = true
	}

	return nil
}

// Cleanup removes all temporary containers concurrently, then cleans up
// volumes and the build network, and finally closes the docker connection.
// It uses its own timeout-bound context since the job context may already be
// cancelled.
func (e *executor) Cleanup() {
	if e.Config.Docker == nil {
		// if there's no Docker config, we got here because Prepare() failed
		// and there's nothing to cleanup.
		return
	}

	e.SetCurrentStage(ExecutorStageCleanup)

	var wg sync.WaitGroup

	// create a new context for cleanup in case the main context has expired or been cancelled.
	ctx, cancel := context.WithTimeout(context.Background(), dockerCleanupTimeout)
	defer cancel()

	defer func() {
		if err := e.dockerConn.Close(); err != nil {
			e.BuildLogger.WithFields(logrus.Fields{"error": err}).Debugln("Failed to close the client")
		}
	}()

	remove := func(id string) {
		wg.Add(1)
		go func() {
			if err := e.removeContainer(ctx, id); err != nil {
				e.BuildLogger.WithFields(logrus.Fields{"error": err}).Errorln("Failed to remove container", id)
			}
			wg.Done()
		}()
	}

	for _, temporaryID := range e.temporary {
		remove(temporaryID)
	}

	wg.Wait()

	if err := e.cleanupVolume(ctx); err != nil {
		e.BuildLogger.WithFields(logrus.Fields{"error": err}).Errorln("Failed to cleanup volumes")
	}

	if err := e.cleanupNetwork(ctx); err != nil {
		e.BuildLogger.WithFields(logrus.Fields{
			"network": e.networkMode.NetworkName(),
			"error":   err,
		}).Errorln("Failed to remove network for build")
	}

	e.AbstractExecutor.Cleanup()
}

// sendSIGTERMToContainerProcs exec's into the specified container and executes the script
// shells.sendSIGTERMToContainerProcs, which (unsurprisingly) sends SIGTERM to all processes in the container. This
// Effectively gives the processes in the container a chance to exit gracefully (if they listen for SIGTERM).
func (e *executor) sendSIGTERMToContainerProcs(ctx context.Context, containerID string) error {
	e.BuildLogger.Debugln("Emitting SIGTERM to processes in container", containerID)

	return e.execScriptOnContainer(ctx, containerID, shells.ContainerSigTermScriptForLinux)
}

// Because docker error types are in fact interfaces with a unique identifying method, it's not possible to use
// errors.Is or errors.As on them. And because we wrap those errors as they are returned up the chain, we can't use
// errdefs directly. Do this instead.
func shouldIgnoreDockerError(err error, isFuncs ...func(error) bool) bool { if err == nil { return true } for e := err; e != nil; e = errors.Unwrap(e) { for _, is := range isFuncs { if is(e) { return true } } } return false } func (e *executor) execScriptOnContainer(ctx context.Context, containerID string, script ...string) (err error) { action := "" execConfig := container.ExecOptions{ Tty: false, AttachStderr: true, AttachStdout: true, Cmd: append([]string{"sh", "-c"}, script...), } defer func() { if !shouldIgnoreDockerError(err, errdefs.IsConflict, errdefs.IsNotFound) { e.Config.Log().WithFields(logrus.Fields{"error": err}).Warningln(action, err) } }() exec, err := e.dockerConn.ContainerExecCreate(ctx, containerID, execConfig) if err != nil { action = "Failed to exec create to container:" return err } resp, err := e.dockerConn.ContainerExecAttach(ctx, exec.ID, container.ExecStartOptions{}) if err != nil { action = "Failed to exec attach to container:" return err } defer resp.Close() ctx, cancel := context.WithCancel(ctx) defer cancel() go func() { <-ctx.Done() resp.Close() }() // Copy any output generated by running the script (typically there will be none) to runner's stdout/stderr... 
_, err = stdcopy.StdCopy(os.Stdout, os.Stderr, resp.Reader) if err != nil { action = "Failed to read from attached container:" return err } return nil } func (e *executor) cleanupVolume(ctx context.Context) error { if e.volumesManager == nil { e.BuildLogger.Debugln("Volumes manager is empty, skipping volumes cleanup") return nil } err := e.volumesManager.RemoveTemporary(ctx) if err != nil { return fmt.Errorf("remove temporary volumes: %w", err) } return nil } func (e *executor) createHostConfigForServiceHealthCheck(service *serviceInfo) *container.HostConfig { var extraHosts []string // we only get a service IP from the default network, for other networks, Docker // already provides DNS entries for _, ip := range service.IP { extraHosts = []string{service.ID[:min(12, len(service.ID))] + ":" + ip} } return &container.HostConfig{ RestartPolicy: neverRestartPolicy, ExtraHosts: extraHosts, NetworkMode: e.networkMode, LogConfig: e.logConfig, } } // addServiceHealthCheckEnvironment returns environment variables mimicing // the legacy container links networking feature of Docker, where environment // variables are provided with the hostname and port of the linked service our // health check is performed against. // // The hostname we provide is the container's short ID (the first 12 characters // of a full container ID). The short ID, as opposed to the full ID, is // internally resolved to the container's IP address by Docker's built-in DNS // service. // // The legacy container links (https://docs.docker.com/network/links/) network // feature is deprecated. 
When we remove support for links, the healthcheck // system can be updated to no longer rely on environment variables func (e *executor) addServiceHealthCheckEnvironment(service *serviceInfo) ([]string, error) { environment := []string{} if len(service.Ports) == 0 { return environment, fmt.Errorf("service %q has no exposed ports", service.Name) } environment = append(environment, "WAIT_FOR_SERVICE_TCP_ADDR="+service.ID[:12]) for _, port := range service.Ports { environment = append(environment, fmt.Sprintf("WAIT_FOR_SERVICE_%d_TCP_PORT=%d", port, port)) } return environment, nil } //nolint:gocognit func (e *executor) getContainerIPAndExposedPorts(id string) ([]string, []int, error) { // We either wait for the user's provided timeout, or our default, whichever is larger. // // The reason we don't wait for the smaller timeout is because users often set WaitForServicesTimeout=-1, // or a low number, to indicate they want to skip the healthcheck. In this scenario, we're not using // it for the healthcheck, but the wait for the container to come up. 
timeout := max(e.Config.Docker.WaitForServicesTimeout, common.DefaultWaitForServicesTimeout) var inspect container.InspectResponse start := time.Now() for { if time.Since(start) > time.Duration(timeout)*time.Second { return nil, nil, fmt.Errorf("service failed to start after %v", time.Since(start)) } var err error inspect, err = e.dockerConn.ContainerInspect(e.Context, id) if err != nil { return nil, nil, err } if inspect.State.Status != container.StateCreated { break } time.Sleep(time.Second) } var ip []string if inspect.NetworkSettings.IPAddress != "" { //nolint:staticcheck ip = append(ip, inspect.NetworkSettings.IPAddress) //nolint:staticcheck } if inspect.NetworkSettings.GlobalIPv6Address != "" { //nolint:staticcheck ip = append(ip, inspect.NetworkSettings.GlobalIPv6Address) //nolint:staticcheck } for _, env := range inspect.Config.Env { key, val, ok := strings.Cut(env, "=") if !ok { continue } if strings.EqualFold(key, "HEALTHCHECK_TCP_PORT") { port, err := strconv.ParseInt(val, 10, 32) if err != nil { return nil, nil, fmt.Errorf("invalid health check tcp port: %v", val) } return ip, []int{int(port)}, nil } } // maxPortsCheck is the maximum number of ports that we'll check to see // if a service is running const maxPortsCheck = 20 var ports []int for port := range inspect.Config.ExposedPorts { start, end, err := port.Range() if err == nil && port.Proto() == "tcp" { for i := start; i <= end && len(ports) < maxPortsCheck; i++ { ports = append(ports, i) } } } sort.Ints(ports) return ip, ports, nil } func (e *executor) readContainerLogs(containerID string) string { var buf bytes.Buffer options := container.LogsOptions{ ShowStdout: true, ShowStderr: true, Timestamps: true, } hijacked, err := e.dockerConn.ContainerLogs(e.Context, containerID, options) if err != nil { return strings.TrimSpace(err.Error()) } defer func() { _ = hijacked.Close() }() // limit how much data we read from the container log to // avoid memory exhaustion w := limitwriter.New(&buf, 
ServiceLogOutputLimit) _, _ = stdcopy.StdCopy(w, w, hijacked) return strings.TrimSpace(buf.String()) } // prepareContainerLabels returns a map of the default labels combined with the passed otherLabels // and the docker labels from the config. func (e *executor) prepareContainerLabels(otherLabels map[string]string) map[string]string { l := e.labeler.Labels(otherLabels) for k, v := range e.Config.Docker.ContainerLabels { l[k] = e.Build.Variables.ExpandValue(v) } return l } ================================================ FILE: executors/docker/docker_command.go ================================================ package docker import ( "bytes" "context" "errors" "fmt" "strings" "sync" "time" "github.com/docker/docker/api/types/container" "github.com/sirupsen/logrus" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/executors" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/exec" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/user" "gitlab.com/gitlab-org/gitlab-runner/helpers/docker" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" "gitlab.com/gitlab-org/gitlab-runner/helpers/limitwriter" ) const ( buildContainerType = "build" predefinedContainerType = "predefined" ) type commandExecutor struct { executor helperContainer *container.InspectResponse buildContainer *container.InspectResponse lock sync.Mutex terminalWaitForContainerTimeout time.Duration } func (s *commandExecutor) getBuildContainer() *container.InspectResponse { s.lock.Lock() defer s.lock.Unlock() return s.buildContainer } func (s *commandExecutor) Prepare(options common.ExecutorPrepareOptions) error { err := s.executor.Prepare(options) if err != nil { return err } s.BuildLogger.Debugln("Starting Docker command...") if len(s.BuildShell.DockerCommand) == 0 { return errors.New("script is not compatible with Docker") } _, err = s.getHelperImage() if err != nil { return err } _, err = 
s.getBuildImage() if err != nil { return err } if s.isUmaskDisabled() { s.BuildLogger.Println("Not using umask - FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR is set!") } return nil } func (s *commandExecutor) isUmaskDisabled() bool { // Not usable with docker-windows executor if s.info.OSType == osTypeWindows { return false } if !s.Build.IsFeatureFlagOn(featureflags.DisableUmaskForDockerExecutor) { return false } return true } func (s *commandExecutor) Run(cmd common.ExecutorCommand) error { if cmd.Predefined { return s.runContainer(predefinedContainerType, cmd) } else { return s.runContainer(buildContainerType, cmd) } } func (s *commandExecutor) runContainer(containerType string, cmd common.ExecutorCommand) error { maxAttempts := s.Build.GetExecutorJobSectionAttempts() var runErr error for attempts := 1; attempts <= maxAttempts; attempts++ { if attempts > 1 { s.BuildLogger.Infoln(fmt.Sprintf("Retrying %s", cmd.Stage)) } ctr, err := s.requestContainer(containerType) if err != nil { return err } s.BuildLogger.Debugln("Executing on", ctr.Name, "the", cmd.Script) s.SetCurrentStage(ExecutorStageRun) runErr = s.startAndWatchContainer(cmd.Context, ctr.ID, bytes.NewBufferString(cmd.Script)) if !docker.IsErrNotFound(runErr) { return runErr } s.BuildLogger.Errorln(fmt.Sprintf("Container %q not found or removed. 
Will retry...", ctr.ID)) } if runErr != nil && maxAttempts > 1 { s.BuildLogger.Errorln("Execution attempts exceeded") } return runErr } func (s *commandExecutor) requestContainer(containerType string) (*container.InspectResponse, error) { switch containerType { case buildContainerType: return s.requestBuildContainer() case predefinedContainerType: return s.requestHelperContainer() default: return nil, fmt.Errorf("invalid container-type %q", containerType) } } func (s *commandExecutor) hasExistingContainer(containerType string, container *container.InspectResponse) bool { if container == nil { return false } _, err := s.dockerConn.ContainerInspect(s.Context, container.ID) if err == nil { return true } if docker.IsErrNotFound(err) { return false } s.BuildLogger.Warningln("Failed to inspect", containerType, "container", container.ID, err.Error()) return false } func (s *commandExecutor) requestHelperContainer() (*container.InspectResponse, error) { if s.hasExistingContainer(predefinedContainerType, s.helperContainer) { return s.helperContainer, nil } prebuildImage, err := s.getHelperImage() if err != nil { return nil, err } buildImage := spec.Image{ Name: prebuildImage.ID, } s.helperContainer, err = s.createContainer( predefinedContainerType, buildImage, []string{prebuildImage.ID}, newDefaultContainerConfigurator(&s.executor, predefinedContainerType, buildImage, s.getHelperImageCmd(), []string{prebuildImage.ID}), ) if err != nil { return nil, err } if data, ok := s.Build.ExecutorData.(*executorData); ok { data.ContainerName = s.helperContainer.Name } return s.helperContainer, nil } func (s *commandExecutor) getHelperImageCmd() []string { if s.isUmaskDisabled() { if s.Config.IsProxyExec() { return []string{"gitlab-runner-helper", "proxy-exec", "--bootstrap", "/bin/bash"} } return []string{"/bin/bash"} } return s.helperImageInfo.Cmd } func (s *commandExecutor) requestBuildContainer() (*container.InspectResponse, error) { s.lock.Lock() defer s.lock.Unlock() if 
s.hasExistingContainer(buildContainerType, s.buildContainer) { return s.buildContainer, nil } var err error s.buildContainer, err = s.createContainer( buildContainerType, s.Build.Image, []string{}, newDefaultContainerConfigurator(&s.executor, buildContainerType, s.Build.Image, s.BuildShell.DockerCommand, []string{}), ) if err != nil { return nil, err } if data, ok := s.Build.ExecutorData.(*executorData); ok { data.ContainerName = s.buildContainer.Name } if s.Build.IsFeatureFlagOn(featureflags.UseConcrete) { return s.buildContainer, nil } err = s.changeFilesOwnership() if err != nil { return nil, err } return s.buildContainer, nil } func (s *commandExecutor) changeFilesOwnership() error { if !s.isUmaskDisabled() { return nil } dockerExec := exec.NewDocker(s.Context, s.dockerConn, s.waiter, s.Build.Log()) inspect := user.NewInspect(s.dockerConn, dockerExec) imageSHA := s.buildContainer.Image imageName := s.Build.Image.Name log := s.Build.Log().WithFields(logrus.Fields{ "imageSHA": imageSHA, "imageName": imageName, }) log.Debug("Checking if image runs with root user") usesRoot, err := inspect.IsRoot(s.Context, imageSHA) if err != nil { return fmt.Errorf("checking if image %q runs as root: %w", imageName, err) } if usesRoot { log.Debug("Image uses root user") return nil } log.Debug("Image doesn't use root user") uid, gid, err := getUIDandGID(s.Context, log, inspect, s.buildContainer.ID, imageSHA) if err != nil { return err } if uid == 0 { return nil } return s.executeChown(dockerExec, uid, gid) } func getUIDandGID( ctx context.Context, log logrus.FieldLogger, inspect user.Inspect, buildContainerID string, imageSHA string, ) (int, int, error) { containerLog := log.WithField("container", buildContainerID) containerLog.Debug("Getting the UID of the container") uid, err := inspect.UID(ctx, buildContainerID) if err != nil { return 0, 0, fmt.Errorf("checking %q image UID: %w", imageSHA, err) } containerLog.Debugf("Container UID=%d", uid) containerLog.Debug("Getting the GID 
of the container") gid, err := inspect.GID(ctx, buildContainerID) if err != nil { return 0, 0, fmt.Errorf("checking %q image GID: %w", imageSHA, err) } containerLog.Debugf("Container GID=%d", gid) return uid, gid, err } func (s *commandExecutor) executeChown(dockerExec exec.Docker, uid int, gid int) error { c, err := s.requestHelperContainer() if err != nil { return fmt.Errorf("requesting new predefined container: %w", err) } err = s.executeChownOnDir(c, dockerExec, uid, gid, s.Build.FullProjectDir()) if err != nil { return err } err = s.executeChownOnDir(c, dockerExec, uid, gid, s.Build.TmpProjectDir()) if err != nil { return err } return nil } func (s *commandExecutor) executeChownOnDir( c *container.InspectResponse, dockerExec exec.Docker, uid int, gid int, dir string, ) error { s.BuildLogger.Println(fmt.Sprintf("Changing ownership of files at %q to %d:%d", dir, uid, gid)) output := new(bytes.Buffer) // limit how much data we read from the container log to // avoid memory exhaustion lw := limitwriter.New(output, 1024) streams := exec.IOStreams{ Stdin: strings.NewReader(fmt.Sprintf("chown -RP -- %d:%d %q", uid, gid, dir)), Stderr: lw, Stdout: lw, } err := dockerExec.Exec(s.Context, c.ID, streams, nil) log := s.Build.Log().WithField("updatedDir", dir) log.WithField("output", output.String()).Debug("Changing ownership of files") if err != nil { log.WithError(err).Error("Failed to change ownership of files") } return nil } func (s *commandExecutor) GetMetricsSelector() string { return fmt.Sprintf("instance=%q", s.executor.info.Name) } func newDockerOptions() executors.ExecutorOptions { return executors.ExecutorOptions{ DefaultCustomBuildsDirEnabled: true, DefaultSafeDirectoryCheckout: true, DefaultBuildsDir: "/builds", DefaultCacheDir: "/cache", SharedBuildsDir: false, Shell: common.ShellScriptInfo{ Shell: "bash", Type: common.NormalShell, RunnerCommand: "/usr/bin/gitlab-runner-helper", }, ShowHostname: true, } } func newDockerCreator(options 
executors.ExecutorOptions) func() common.Executor { return func() common.Executor { e := &commandExecutor{ executor: executor{ AbstractExecutor: executors.AbstractExecutor{ ExecutorOptions: options, }, }, } e.SetCurrentStage(common.ExecutorStageCreated) return e } } func dockerFeaturesUpdater(features *common.FeaturesInfo) { features.Image = true features.ImageExecutorOpts = true features.NativeStepsIntegration = true features.ServiceExecutorOpts = true features.ServiceMultipleAliases = true features.ServiceVariables = true features.Services = true features.Session = true features.Terminal = true features.Variables = true } func NewProvider() common.ExecutorProvider { options := newDockerOptions() return executorProvider{ DefaultExecutorProvider: executors.DefaultExecutorProvider{ Creator: newDockerCreator(options), FeaturesUpdater: dockerFeaturesUpdater, ConfigUpdater: configUpdater, DefaultShellName: options.Shell.Shell, }, } } func NewWindowsProvider() common.ExecutorProvider { options := newDockerOptions() windowsFeaturesUpdater := func(features *common.FeaturesInfo) { dockerFeaturesUpdater(features) features.NativeStepsIntegration = false } return executorProvider{ DefaultExecutorProvider: executors.DefaultExecutorProvider{ Creator: newDockerCreator(options), FeaturesUpdater: windowsFeaturesUpdater, ConfigUpdater: configUpdater, DefaultShellName: options.Shell.Shell, }, } } ================================================ FILE: executors/docker/docker_command_integration_test.go ================================================ //go:build integration package docker_test import ( "bytes" "context" "crypto/md5" "crypto/sha1" "fmt" "io" "math/rand" "net/url" "os" "os/exec" "path/filepath" "regexp" "runtime" "strconv" "strings" "sync" "testing" "time" "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/filters" "github.com/docker/docker/api/types/volume" "github.com/stretchr/testify/assert" 
"github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/cache/cacheconfig" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/buildtest" "gitlab.com/gitlab-org/gitlab-runner/common/spec" docker_executor "gitlab.com/gitlab-org/gitlab-runner/executors/docker" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/prebuilt" "gitlab.com/gitlab-org/gitlab-runner/helpers" "gitlab.com/gitlab-org/gitlab-runner/helpers/container/windows" "gitlab.com/gitlab-org/gitlab-runner/helpers/docker" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" "gitlab.com/gitlab-org/gitlab-runner/helpers/test" "gitlab.com/gitlab-org/gitlab-runner/shells" "gitlab.com/gitlab-org/gitlab-runner/shells/shellstest" ) // Specifying container image platform requires API version >= 1.41 const minDockerDaemonVersion = "1.41" var ( getDefaultWindowsImageOnce sync.Once defaultWindowsImage string ) var windowsDockerImageTagMappings = map[string]string{ windows.V1809: "ltsc2019", windows.V21H2: "ltsc2022", } func TestMain(m *testing.M) { prebuilt.PrebuiltImagesPaths = []string{"../../out/helper-images/"} os.Exit(m.Run()) } // safeBuffer is used for tests that are writing build logs to a buffer and // reading the build logs waiting for a log line. 
type safeBuffer struct { buf *bytes.Buffer mu sync.RWMutex } func newSafeBuffer() *safeBuffer { return &safeBuffer{ buf: &bytes.Buffer{}, mu: sync.RWMutex{}, } } func (s *safeBuffer) Read(p []byte) (n int, err error) { s.mu.RLock() defer s.mu.RUnlock() return s.buf.Read(p) } func (s *safeBuffer) Write(p []byte) (n int, err error) { s.mu.Lock() defer s.mu.Unlock() return s.buf.Write(p) } func (s *safeBuffer) String() string { s.mu.RLock() defer s.mu.RUnlock() return s.buf.String() } func TestDockerCommandMultistepBuild(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") tests := map[string]struct { buildGetter func() (spec.Job, error) expectedOutput []string unwantedOutput []string errExpected bool }{ "Successful build with release and after_script step": { buildGetter: common.GetRemoteSuccessfulMultistepBuild, expectedOutput: []string{ "echo Hello World", "echo Release", "echo After Script", }, errExpected: false, }, "Failure on script step. Release is skipped. After script runs.": { buildGetter: func() (spec.Job, error) { return common.GetRemoteFailingMultistepBuild(spec.StepNameScript) }, expectedOutput: []string{ "echo Hello World", "echo After Script", }, unwantedOutput: []string{ "echo Release", }, errExpected: true, }, "Failure on release step. 
After script runs.": { buildGetter: func() (spec.Job, error) { return common.GetRemoteFailingMultistepBuild("release") }, expectedOutput: []string{ "echo Hello World", "echo Release", "echo After Script", }, errExpected: true, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { build := getBuildForOS(t, tt.buildGetter) var buf bytes.Buffer err := build.Run(&common.Config{}, &common.Trace{Writer: &buf}) out := buf.String() for _, output := range tt.expectedOutput { assert.Contains(t, out, output) } for _, output := range tt.unwantedOutput { assert.NotContains(t, out, output) } if tt.errExpected { var buildErr *common.BuildError assert.ErrorAs(t, err, &buildErr) assert.Equal(t, 1, buildErr.ExitCode) return } assert.NoError(t, err) }) } } func getBuildForOS(t *testing.T, getJobResp func() (spec.Job, error)) common.Build { jobResp, err := getJobResp() require.NoError(t, err) build := common.Build{ Job: jobResp, Runner: getRunnerConfigForOS(t), ExecutorProvider: docker_executor.NewProvider(), } return build } func getRunnerConfigForOS(t *testing.T) *common.RunnerConfig { executor := "docker" image := common.TestAlpineImage shell := "bash" if runtime.GOOS == "windows" { shell = shells.SNPowershell image = getDefaultWindowsImage(t) } return &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: executor, Shell: shell, Docker: &common.DockerConfig{ Image: image, PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, }, Cache: &cacheconfig.Config{}, }, RunnerCredentials: common.RunnerCredentials{ Token: fmt.Sprintf("%x", md5.Sum([]byte(t.Name()))), }, } } // windowsDockerImageTag checks the specified kernel version to see if it's one of the // supported Windows version. If true, it maps a compatible mcr.microsoft.com Docker image tag. // UnsupportedWindowsVersionError is returned when no supported Windows version // is found in the string. 
func windowsDockerImageTag(version string, tagMap map[string]string) (string, error) { version, err := windows.Version(version) if err != nil { return "", err } dockerTag, ok := tagMap[version] if !ok { dockerTag = version } return dockerTag, nil } func getDefaultWindowsImage(t *testing.T) string { getDefaultWindowsImageOnce.Do(func() { defaultWindowsImage = getWindowsImage(t, common.TestWindowsImage, windowsDockerImageTagMappings) }) return defaultWindowsImage } func getWindowsImage(t *testing.T, imageRef string, tagMap map[string]string) string { client, err := docker.New(docker.Credentials{}) require.NoError(t, err, "creating docker client") defer client.Close() info, err := client.Info(context.Background()) require.NoError(t, err, "docker info") dockerImageTag, err := windowsDockerImageTag(info.KernelVersion, tagMap) require.NoError(t, err) return fmt.Sprintf(imageRef, dockerImageTag) } func TestBuildPassingEnvsMultistep(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") shellstest.OnEachShell(t, func(t *testing.T, shell string) { if shell == "pwsh" { t.Skipf("%s not supported", shell) } runnerConfig := getRunnerConfigForOS(t) runnerConfig.RunnerSettings.Shell = shell buildtest.RunBuildWithPassingEnvsMultistep(t, runnerConfig, setupExecutor) }) } func TestDockerCommandSuccessRunRawVariable(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") build := getBuildForOS(t, func() (spec.Job, error) { return common.GetRemoteBuildResponse("echo $TEST") }) value := "$VARIABLE$WITH$DOLLARS$$" build.Variables = append(build.Variables, spec.Variable{ Key: "TEST", Value: value, Raw: true, }) out, err := buildtest.RunBuildReturningOutput(t, &build) assert.NoError(t, err) assert.Contains(t, out, value) } func TestDockerCommandSuccessRunFileVariableContent(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") build := getBuildForOS(t, func() (spec.Job, error) { if runtime.GOOS == "windows" { return 
common.GetRemoteBuildResponse(`Get-Filehash -Algorithm SHA1 -Path $TEST`) } return common.GetRemoteBuildResponse(`sha1sum $TEST | tr "[a-z]" "[A-Z]"`) }) value := "this is the content" build.Variables = append(build.Variables, spec.Variable{ Key: "TEST", Value: value, File: true, Raw: true, }) out, err := buildtest.RunBuildReturningOutput(t, &build) assert.NoError(t, err) assert.Contains(t, out, fmt.Sprintf("%X", sha1.Sum([]byte(value)))) } func TestBuildScriptSections(t *testing.T) { shellstest.OnEachShell(t, func(t *testing.T, shell string) { if shell == "pwsh" || shell == "powershell" { // support for pwsh and powershell tracked in https://gitlab.com/gitlab-org/gitlab-runner/-/issues/28119 t.Skip("pwsh, powershell not supported") } build := getBuildForOS(t, func() (spec.Job, error) { return common.GetRemoteBuildResponse(`echo "Hello World"`) }) build.Runner.RunnerSettings.Shell = shell buildtest.RunBuildWithSections(t, &build) }) } func TestDockerCommandUsingCustomClonePath(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") remoteBuild := func() (spec.Job, error) { cmd := "ls -al $CI_BUILDS_DIR/go/src/gitlab.com/gitlab-org/repo" if runtime.GOOS == "windows" { cmd = "Get-Item -Path $CI_BUILDS_DIR/go/src/gitlab.com/gitlab-org/repo" } return common.GetRemoteBuildResponse(cmd) } tests := map[string]struct { clonePath string expectedErr bool }{ "uses custom clone path": { clonePath: "$CI_BUILDS_DIR/go/src/gitlab.com/gitlab-org/repo", expectedErr: false, }, "path has to be within CI_BUILDS_DIR": { clonePath: "/unknown/go/src/gitlab.com/gitlab-org/repo", expectedErr: true, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { build := getBuildForOS(t, remoteBuild) build.Runner.Environment = []string{ "GIT_CLONE_PATH=" + test.clonePath, } err := buildtest.RunBuild(t, &build) if test.expectedErr { var buildErr *common.BuildError assert.ErrorAs(t, err, &buildErr) return } assert.NoError(t, err) }) } } func TestDockerCommandNoRootImage(t 
*testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") successfulBuild, err := common.GetRemoteSuccessfulBuildWithDumpedVariables() assert.NoError(t, err) successfulBuild.Image.Name = common.TestAlpineNoRootImage build := &common.Build{ Job: successfulBuild, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, }, }, }, ExecutorProvider: docker_executor.NewProvider(), } err = build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout}) assert.NoError(t, err) } func TestDockerCommandEntrypointWithStderrOutput(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") resp, err := common.GetRemoteSuccessfulBuild() assert.NoError(t, err) resp.Image.Name = common.TestAlpineEntrypointStderrImage build := &common.Build{ Job: resp, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, }, FeatureFlags: map[string]bool{ featureflags.DisableUmaskForDockerExecutor: true, }, }, }, ExecutorProvider: docker_executor.NewProvider(), } err = build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout}) assert.NoError(t, err) } func TestDockerCommandOwnershipOverflow(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") resp, err := common.GetRemoteSuccessfulBuild() assert.NoError(t, err) resp.Image.Name = common.TestAlpineIDOverflowImage build := &common.Build{ Job: resp, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, }, FeatureFlags: map[string]bool{ featureflags.DisableUmaskForDockerExecutor: true, }, }, }, ExecutorProvider: 
docker_executor.NewProvider(), } trace := &common.Trace{Writer: os.Stdout} timeoutTimer := time.AfterFunc(2*time.Minute, func() { trace.Abort() }) defer timeoutTimer.Stop() err = build.Run(&common.Config{}, trace) assert.Error(t, err) // error is only canceled if it timed out, something that will only happen // if data from the overflow isn't safely limited. assert.NotErrorIs(t, err, &common.BuildError{FailureReason: common.JobCanceled}) } func TestDockerCommandWithAllowedImagesRun(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") successfulBuild, err := common.GetRemoteSuccessfulBuild() successfulBuild.Image = spec.Image{Name: "$IMAGE_NAME"} successfulBuild.Variables = append(successfulBuild.Variables, spec.Variable{ Key: "IMAGE_NAME", Value: common.TestAlpineImage, Public: true, Internal: false, File: false, }) successfulBuild.Services = append(successfulBuild.Services, spec.Image{Name: common.TestDockerDindImage}) assert.NoError(t, err) build := &common.Build{ Job: successfulBuild, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ AllowedImages: []string{common.TestAlpineImage}, AllowedServices: []string{common.TestDockerDindImage}, Privileged: true, PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, }, }, }, ExecutorProvider: docker_executor.NewProvider(), } err = build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout}) assert.NoError(t, err) } func TestDockerCommandDisableEntrypointOverwrite(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") tests := []struct { name string services bool disabled bool }{ { name: "Disabled - no services", disabled: true, }, { name: "Disabled - services", disabled: true, services: true, }, { name: "Enabled - no services", }, { name: "Enabled - services", services: true, }, } for _, test := range tests { t.Run(test.name, func(t 
*testing.T) { successfulBuild, err := common.GetRemoteSuccessfulBuild() require.NoError(t, err) successfulBuild.Image.Entrypoint = []string{"/bin/sh", "-c", "echo 'image overwritten'"} if test.services { successfulBuild.Services = spec.Services{ spec.Image{ Name: common.TestDockerDindImage, Entrypoint: []string{"/bin/sh", "-c", "echo 'service overwritten'"}, }, } } build := &common.Build{ Job: successfulBuild, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ Privileged: true, Image: common.TestAlpineImage, PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, DisableEntrypointOverwrite: test.disabled, }, }, }, ExecutorProvider: docker_executor.NewProvider(), } var buffer bytes.Buffer err = build.Run(&common.Config{}, &common.Trace{Writer: &buffer}) assert.NoError(t, err) out := buffer.String() if test.disabled { assert.NotContains(t, out, "image overwritten") assert.NotContains(t, out, "service overwritten") assert.Contains(t, out, "Entrypoint override disabled") } else { assert.Contains(t, out, "image overwritten") if test.services { assert.Contains(t, out, "service overwritten") } } }) } } func TestDockerCommandMissingImage(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") build := getBuildForOS(t, common.GetSuccessfulBuild) build.Runner.Docker.Image = "some/non-existing/image" err := build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout}) require.Error(t, err) assert.ErrorIs(t, err, &common.BuildError{FailureReason: common.ImagePullFailure}) assert.Regexp(t, regexp.MustCompile("not found|repository does not exist|invalid repository name"), err.Error()) } func TestDockerCommandMissingTag(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") build := getBuildForOS(t, common.GetSuccessfulBuild) build.Runner.Docker.Image = "docker:missing-tag" err := build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout}) require.Error(t, err) assert.ErrorIs(t, 
err, &common.BuildError{FailureReason: common.ImagePullFailure}) assert.Contains(t, err.Error(), "not found") } func TestDockerCommandMissingServiceImage(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") build := getBuildForOS(t, common.GetSuccessfulBuild) build.Services = spec.Services{ { Name: "some/non-existing/image", }, } err := build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout}) require.Error(t, err) assert.ErrorIs(t, err, &common.BuildError{FailureReason: common.ImagePullFailure}) assert.Regexp(t, regexp.MustCompile("not found|repository does not exist|invalid repository name"), err.Error()) } // TestDockerCommandPullingImageNoHost tests if the DNS resolution failure for the registry host // is categorized as a script failure. func TestDockerCommandPullingImageNoHost(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") build := getBuildForOS(t, common.GetSuccessfulBuild) build.Runner.RunnerSettings.Docker.Image = "docker.repo.example.com/docker:23-dind" var buildError *common.BuildError err := build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout}) require.ErrorAs(t, err, &buildError) assert.Equal(t, common.ImagePullFailure, buildError.FailureReason, "expected script failure error") } func TestDockerCommandBuildCancel(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") buildtest.RunBuildWithCancel(t, getRunnerConfigForOS(t), setupExecutor) } func TestBuildMasking(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") buildtest.RunBuildWithMasking(t, getRunnerConfigForOS(t), setupExecutor) } func TestBuildMaskingProxyExec(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") buildtest.RunBuildWithMaskingProxyExec(t, getRunnerConfigForOS(t), setupExecutor) } func TestBuildExpandedFileVariable(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") shellstest.OnEachShell(t, func(t *testing.T, shell string) { build := getBuildForOS(t, common.GetSuccessfulBuild) 
buildtest.RunBuildWithExpandedFileVariable(t, build.Runner, setupExecutor) }) } func TestDockerCommandTwoServicesFromOneImage(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") tests := map[string]struct { variables spec.Variables }{ "bridge network": { variables: spec.Variables{}, }, "network per build": { variables: spec.Variables{ { Key: featureflags.NetworkPerBuild, Value: "true", }, }, }, } successfulBuild, err := common.GetRemoteSuccessfulBuild() successfulBuild.Services = spec.Services{ {Name: common.TestAlpineImage, Alias: "service-1"}, {Name: common.TestAlpineImage, Alias: "service-2"}, } assert.NoError(t, err) build := &common.Build{ Job: successfulBuild, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ Image: common.TestAlpineImage, PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, }, }, }, ExecutorProvider: docker_executor.NewProvider(), } for name, tt := range tests { t.Run(name, func(t *testing.T) { var buffer bytes.Buffer build.Variables = tt.variables err = build.Run(&common.Config{}, &common.Trace{Writer: &buffer}) assert.NoError(t, err) str := buffer.String() re, err := regexp.Compile("(?m)Conflict. 
The container name [^ ]+ is already in use by container") require.NoError(t, err) assert.NotRegexp(t, re, str, "Both service containers should be started and use different name") }) } } func TestDockerCommandServiceNameEmpty(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") tests := map[string]struct { variables spec.Variables }{ "bridge network": { variables: spec.Variables{}, }, "network per build": { variables: spec.Variables{ { Key: featureflags.NetworkPerBuild, Value: "true", }, }, }, } successfulBuild, err := common.GetRemoteSuccessfulBuild() successfulBuild.Services = spec.Services{ {Name: "", Alias: "service-1"}, // Name can be empty if for example env variable expands to empty string. } assert.NoError(t, err) build := &common.Build{ Job: successfulBuild, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ Image: common.TestAlpineImage, PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, }, }, }, ExecutorProvider: docker_executor.NewProvider(), } for name, tt := range tests { t.Run(name, func(t *testing.T) { var buffer bytes.Buffer build.Variables = tt.variables err = build.Run(&common.Config{}, &common.Trace{Writer: &buffer}) str := buffer.String() // Shouldn't be considered a system failure var buildErr *common.BuildError assert.ErrorAs(t, err, &buildErr) assert.NotContains(t, str, "system failure") }) } } func TestDockerCommandOutput(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") successfulBuild, err := common.GetRemoteSuccessfulBuild() assert.NoError(t, err) build := &common.Build{ Job: successfulBuild, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ Image: common.TestAlpineImage, PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, }, }, }, ExecutorProvider: 
docker_executor.NewProvider(), } var buffer bytes.Buffer err = build.Run(&common.Config{}, &common.Trace{Writer: &buffer}) assert.NoError(t, err) pattern := regexp.MustCompile(`(?m)^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z\s+\S+\s+Initialized empty Git repository in /builds/gitlab-org/ci-cd/gitlab-runner-pipeline-tests/gitlab-test/.git/`) assert.Regexp(t, pattern, buffer.String()) } func TestDockerPrivilegedServiceAccessingBuildsFolder(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") job, err := common.GetRemoteBuildResponse( "docker info", "docker run -v $(pwd):$(pwd) -w $(pwd) busybox touch test", "cat test", ) assert.NoError(t, err) strategies := []string{ "fetch", "clone", } for _, strategy := range strategies { t.Log("Testing", strategy, "strategy...") build := getTestDockerJob(t, job) build.Image.Name = common.TestDockerGitImage build.Services = spec.Services{ spec.Image{ Name: common.TestDockerDindImage, // set bip manually to prevent DinD-ception networking problems // and avoid collision with: // - docker daemon on the host // - dind as a service to the CI job running this test // - dind as a service to this test Command: []string{"--bip", "172.30.0.1/16"}, }, } build.Variables = append(build.Variables, spec.Variable{ Key: "GIT_STRATEGY", Value: strategy, }) err = build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout}) assert.NoError(t, err) } } func getTestDockerJob(t *testing.T, job spec.Job) *common.Build { job.Variables = append(job.Variables, spec.Variable{Key: "DOCKER_TLS_VERIFY", Value: "1"}, spec.Variable{Key: "DOCKER_TLS_CERTDIR", Value: "/certs"}, spec.Variable{Key: "DOCKER_CERT_PATH", Value: "/certs/client"}, ) build := &common.Build{ Job: job, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ Image: common.TestAlpineImage, PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, Privileged: true, Volumes: 
					[]string{"/certs"},
				},
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}

	return build
}

// TestDockerExtendedConfigurationFromJob checks that image and service
// definitions supplied by the job itself — including $VARIABLE references in
// the names — are expanded and honoured, both with and without explicit
// entrypoints, commands and aliases.
func TestDockerExtendedConfigurationFromJob(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	examples := []struct {
		image     spec.Image
		services  spec.Services
		variables spec.Variables
	}{
		{
			image: spec.Image{
				Name:       "$IMAGE_NAME",
				Entrypoint: []string{"sh", "-c"},
			},
			services: spec.Services{
				spec.Image{
					Name:       "$SERVICE_NAME",
					Entrypoint: []string{"sh", "-c"},
					Command:    []string{"dockerd-entrypoint.sh"},
					Alias:      "my-docker-service",
				},
			},
			variables: spec.Variables{
				{Key: "DOCKER_HOST", Value: "tcp://docker:2376"},
				{Key: "IMAGE_NAME", Value: common.TestDockerGitImage},
				{Key: "SERVICE_NAME", Value: common.TestDockerDindImage},
			},
		},
		{
			image: spec.Image{
				Name: "$IMAGE_NAME",
			},
			services: spec.Services{
				spec.Image{
					Name: "$SERVICE_NAME",
				},
			},
			variables: spec.Variables{
				{Key: "DOCKER_HOST", Value: "tcp://docker:2376"},
				{Key: "IMAGE_NAME", Value: common.TestDockerGitImage},
				{Key: "SERVICE_NAME", Value: common.TestDockerDindImage},
			},
		},
	}

	for exampleID, example := range examples {
		t.Run(fmt.Sprintf("example-%d", exampleID), func(t *testing.T) {
			job, err := common.GetRemoteBuildResponse("docker info")
			assert.NoError(t, err)

			build := getTestDockerJob(t, job)
			build.Image = example.image
			build.Services = example.services
			build.Variables = append(build.Variables, example.variables...)
err = build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout}) assert.NoError(t, err) }) } } func TestCacheInContainer(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") successfulBuild, err := common.GetRemoteSuccessfulBuild() assert.NoError(t, err) successfulBuild.JobInfo.ProjectID = time.Now().Unix() successfulBuild.Steps[0].Script = spec.StepScript{ "(test -d cached/ && ls -lh cached/) || echo \"no cached directory\"", "(test -f cached/date && cat cached/date) || echo \"no cached date\"", "mkdir -p cached", "date > cached/date", } successfulBuild.Cache = spec.Caches{ spec.Cache{ Key: "key", Paths: spec.ArtifactPaths{"cached/*"}, Policy: spec.CachePolicyPullPush, When: spec.CacheWhenOnSuccess, }, } build := &common.Build{ Job: successfulBuild, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ Image: common.TestAlpineImage, PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, Volumes: []string{"/cache"}, }, Cache: &cacheconfig.Config{}, }, }, ExecutorProvider: docker_executor.NewProvider(), } cacheNotPresentRE := regexp.MustCompile(`(?m)^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z\s+\w+\s+no cached directory`) skipCacheDownload := "Not downloading cache key due to policy" skipCacheUpload := "Not uploading cache key due to policy" // The first job lacks any cache to pull, but tries to both pull and push output, err := buildtest.RunBuildReturningOutput(t, build) require.NoError(t, err) assert.Regexp(t, cacheNotPresentRE, output, "First job execution should not have cached data") assert.NotContains( t, output, skipCacheDownload, "Cache download should be performed with policy: %s", spec.CachePolicyPullPush, ) assert.NotContains( t, output, skipCacheUpload, "Cache upload should be performed with policy: %s", spec.CachePolicyPullPush, ) // pull-only jobs should skip the push step build.Job.Cache[0].Policy = spec.CachePolicyPull 
output, err = buildtest.RunBuildReturningOutput(t, build) require.NoError(t, err) assert.NotRegexp(t, cacheNotPresentRE, output, "Second job execution should have cached data") assert.NotContains( t, output, skipCacheDownload, "Cache download should be performed with policy: %s", spec.CachePolicyPull, ) assert.Contains( t, output, skipCacheUpload, "Cache upload should be skipped with policy: %s", spec.CachePolicyPull, ) // push-only jobs should skip the pull step build.Job.Cache[0].Policy = spec.CachePolicyPush output, err = buildtest.RunBuildReturningOutput(t, build) require.NoError(t, err) assert.Regexp(t, cacheNotPresentRE, output, "Third job execution should not have cached data") assert.Contains(t, output, skipCacheDownload, "Cache download be skipped with policy: push") assert.NotContains(t, output, skipCacheUpload, "Cache upload should be performed with policy: push") // For failed job it should push cache as well. build.Job.Cache[0].Policy = spec.CachePolicyPullPush build.Job.Cache[0].When = spec.CacheWhenAlways build.Job.Steps[0].Script = append(build.Job.Steps[0].Script, "exit 1") output, err = buildtest.RunBuildReturningOutput(t, build) require.Error(t, err) assert.NotRegexp(t, cacheNotPresentRE, output, "Second job execution should have cached data") assert.Contains(t, output, "Saving cache for failed job") assert.Contains(t, output, "Created cache") } func TestDockerImageNameFromVariable(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") successfulBuild, err := common.GetRemoteSuccessfulBuild() successfulBuild.Variables = append(successfulBuild.Variables, spec.Variable{ Key: "CI_REGISTRY_IMAGE", Value: common.TestAlpineImage, }) successfulBuild.Image = spec.Image{ Name: "$CI_REGISTRY_IMAGE", } assert.NoError(t, err) build := &common.Build{ Job: successfulBuild, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ Image: 
					common.TestAlpineImage,
					PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent},
					// NOTE(review): this sets AllowedServices although the test
					// asserts against the "allowed images" error — confirm
					// whether AllowedImages was intended here.
					AllowedServices: []string{common.TestAlpineImage},
				},
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}

	re := regexp.MustCompile("(?m)^ERROR: The [^ ]+ is not present on list of allowed images")

	output, err := buildtest.RunBuildReturningOutput(t, build)
	require.NoError(t, err)
	assert.NotRegexp(t, re, output, "Image's name should be expanded from variable")
}

// TestDockerServiceNameFromVariable checks that a service name given as
// "$CI_REGISTRY_IMAGE" is expanded before being matched against the allowed
// services list.
func TestDockerServiceNameFromVariable(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	successfulBuild, err := common.GetRemoteSuccessfulBuild()
	successfulBuild.Variables = append(successfulBuild.Variables, spec.Variable{
		Key:   "CI_REGISTRY_IMAGE",
		Value: common.TestAlpineImage,
	})
	successfulBuild.Services = append(successfulBuild.Services, spec.Image{
		Name: "$CI_REGISTRY_IMAGE",
	})
	assert.NoError(t, err)
	build := &common.Build{
		Job: successfulBuild,
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				Executor: "docker",
				Docker: &common.DockerConfig{
					Image:           common.TestAlpineImage,
					PullPolicy:      common.StringOrArray{common.PullPolicyIfNotPresent},
					AllowedServices: []string{common.TestAlpineImage},
				},
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}

	re := regexp.MustCompile("(?m)^ERROR: The [^ ]+ is not present on list of allowed services")

	output, err := buildtest.RunBuildReturningOutput(t, build)
	require.NoError(t, err)
	assert.NotRegexp(t, re, output, "Service's name should be expanded from variable")
}

// TestDockerServiceHealthcheck runs the liveness image as a service and checks
// that the healthcheck passes when the server listens on the probed port
// (default 80, or HEALTHCHECK_TCP_PORT when set) and fails otherwise, with
// FF_NETWORK_PER_BUILD both on and off.
func TestDockerServiceHealthcheck(t *testing.T) {
	helpers.SkipIntegrationTests(t, "docker", "info")

	tests := map[string]struct {
		command         []string
		serviceStarted  bool
		networkPerBuild string
		skip            bool
		port            int
		variables       spec.Variables
	}{
		"successful service (FF_NETWORK_PER_BUILD=false)": {
			command:         []string{"server"},
			serviceStarted:  true,
			networkPerBuild: "false",
			// legacy "link" networking is not supported on Windows
			skip: runtime.GOOS == "windows",
		},
		"successful service (FF_NETWORK_PER_BUILD=true)": {
			command:         []string{"server"},
			serviceStarted:  true,
			networkPerBuild: "true",
			skip:            false,
		},
		"successful service explicit port (FF_NETWORK_PER_BUILD=false)": {
			command:         []string{"server", "--addr", ":8888"},
			serviceStarted:  true,
			networkPerBuild: "false",
			skip:            runtime.GOOS == "windows",
			port:            8888,
			variables:       []spec.Variable{{Key: "HEALTHCHECK_TCP_PORT", Value: "8888"}},
		},
		"successful service explicit port (FF_NETWORK_PER_BUILD=true)": {
			command:         []string{"server", "--addr", ":8888"},
			serviceStarted:  true,
			networkPerBuild: "true",
			skip:            false,
			port:            8888,
			variables:       []spec.Variable{{Key: "HEALTHCHECK_TCP_PORT", Value: "8888"}},
		},
		// server listens on 8888 but the default port 80 is probed => failure
		"failed service (FF_NETWORK_PER_BUILD=false)": {
			command:         []string{"server", "--addr", ":8888"},
			serviceStarted:  false,
			networkPerBuild: "false",
			skip:            runtime.GOOS == "windows",
		},
		"failed service (FF_NETWORK_PER_BUILD=true)": {
			command:         []string{"server", "--addr", ":8888"},
			serviceStarted:  false,
			networkPerBuild: "true",
			skip:            false,
		},
	}

	for name, tc := range tests {
		t.Run(name, func(t *testing.T) {
			if tc.skip {
				t.Skipf("OS %q does not support 'link' networking", runtime.GOOS)
			}

			if tc.port == 0 {
				tc.port = 80
			}

			// probe the service through every generated alias form
			resp, err := common.GetRemoteBuildResponse(
				fmt.Sprintf("liveness client db:%d", tc.port),
				fmt.Sprintf("liveness client registry.gitlab.com__gitlab-org__ci-cd__tests__liveness:%d", tc.port),
				fmt.Sprintf("liveness client registry.gitlab.com-gitlab-org-ci-cd-tests-liveness:%d", tc.port),
			)
			require.NoError(t, err)

			build := common.Build{
				Job: resp,
				Runner: &common.RunnerConfig{
					RunnerSettings: common.RunnerSettings{
						Executor: "docker",
						Docker: &common.DockerConfig{
							WaitForServicesTimeout: 15,
						},
					},
				},
				ExecutorProvider: docker_executor.NewProvider(),
			}

			build.Image = spec.Image{
				Name:       common.TestLivenessImage,
				Entrypoint: []string{""},
			}

			if runtime.GOOS == "windows" {
				build.Runner.Docker.WaitForServicesTimeout = 60
				build.Runner.RunnerSettings.Shell = shells.SNPwsh
			}

			build.Services =
				append(build.Services, spec.Image{
					Name:      common.TestLivenessImage,
					Alias:     "db",
					Command:   tc.command,
					Variables: tc.variables,
				})
			build.Variables = append(build.Variables, spec.Variable{
				Key: "FF_NETWORK_PER_BUILD", Value: tc.networkPerBuild, Public: true,
			})

			out, err := buildtest.RunBuildReturningOutput(t, &build)
			if !tc.serviceStarted {
				assert.Error(t, err)
				assert.Contains(t, out, "probably didn't start properly")
				return
			}

			assert.NoError(t, err)
			assert.NotContains(t, out, "probably didn't start properly")
		})
	}
}

// TestDockerServiceAliases verifies that every hostname alias generated for a
// service (explicit alias, sanitized image-name forms and the container ID)
// resolves to the service container and serves requests.
func TestDockerServiceAliases(t *testing.T) {
	helpers.SkipIntegrationTests(t, "docker", "info")

	if runtime.GOOS == "windows" {
		t.Skip()
	}

	// script that works in alpine image:
	// - resolve 'my_service' to an IP
	// - gather all the other aliases that resolve that IP
	// - fetch from them all to test they resolve correctly
	resp, err := common.GetRemoteBuildResponse(
		`ip=$(awk '/my_service/{print $1;exit}' /etc/hosts) && ` +
			`awk -v ip="$ip" '$1==ip{for(i=2;i<=NF;i++)print $i}' /etc/hosts | xargs -I{} sh -c 'echo "Testing: {}"; wget -q --spider "{}"'`,
	)
	require.NoError(t, err)

	resp.Image = spec.Image{Name: common.TestAlpineImage}
	resp.Services = []spec.Image{
		{
			Name:    common.TestLivenessImage,
			Alias:   "my_service",
			Command: []string{"server", "--addr", ":80"},
		},
	}

	build := common.Build{
		Job: resp,
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				Executor: "docker",
				Docker: &common.DockerConfig{
					WaitForServicesTimeout: 15,
				},
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}

	out, err := buildtest.RunBuildReturningOutput(t, &build)
	assert.NoError(t, err)
	assert.Contains(t, out, "Testing: registry.gitlab.com__gitlab-org__ci-cd__tests__liveness")
	assert.Contains(t, out, "Testing: registry.gitlab.com-gitlab-org-ci-cd-tests-liveness")
	assert.Contains(t, out, "Testing: my_service")
	assert.Regexp(t, `Testing: [0-9a-f]{12}`, out) // service container ID
}

// TestDockerServiceHealthcheckOverflow checks that service log output is
// truncated at the executor's output limit.
func TestDockerServiceHealthcheckOverflow(t *testing.T) {
	test.SkipIfGitLabCIOn(t,
		test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	resp, err := common.GetRemoteSuccessfulBuild()
	assert.NoError(t, err)

	build := &common.Build{
		Job: resp,
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				Executor: "docker",
				Docker:   &common.DockerConfig{},
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}

	build.Image = spec.Image{
		Name: common.TestAlpineImage,
	}
	// The service prints a padded string exactly as long as the log limit, so
	// the leading marker survives and the trailing marker must be truncated.
	build.Services = append(build.Services, spec.Image{
		Name:    "alpine:3.22",
		Command: []string{"sh", "-c", "printf 'datastart: %" + strconv.Itoa(docker_executor.ServiceLogOutputLimit) + "s' ':dataend'"},
	})
	build.Variables = append(build.Variables, spec.Variable{
		Key: "FF_NETWORK_PER_BUILD", Value: "true", Public: true,
	})

	out, err := buildtest.RunBuildReturningOutput(t, build)
	assert.NoError(t, err)
	assert.Contains(t, out, "datastart:")
	assert.NotContains(t, out, ":dataend")
}

// TestDockerHandlesAliasDuplicates verifies that duplicate aliases in a
// service's alias list are deduplicated and each distinct alias resolves.
func TestDockerHandlesAliasDuplicates(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	resp, err := common.GetRemoteBuildResponse("ping -c 1 alpine && ping -c 1 svc-1")
	assert.NoError(t, err)

	build := &common.Build{
		Job: resp,
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				Executor: "docker",
				Docker:   &common.DockerConfig{WaitForServicesTimeout: 5},
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}

	build.Image = spec.Image{
		Name: common.TestAlpineImage,
	}
	build.Services = append(build.Services, spec.Image{
		Name:    common.TestAlpineImage,
		Command: []string{"sleep", "15"},
		Alias:   "alpine alpine svc-1 svc-1",
	})
	build.Variables = append(build.Variables, spec.Variable{
		Key: "FF_NETWORK_PER_BUILD", Value: "true", Public: true,
	})

	out, err := buildtest.RunBuildReturningOutput(t, build)
	assert.NoError(t, err)
	assert.Contains(t, out, "PING alpine")
	assert.Contains(t, out, "PING svc-1")
}

// runDockerInDocker starts a detached privileged DinD container for the given
// docker version and returns its container ID.
func runDockerInDocker(version string) (id string, err error) {
	cmd := exec.Command("docker", "run", "--detach", "--privileged", "-p",
"2375", "docker:"+version+"-dind") cmd.Stderr = os.Stderr data, err := cmd.Output() if err != nil { return id, err } id = strings.TrimSpace(string(data)) return id, err } func getDockerCredentials(id string) (credentials docker.Credentials, err error) { cmd := exec.Command("docker", "port", id, "2375") cmd.Stderr = os.Stderr data, err := cmd.Output() if err != nil { return credentials, err } hostPort := strings.Split(strings.TrimSpace(string(data)), ":") if dockerHost, err := url.Parse(os.Getenv("DOCKER_HOST")); err == nil { dockerHostPort := strings.Split(dockerHost.Host, ":") hostPort[0] = dockerHostPort[0] } else if hostPort[0] == "0.0.0.0" { hostPort[0] = "localhost" } credentials.Host = "tcp://" + hostPort[0] + ":" + hostPort[1] return credentials, err } func waitForDocker(credentials docker.Credentials) error { client, err := docker.New(credentials) if err != nil { return err } for i := 0; i < 20; i++ { _, err = client.Info(context.Background()) if err == nil { break } time.Sleep(time.Second) } return err } func testDockerVersion(t *testing.T, version string) { t.Log("Running docker", version, "...") id, err := runDockerInDocker(version) if err != nil { t.Error("Docker run:", err) return } defer func() { _ = exec.Command("docker", "rm", "-f", "-v", id).Run() }() t.Log("Getting address of", version, "...") credentials, err := getDockerCredentials(id) if err != nil { t.Error("Docker credentials:", err) return } t.Log("Connecting to", credentials.Host, "...") err = waitForDocker(credentials) if err != nil { t.Error("Wait for docker:", err) return } t.Log("Docker", version, "is running at", credentials.Host) successfulBuild, err := common.GetRemoteSuccessfulBuild() assert.NoError(t, err) build := &common.Build{ Job: successfulBuild, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ Image: common.TestAlpineImage, PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, Credentials: 
					credentials,
					CPUS: "0.1",
				},
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}

	err = build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout})
	assert.NoError(t, err)
}

// The TestDockerX_YCompatibility tests run a standard build against historic
// docker daemon versions via DinD; they cannot run nested inside dind in CI.

func TestDocker1_8Compatibility(t *testing.T) {
	test.SkipIfGitLabCIWithMessage(t, "This test doesn't work in nested dind")
	helpers.SkipIntegrationTests(t, "docker", "info")

	testDockerVersion(t, "1.8")
}

func TestDocker1_9Compatibility(t *testing.T) {
	test.SkipIfGitLabCIWithMessage(t, "This test doesn't work in nested dind")
	helpers.SkipIntegrationTests(t, "docker", "info")

	testDockerVersion(t, "1.9")
}

func TestDocker1_10Compatibility(t *testing.T) {
	test.SkipIfGitLabCIWithMessage(t, "This test doesn't work in nested dind")
	helpers.SkipIntegrationTests(t, "docker", "info")

	testDockerVersion(t, "1.10")
}

func TestDocker1_11Compatibility(t *testing.T) {
	test.SkipIfGitLabCIWithMessage(t, "This test doesn't work in nested dind")
	helpers.SkipIntegrationTests(t, "docker", "info")

	testDockerVersion(t, "1.11")
}

func TestDocker1_12Compatibility(t *testing.T) {
	test.SkipIfGitLabCIWithMessage(t, "This test doesn't work in nested dind")
	helpers.SkipIntegrationTests(t, "docker", "info")

	testDockerVersion(t, "1.12")
}

func TestDocker1_13Compatibility(t *testing.T) {
	test.SkipIfGitLabCIWithMessage(t, "This test doesn't work in nested dind")
	helpers.SkipIntegrationTests(t, "docker", "info")

	testDockerVersion(t, "1.13")
}

// TestDockerCommandWithGitSSLCAInfo clones from gitlab.com over TLS to verify
// the runner's CA handling for the git operations.
func TestDockerCommandWithGitSSLCAInfo(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	successfulBuild, err := common.GetRemoteGitLabComTLSBuild()
	assert.NoError(t, err)
	build := &common.Build{
		Job: successfulBuild,
		Runner: &common.RunnerConfig{
			RunnerCredentials: common.RunnerCredentials{
				URL: "https://gitlab.com",
			},
			RunnerSettings: common.RunnerSettings{
				Executor: "docker",
				Docker: &common.DockerConfig{
					Image:      common.TestAlpineImage,
					PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent},
				},
			},
		},
		ExecutorProvider:
			docker_executor.NewProvider(),
	}

	var buffer bytes.Buffer
	err = build.Run(&common.Config{}, &common.Trace{Writer: &buffer})
	assert.NoError(t, err)
	out := buffer.String()
	assert.Contains(t, out, "Created fresh repository")
	assert.Contains(t, out, "Updating/initializing submodules")
}

// TestDockerCommandWithHelperImageConfig verifies that a helper_image
// configured by tag is resolved to its pinned digest and used for the build.
func TestDockerCommandWithHelperImageConfig(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	helperImageConfig := "gitlab/gitlab-runner-helper:x86_64-v16.9.1"

	successfulBuild, err := common.GetRemoteSuccessfulBuild()
	assert.NoError(t, err)
	build := &common.Build{
		Job: successfulBuild,
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				Executor: "docker",
				Docker: &common.DockerConfig{
					Image:       common.TestAlpineImage,
					HelperImage: helperImageConfig,
					PullPolicy:  common.StringOrArray{common.PullPolicyIfNotPresent},
				},
				// Ensure ProxyExec is disabled as the gitlab-runner-helper image above doesn't contain
				// the proxy_exec subcommand.
				ProxyExec: func() *bool { v := false; return &v }(),
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}

	var buffer bytes.Buffer
	err = build.Run(&common.Config{}, &common.Trace{Writer: &buffer})
	assert.NoError(t, err)
	out := buffer.String()
	assert.Contains(
		t,
		out,
		"Using docker image sha256:be0a1939d88dbce6f18b0885662080a6aabc49d7e5e51c6021f36ce327614b13 for "+
			"gitlab/gitlab-runner-helper:x86_64-v16.9.1 with digest "+
			"gitlab/gitlab-runner-helper@sha256:24432bb8b93507e7bc4b87327c24317029f1ea0315abf1bc7f71148f2555d681 ...",
	)
}

// TestDockerCommand_Pwsh runs a build under PowerShell Core and checks the
// reported PSVersionTable.
func TestDockerCommand_Pwsh(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	build := getBuildForOS(t, common.GetRemoteSuccessfulBuild)
	build.Image.Name = common.TestPwshImage
	build.Runner.Shell = shells.SNPwsh
	build.Job.Steps = spec.Steps{
		spec.Step{
			Name: spec.StepNameScript,
			Script: []string{
				"Write-Output $PSVersionTable",
			},
			Timeout:      120,
			When:         spec.StepWhenAlways,
			AllowFailure: false,
		},
	}

	out,
err := buildtest.RunBuildReturningOutput(t, &build) assert.NoError(t, err) assert.Regexp(t, `PSVersion\s+7.1.1`, out) assert.Regexp(t, `PSEdition\s+Core`, out) } func TestDockerCommandWithDoingPruneAndAfterScript(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") successfulBuild, err := common.GetRemoteSuccessfulBuildWithAfterScript() dockerSocket := "/var/run/docker.sock" successfulBuild.Variables = append(successfulBuild.Variables, spec.Variable{ Key: "DOCKER_HOST", Value: "unix://" + dockerSocket, }) // In CI, it's possible that DOCKER_HOST has been overridden to a different unix // path to usual, so we cater for that. // // This is not something we can typically do outside of CI, because overriding // won't always work (DOCKER_HOST pointing to a file that's on the host, and not VM // in a Docker/Rancher Desktop scenario). In that case, leaving the default is // more likely to work. if _, ok := os.LookupEnv("CI"); ok { if sock := os.Getenv("DOCKER_HOST"); strings.HasPrefix(sock, "unix://") { dockerSocket = strings.TrimPrefix(sock, "unix://") } } // This scripts removes self-created containers that do exit // It will fail if: cannot be removed, or no containers is found // It is assuming that name of each runner created container starts // with `runner-doprune-` successfulBuild.Steps[0].Script = spec.StepScript{ "docker ps -a -f status=exited | grep runner-doprune-", "docker rm $(docker ps -a -f status=exited | grep runner-doprune- | awk '{print $1}')", } assert.NoError(t, err) build := &common.Build{ Job: successfulBuild, Runner: &common.RunnerConfig{ RunnerCredentials: common.RunnerCredentials{ Token: "doprune", }, RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ Image: common.TestDockerGitImage, PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, Volumes: []string{ dockerSocket + ":/var/run/docker.sock", }, }, }, }, ExecutorProvider: 
			docker_executor.NewProvider(),
	}

	err = build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout})
	assert.NoError(t, err)
}

// TestDockerCommandRunAttempts removes the build container mid-run and checks
// that the step_script stage is retried EXECUTOR_JOB_SECTION_ATTEMPTS times
// before the build is failed.
func TestDockerCommandRunAttempts(t *testing.T) {
	t.Skip("Skipping until https://gitlab.com/gitlab-org/gitlab-runner/-/issues/25385 is resolved.")

	helpers.SkipIntegrationTests(t, "docker", "info")

	sleepCMD := "sleep 60"
	executorStageAttempts := 2

	build := getBuildForOS(t, common.GetRemoteSuccessfulBuild)
	// Token is also the container-name filter used by removeBuildContainer.
	build.Runner.RunnerCredentials.Token = "misscont"
	build.Job.Steps = spec.Steps{
		spec.Step{
			Name: spec.StepNameScript,
			Script: []string{
				sleepCMD,
			},
			Timeout:      120,
			When:         spec.StepWhenAlways,
			AllowFailure: false,
		},
	}
	build.Job.Variables = append(build.Job.Variables, spec.Variable{
		Key:    common.ExecutorJobSectionAttempts,
		Value:  strconv.Itoa(executorStageAttempts),
		Public: true,
	})

	trace := newSafeBuffer()

	runFinished := make(chan struct{})
	go func() {
		err := build.Run(&common.Config{}, &common.Trace{Writer: io.MultiWriter(trace, os.Stdout)})
		// Only make sure that the build failed. Docker can return different
		// kind of errors when a container is removed for example exit code 137,
		// there is no guarantee on what failure is returned.
		assert.Error(t, err)
		close(runFinished)
	}()

	// Waiting until we reach the first sleep command in the build.
	for {
		if !strings.Contains(trace.String(), sleepCMD) {
			time.Sleep(time.Second)
			continue
		}

		break
	}

	attempts := 0
	for i := 0; i < executorStageAttempts; i++ {
		assertFailedToInspectContainer(t, trace, &attempts)
	}

	assert.Equal(
		t,
		executorStageAttempts,
		attempts,
		"The %s stage should be retried at least once",
		"step_script",
	)
	<-runFinished
}

// assertFailedToInspectContainer removes the current build container and waits
// until the trace reports it as not found, counting one retry attempt.
func assertFailedToInspectContainer(t *testing.T, trace *safeBuffer, attempts *int) {
	// If there is already an exit code, return early since a new container will
	// never be scheduled.
	if strings.Contains(trace.String(), "exit code") {
		return
	}

	containerID := <-removeBuildContainer(t)
	for {
		if !strings.Contains(trace.String(), fmt.Sprintf("Container %q not found or removed", containerID)) {
			time.Sleep(time.Second)
			continue
		}

		*attempts++
		break
	}
}

// removeBuildContainer polls for a container whose name contains "misscont",
// force-removes every match and returns a buffered channel carrying the first
// removed container's ID.
func removeBuildContainer(t *testing.T) <-chan string {
	removedContainer := make(chan string, 1)
	defer close(removedContainer)

	client, err := docker.New(docker.Credentials{})
	require.NoError(t, err, "creating docker client")
	defer client.Close()

	var list []types.Container
	// Keep checking containers until we get the container that we want.
	for len(list) == 0 {
		time.Sleep(time.Second)
		nameFilter := filters.Arg("name", "misscont")
		containerList := container.ListOptions{
			Filters: filters.NewArgs(nameFilter),
		}
		list, err = client.ContainerList(context.Background(), containerList)
		require.NoError(t, err)
	}

	for _, ctr := range list {
		err := client.ContainerRemove(context.Background(), ctr.ID, container.RemoveOptions{Force: true})
		require.NoError(t, err)
	}

	removedContainer <- list[0].ID

	return removedContainer
}

// TestDockerCommandRunAttempts_InvalidAttempts checks that an out-of-range
// EXECUTOR_JOB_SECTION_ATTEMPTS value falls back to the default with a warning.
func TestDockerCommandRunAttempts_InvalidAttempts(t *testing.T) {
	helpers.SkipIntegrationTests(t, "docker", "info")

	build := getBuildForOS(t, common.GetRemoteSuccessfulBuild)
	build.Job.Variables = append(build.Job.Variables, spec.Variable{
		Key:    common.ExecutorJobSectionAttempts,
		Value:  strconv.Itoa(999),
		Public: true,
	})

	buf := new(bytes.Buffer)
	err := build.Run(&common.Config{}, &common.Trace{Writer: buf})
	require.NoError(t, err)

	require.Contains(t, buf.String(), "WARNING: EXECUTOR_JOB_SECTION_ATTEMPTS: number of attempts out of the range [1, 10], using default 1")
}

// TestDockerCommand_WriteToVolumeNonRootImage verifies that a non-root image
// can write to a configured cache volume (prepared by the helper image).
func TestDockerCommand_WriteToVolumeNonRootImage(t *testing.T) {
	// non root images on Windows work differently, and `cache-init` doesn't
	// work on Windows
	// https://gitlab.com/gitlab-org/gitlab-runner/-/issues/25480.
	if runtime.GOOS == "windows" {
		t.Skip("Skipping unix test on windows")
	}

	helpers.SkipIntegrationTests(t, "docker", "info")

	const volumeBind = "/test"
	const helperImage = "gitlab/gitlab-runner-helper:x86_64-v16.9.1"

	client, err := docker.New(docker.Credentials{})
	require.NoError(t, err, "creating docker client")

	build := getBuildForOS(t, common.GetRemoteSuccessfulBuild)
	build.Runner.Docker.Volumes = append(build.Runner.Docker.Volumes, volumeBind)
	build.Runner.Docker.HelperImage = helperImage
	// Ensure ProxyExec is disabled as the gitlab-runner-helper image above doesn't contain
	// the proxy_exec subcommand.
	build.Runner.RunnerSettings.ProxyExec = func() *bool { v := false; return &v }()

	build.Job.Steps = spec.Steps{
		spec.Step{
			Name: spec.StepNameScript,
			Script: []string{
				"echo test > /test/test.txt",
			},
			Timeout:      120,
			When:         spec.StepWhenAlways,
			AllowFailure: false,
		},
	}
	build.Image.Name = common.TestAlpineNoRootImage

	defer func() {
		// Volume name mirrors how the executor derives cache volume names from
		// the project name plus the md5 of the bind path.
		volumeName := fmt.Sprintf("%s-cache-%x", build.ProjectUniqueName(), md5.Sum([]byte(volumeBind)))

		err = client.VolumeRemove(context.Background(), volumeName, true)
		require.NoError(t, err)
	}()

	defer client.Close()

	err = buildtest.RunBuild(t, &build)
	assert.NoError(t, err)
}

// TestChownAndUmaskUsage checks FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: with the
// flag off, file modes are widened by umask and ownership stays root; with the
// flag on, original modes are kept and ownership is changed for non-root
// images.
func TestChownAndUmaskUsage(t *testing.T) {
	// On Windows we don't have the chown/umask problem so no need
	// for doing the tests. Especially that the test is specific for
	// Unix like platform
	if runtime.GOOS == "windows" {
		t.Skip("Skipping unix test on windows")
	}

	helpers.SkipIntegrationTests(t, "docker", "info")

	// nolint:lll
	umaskUsedUserNotChanged := func(t *testing.T, output string) {
		assert.NotContains(t, output, "Changing ownership of files")
		assert.Regexp(t, `drwxrwxrwx\s+[0-9]+\s+root\s+root\s+[0-9a-zA-Z: ]+\s+director`, output, "directory permissions changed by umask, user root")
		assert.Regexp(t, `-rwxrwxrwx\s+[0-9]+\s+root\s+root\s+[0-9a-zA-Z: ]+\s+executable-file`, output, "executable-file permissions changed by umask, user root")
		assert.Regexp(t, `-rw-rw-rw-\s+[0-9]+\s+root\s+root\s+[0-9a-zA-Z: ]+\s+regular-file`, output, "regular-file permissions changed by umask, user root")
	}
	// nolint:lll
	umaskNotUsedUserNotChanged := func(t *testing.T, output string) {
		assert.NotContains(t, output, "Changing ownership of files")
		assert.Regexp(t, `drwxr-xr-x\s+[0-9]+\s+root\s+root\s+[0-9a-zA-Z: ]+\s+director`, output, "directory permissions not changed by umask, user root")
		assert.Regexp(t, `-rwxr-xr-x\s+[0-9]+\s+root\s+root\s+[0-9a-zA-Z: ]+\s+executable-file`, output, "executable-file permissions not changed by umask, user root")
		assert.Regexp(t, `-rw-r--r--\s+[0-9]+\s+root\s+root\s+[0-9a-zA-Z: ]+\s+regular-file`, output, "regular-file permissions not changed by umask, user root")
	}
	// nolint:lll
	umaskNotUsedUserChanged := func(t *testing.T, output string) {
		assert.Contains(t, output, "Changing ownership of files")
		assert.Regexp(t, `drwxr-xr-x\s+[0-9]+\s+alpine\s+alpine\s+[0-9a-zA-Z: ]+\s+director`, output, "directory permissions not changed by umask, user alpine")
		assert.Regexp(t, `-rwxr-xr-x\s+[0-9]+\s+alpine\s+alpine\s+[0-9a-zA-Z: ]+\s+executable-file`, output, "executable-file permissions not changed by umask, user alpine")
		assert.Regexp(t, `-rw-r--r--\s+[0-9]+\s+alpine\s+alpine\s+[0-9a-zA-Z: ]+\s+regular-file`, output, "regular-file permissions not changed by umask, user alpine")
	}
	// Pin the file-permissions fixture repo to a known commit so the listed
	// modes are stable.
	gitInfo := spec.GitInfo{
		RepoURL:   "https://gitlab.com/gitlab-org/ci-cd/tests/file-permissions.git",
		Sha:       "050d238e16c5962fc16e49ab1b6be1be39778b6c",
		BeforeSha: "0000000000000000000000000000000000000000",
		Ref:       "main",
		RefType:   spec.RefTypeBranch,
		Refspecs:  []string{"+refs/heads/*:refs/origin/heads/*", "+refs/tags/*:refs/tags/*"},
	}

	tests := map[string]struct {
		ffValue      string
		testImage    string
		assertOutput func(t *testing.T, output string)
	}{
		"FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR not set on root image": {
			ffValue:      "",
			testImage:    common.TestAlpineImage,
			assertOutput: umaskUsedUserNotChanged,
		},
		"FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR set explicitly to false on root image": {
			ffValue:      "false",
			testImage:    common.TestAlpineImage,
			assertOutput: umaskUsedUserNotChanged,
		},
		"FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR set to true on root image": {
			ffValue:      "true",
			testImage:    common.TestAlpineImage,
			assertOutput: umaskNotUsedUserNotChanged,
		},
		"FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR not set on non-root image": {
			ffValue:      "",
			testImage:    common.TestAlpineNoRootImage,
			assertOutput: umaskUsedUserNotChanged,
		},
		"FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR set explicitly to false on non-root image": {
			ffValue:      "false",
			testImage:    common.TestAlpineNoRootImage,
			assertOutput: umaskUsedUserNotChanged,
		},
		"FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR set to true on non-root image": {
			ffValue:      "true",
			testImage:    common.TestAlpineNoRootImage,
			assertOutput: umaskNotUsedUserChanged,
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			jobResponse, err := common.GetRemoteBuildResponse("ls -l")
			require.NoError(t, err)

			jobResponse.GitInfo = gitInfo
			jobResponse.Variables = append(jobResponse.Variables, spec.Variable{
				Key:   featureflags.DisableUmaskForDockerExecutor,
				Value: tt.ffValue,
			})

			build := &common.Build{
				Job: jobResponse,
				Runner: &common.RunnerConfig{
					RunnerSettings: common.RunnerSettings{
						Executor: "docker",
						Docker: &common.DockerConfig{
							Image: tt.testImage,
							PullPolicy:
							common.StringOrArray{common.PullPolicyIfNotPresent},
						},
					},
				},
				ExecutorProvider: docker_executor.NewProvider(),
			}

			output, err := buildtest.RunBuildReturningOutput(t, build)
			require.NoError(t, err)

			tt.assertOutput(t, output)
		})
	}
}

// TestBuildLogLimitExceeded checks that a job exceeding the output limit is
// handled by the shared buildtest scenario.
func TestBuildLogLimitExceeded(t *testing.T) {
	helpers.SkipIntegrationTests(t, "docker", "info")

	buildtest.RunRemoteBuildWithJobOutputLimitExceeded(t, getRunnerConfigForOS(t), setupExecutor)
}

// TestCleanupProjectGitClone runs the shared cleanup scenario with the clone
// strategy.
func TestCleanupProjectGitClone(t *testing.T) {
	helpers.SkipIntegrationTests(t, "docker", "info")

	build := getBuildForOS(t, common.GetRemoteSuccessfulBuild)

	buildtest.RunBuildWithCleanupGitClone(t, &build)
}

// TestCleanupProjectGitFetch runs the shared cleanup scenario with the fetch
// strategy, seeding an untracked file that must be removed.
func TestCleanupProjectGitFetch(t *testing.T) {
	helpers.SkipIntegrationTests(t, "docker", "info")

	untrackedFilename := "untracked"

	build := getBuildForOS(t, func() (spec.Job, error) {
		return common.GetRemoteBuildResponse(
			buildtest.GetNewUntrackedFileIntoSubmodulesCommands(untrackedFilename, "", "")...,
		)
	})

	buildtest.RunBuildWithCleanupGitFetch(t, &build, untrackedFilename)
}

// TestCleanupProjectGitSubmoduleNormal seeds untracked files in the project
// and its submodule and runs the normal-submodule cleanup scenario.
func TestCleanupProjectGitSubmoduleNormal(t *testing.T) {
	helpers.SkipIntegrationTests(t, "docker", "info")

	untrackedFile := "untracked"
	untrackedSubmoduleFile := "untracked_submodule"

	build := getBuildForOS(t, func() (spec.Job, error) {
		return common.GetRemoteBuildResponse(
			buildtest.GetNewUntrackedFileIntoSubmodulesCommands(untrackedFile, untrackedSubmoduleFile, "")...,
		)
	})

	buildtest.RunBuildWithCleanupNormalSubmoduleStrategy(t, &build, untrackedFile, untrackedSubmoduleFile)
}

// TestCleanupProjectGitSubmoduleRecursive additionally seeds a nested
// submodule's untracked file and runs the recursive cleanup scenario.
func TestCleanupProjectGitSubmoduleRecursive(t *testing.T) {
	helpers.SkipIntegrationTests(t, "docker", "info")

	untrackedFile := "untracked"
	untrackedSubmoduleFile := "untracked_submodule"
	untrackedSubSubmoduleFile := "untracked_submodule_submodule"

	build := getBuildForOS(t, func() (spec.Job, error) {
		return common.GetRemoteBuildResponse(
			buildtest.GetNewUntrackedFileIntoSubmodulesCommands(
				untrackedFile, untrackedSubmoduleFile, untrackedSubSubmoduleFile)...,
		)
	})
	buildtest.RunBuildWithCleanupRecursiveSubmoduleStrategy(
		t, &build, untrackedFile, untrackedSubmoduleFile, untrackedSubSubmoduleFile,
	)
}

// TestDockerCommandServiceVariables verifies that service-level variables are passed
// to the service container and that references to job variables (e.g. $BUILD_VAR)
// are expanded inside the service environment.
func TestDockerCommandServiceVariables(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	build := getBuildForOS(t, common.GetRemoteSuccessfulBuild)
	// build.Variables and build.Job.Variables alias the same job payload here;
	// append through Job.Variables and store back on the build.
	build.Variables = append(build.Job.Variables,
		spec.Variable{
			Key:    "FF_NETWORK_PER_BUILD",
			Value:  "true",
			Public: true,
		}, spec.Variable{
			Key:    "BUILD_VAR",
			Value:  "BUILD_VAR_VALUE",
			Public: true,
		},
	)

	shell := "sh"
	if runtime.GOOS == "windows" {
		shell = shells.SNPowershell
	}

	// immediately timeout as triggering an error is the only way to get a
	// service to send its output to the log
	build.Runner.Docker.WaitForServicesTimeout = 1
	build.Services = spec.Services{
		spec.Image{
			Name: common.TestLivenessImage,
			Variables: []spec.Variable{
				{
					Key:   "SERVICE_VAR",
					Value: "SERVICE_VAR_VALUE",
				},
				{
					Key:   "SERVICE_VAR_REF_BUILD_VAR",
					Value: "$BUILD_VAR",
				},
			},
			Entrypoint: append([]string{shell, "-c"}, "echo SERVICE_VAR=$SERVICE_VAR SERVICE_VAR_REF_BUILD_VAR=$SERVICE_VAR_REF_BUILD_VAR"),
		},
	}

	var buffer bytes.Buffer
	err := build.Run(&common.Config{}, &common.Trace{Writer: &buffer})
	assert.NoError(t, err)

	out := buffer.String()
	assert.Contains(t, out, "SERVICE_VAR=SERVICE_VAR_VALUE")
	assert.Contains(t, out, "SERVICE_VAR_REF_BUILD_VAR=BUILD_VAR_VALUE")
}

// TestDockerCommandConflictingPullPolicies verifies that a pull policy requested by
// the job/image or runner config which is not in allowed_pull_policies fails the
// build with an error naming the offending policy and its source.
func TestDockerCommandConflictingPullPolicies(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	successfulBuild, err := common.GetRemoteSuccessfulBuild()
	require.NoError(t, err)
	successfulBuild.Image = spec.Image{Name: common.TestAlpineImage}

	// one shared build; each subtest mutates the pull-policy fields below
	build := &common.Build{
		Job: successfulBuild,
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				Executor: "docker",
				Docker: &common.DockerConfig{
					Image: common.TestAlpineImage,
				},
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}

	tests := map[string]struct {
		imagePullPolicies   []common.DockerPullPolicy
		pullPolicy          common.StringOrArray
		allowedPullPolicies []common.DockerPullPolicy
		wantErrRegex        string
	}{
		"allowed_pull_policies configured, default pull_policy": {
			imagePullPolicies:   nil,
			pullPolicy:          nil,
			allowedPullPolicies: []common.DockerPullPolicy{common.PullPolicyIfNotPresent},
			wantErrRegex:        `always.* Runner config \(default\) .*if-not-present`,
		},
		"allowed_pull_policies and pull_policy configured": {
			imagePullPolicies:   nil,
			pullPolicy:          common.StringOrArray{common.PullPolicyNever},
			allowedPullPolicies: []common.DockerPullPolicy{common.PullPolicyIfNotPresent},
			wantErrRegex:        `never.* Runner config .*if-not-present`,
		},
		"allowed_pull_policies and image pull_policy configured": {
			imagePullPolicies:   []common.DockerPullPolicy{common.PullPolicyAlways},
			pullPolicy:          nil,
			allowedPullPolicies: []common.DockerPullPolicy{common.PullPolicyIfNotPresent},
			wantErrRegex:        `always.* GitLab pipeline config .*if-not-present`,
		},
		"all configured": {
			imagePullPolicies:   []common.DockerPullPolicy{common.PullPolicyAlways},
			pullPolicy:          common.StringOrArray{common.PullPolicyNever},
			allowedPullPolicies: []common.DockerPullPolicy{common.PullPolicyIfNotPresent},
			wantErrRegex:        `always.* GitLab pipeline config .*if-not-present`,
		},
	}

	// NOTE: the loop variable `test` shadows the imported `test` package within this loop body.
	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			build.Job.Image.PullPolicies = test.imagePullPolicies
			build.Runner.RunnerSettings.Docker.PullPolicy = test.pullPolicy
			build.Runner.RunnerSettings.Docker.AllowedPullPolicies = test.allowedPullPolicies

			gotErr := build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout})

			require.Error(t, gotErr)
			assert.Regexp(t, regexp.MustCompile(test.wantErrRegex), gotErr.Error())
			assert.Contains(t, gotErr.Error(), `invalid pull policy for image "`+common.TestAlpineImage)
		})
	}
}

// Test_CaptureServiceLogs verifies the CI_DEBUG_SERVICES behavior: when enabled,
// service container output is captured into the job log with a [service:...] prefix;
// when disabled or set to a non-bool value it is not (with a warning for the latter).
func Test_CaptureServiceLogs(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	tests := map[string]struct {
		buildVars []spec.Variable
		assert    func(*testing.T,
			string, error)
	}{
		"enabled": {
			buildVars: []spec.Variable{
				{
					Key:    "CI_DEBUG_SERVICES",
					Value:  "true",
					Public: true,
				}, {
					Key:    "POSTGRES_PASSWORD",
					Value:  "password",
					Public: true,
				},
			},
			assert: func(t *testing.T, out string, err error) {
				assert.NoError(t, err)
				assert.NotContains(t, out, "WARNING: CI_DEBUG_SERVICES: expected bool got \"blammo\", using default value: false")
				// Check for service prefixes and messages separately to handle interleaved output
				assert.Regexp(t, `\[service:(postgres-db|db-postgres)\]`, out)
				assert.Regexp(t, `The files belonging to this database system will be owned by user "postgres"`, out)
				assert.Regexp(t, `database system is ready to accept connections`, out)
				assert.Regexp(t, `\[service:(redis-cache|cache-redis)\]`, out)
				assert.Regexp(t, `oO0OoO0OoO0Oo Redis is starting oO0OoO0OoO0O`, out)
				assert.Regexp(t, `Ready to accept connections`, out)
			},
		},
		"not enabled": {
			assert: func(t *testing.T, out string, err error) {
				assert.NoError(t, err)
				assert.NotRegexp(t, `\[service:(postgres-db|db-postgres)\]`, out)
				assert.NotRegexp(t, `\[service:(redis-cache|cache-redis)\]`, out)
				assert.NotRegexp(t, `oO0OoO0OoO0Oo Redis is starting oO0OoO0OoO0O`, out)
				assert.NotRegexp(t, `Ready to accept connections`, out)
			},
		},
		"bogus value": {
			buildVars: []spec.Variable{{
				Key:    "CI_DEBUG_SERVICES",
				Value:  "blammo",
				Public: true,
			}},
			assert: func(t *testing.T, out string, err error) {
				assert.NoError(t, err)
				// a non-bool value falls back to the default (disabled) and warns
				assert.Contains(t, out, "WARNING: CI_DEBUG_SERVICES: expected bool got \"blammo\", using default value: false")
				assert.NotRegexp(t, `\[service:(postgres-db|db-postgres)\]`, out)
				assert.NotRegexp(t, `\[service:(redis-cache|cache-redis)\]`, out)
				assert.NotRegexp(t, `oO0OoO0OoO0Oo Redis is starting oO0OoO0OoO0O`, out)
				assert.NotRegexp(t, `Ready to accept connections`, out)
			},
		},
	}

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			build := getBuildForOS(t, common.GetRemoteSuccessfulBuild)
			build.Services = append(build.Services, spec.Image{Name:
				"postgres:14.4", Alias: "db"})
			build.Services = append(build.Services, spec.Image{Name: "redis:7.0", Alias: "cache"})
			build.Variables = tt.buildVars
			out, err := buildtest.RunBuildReturningOutput(t, &build)
			tt.assert(t, out, err)
		})
	}
}

// Test_ExpandingVolumes verifies that variables in [runners.docker] volume paths
// (including references to runner-environment variables) are expanded, and that a
// volume with the same expanded path is reused across two consecutive jobs: the
// first job writes a marker file, the second asserts the file is still there.
func Test_ExpandingVolumes(t *testing.T) {
	helpers.SkipIntegrationTests(t, "docker", "info")

	// %[1]s is the marker-file path, %[2]s its expected content.
	// NOTE(review): the newlines inside these scripts were reconstructed from a
	// line-collapsed dump — verify against the upstream file.
	testScripts := map[string]map[string]string{
		"non-windows": {
			"prepare": `
set -x
testFile='%[1]s'
test -e "$testFile" && {
	echo >&2 "Expected '$testFile' not to exist"
	exit 1
}
echo '%[2]s' > "$testFile"
`,
			"check": `
set -x
testFile='%[1]s'
test -e "$testFile" || {
	echo >&2 "Expected '$testFile' to exist"
	exit 1
}
test '%[2]s' == "$(cat "$testFile")"
`,
		},
		"windows": {
			"prepare": `
Set-PSDebug -Trace 2
$testFile = '%[1]s'
if ([System.IO.File]::Exists($testFile)) {
	$host.ui.WriteErrorLine("Expected '$testFile' not to exist")
	exit 1
}
echo '%[2]s' > $testFile
`,
			"check": `
Set-PSDebug -Trace 2
$testFile = '%[1]s'
if (-not [System.IO.File]::Exists($testFile)) {
	$host.ui.WriteErrorLine("Expected '$testFile' to exist")
	exit 1
}
if (-not (Get-Content $testFile).equals('%[2]s')) {
	exit 1
}
`,
		},
	}

	randString := strconv.Itoa(rand.Int())
	runnerEnv := []string{"FOO=theFoo"}
	// SOME_VAR chains ${FOO} (runner env) and ${BAR} (job var) to test nested expansion
	jobVariables := spec.Variables{
		{Key: "SOME_VAR", Value: "${FOO}-${BAR}-theBlipp"},
		{Key: "BAR", Value: "theBar"},
		{Key: "RANDOM", Value: randString},
		{Key: "GIT_STRATEGY", Value: string(common.GitNone)},
	}
	volumes := []string{"/tmp/${SOME_VAR}/${RANDOM}"}
	testFile := filepath.Join("/tmp/theFoo-theBar-theBlipp", randString, "testFile")
	prepareScript := testScripts["non-windows"]["prepare"]
	checkScript := testScripts["non-windows"]["check"]

	if runtime.GOOS == test.OSWindows {
		volumes = []string{`c:\tmp\${SOME_VAR}\${RANDOM}`}
		testFile = filepath.Join(`c:\tmp\theFoo-theBar-theBlipp`, randString, "testFile")
		prepareScript = testScripts["windows"]["prepare"]
		checkScript = testScripts["windows"]["check"]
	}

	build := getBuildForOS(t, common.GetRemoteSuccessfulBuild)
	build.Job.Variables =
		jobVariables
	build.Runner.Docker.Volumes = volumes
	build.Runner.Environment = runnerEnv

	// ensures that the volume is mounted and can be written to.
	build.Job.Steps[0].Script[0] = fmt.Sprintf(prepareScript, testFile, randString)
	_, err := buildtest.RunBuildReturningOutput(t, &build)
	if !assert.NoError(t, err) {
		return
	}

	// ensures that the volume with same vars is cached/kept around, and the same volume is mounted again.
	build.Job.Steps[0].Script[0] = fmt.Sprintf(checkScript, testFile, randString)
	_, err = buildtest.RunBuildReturningOutput(t, &build)
	assert.NoError(t, err)
}

// Test_ContainerOptionsExpansion verifies that job variables are expanded in the
// runner's docker image name, helper image flavor, and service name/alias.
func Test_ContainerOptionsExpansion(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	successfulBuild, err := common.GetRemoteSuccessfulBuildWithDumpedVariables()
	assert.NoError(t, err)

	jobVars := spec.Variables{
		{Key: "CI_DEBUG_SERVICES", Value: "true", Public: true},
		{Key: "POSTGRES_PASSWORD", Value: "password", Public: true},
		{Key: "JOB_IMAGE", Value: "alpine:latest"},
		{Key: "HELPER_IMAGE_FLAVOR", Value: "alpine"},
		{Key: "SRVS_IMAGE", Value: "postgres:latest"},
		{Key: "SRVS_IMAGE_ALIAS", Value: "db"},
	}
	successfulBuild.Variables = append(successfulBuild.Variables, jobVars...)
	build := &common.Build{
		Job: successfulBuild,
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				Executor: "docker",
				Docker: &common.DockerConfig{
					// all three are variable references expanded from jobVars above
					Image:             "$JOB_IMAGE",
					HelperImageFlavor: "$HELPER_IMAGE_FLAVOR",
					Services: []common.Service{
						{Name: "$SRVS_IMAGE", Alias: "$SRVS_IMAGE_ALIAS"},
					},
				},
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}

	out, err := buildtest.RunBuildReturningOutput(t, build)
	assert.NoError(t, err)
	assert.Contains(t, out, "Pulling docker image alpine:latest")
	assert.Contains(t, out, "Pulling docker image postgres:latest")
	assert.Regexp(t, `\[service:(postgres-db|db-postgres)\]`, out)
}

// TestDockerCommandWithRunnerServiceEnvironmentVariables verifies that environment
// entries on [[runners.docker.services]] reach the service container, and that
// references to job-level variables inside them are expanded.
func TestDockerCommandWithRunnerServiceEnvironmentVariables(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	successfulBuild, err := common.GetRemoteSuccessfulBuild()
	assert.NoError(t, err)

	// Adding a gitlab-ci.yml variable to test the expansion of the service env variables
	successfulBuild.Variables = append(successfulBuild.Variables, spec.Variable{
		Key:    "MY_GLOBAL_VAR",
		Value:  "my_global_var_value",
		Public: true,
	})

	build := &common.Build{
		Job: successfulBuild,
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				Executor: "docker",
				Docker: &common.DockerConfig{
					Image:      common.TestAlpineImage,
					PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent},
					Services: []common.Service{
						{
							Name: common.TestAlpineImage,
							Environment: []string{
								// expanded service env var
								"EXPANDED=$MY_GLOBAL_VAR",
								"FOO=value from [[runners.docker.services]]",
							},
							Entrypoint: []string{"/bin/sh", "-c"},
							Command:    []string{"echo -e \"FOO = $FOO\nEXPANDED = $EXPANDED\""},
						},
					},
				},
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}

	out := bytes.NewBuffer(nil)
	err = build.Run(&common.Config{}, &common.Trace{Writer: out})
	assert.NoError(t, err)
	assert.Contains(t, out.String(), "FOO = value from [[runners.docker.services]]")
	assert.Contains(t, out.String(), "EXPANDED = my_global_var_value")
}

// TestDockerBuildContainerGracefulShutdownNoInit runs the graceful-shutdown
// scenarios without docker-init as PID 1.
func TestDockerBuildContainerGracefulShutdownNoInit(t *testing.T) {
	testDockerBuildContainerGracefulShutdown(t, false)
}

// TestDockerBuildContainerGracefulShutdownWithInit runs the graceful-shutdown
// scenarios with FF_USE_INIT_WITH_DOCKER_EXECUTOR enabled.
func TestDockerBuildContainerGracefulShutdownWithInit(t *testing.T) {
	testDockerBuildContainerGracefulShutdown(t, true)
}

// testDockerBuildContainerGracefulShutdown asserts that when a job is terminated
// (script timeout, cancel, abort) the build container's process receives SIGTERM
// and gets the chance to clean up — verified via the long-script-with-cleanup.sh
// output markers ("Starting", "Caught SIGTERM", "Exiting").
func testDockerBuildContainerGracefulShutdown(t *testing.T, useInit bool) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	// each case returns a deferred-cleanup func; it may also arm a trigger that
	// terminates the build mid-step
	tests := map[string]func(*common.Build, *common.Trace) func(){
		// Comment this out for now. This test is flaky because the timeout includes docker image pull time, which will
		// always have outliers that exceed the entire job timeout.
		// "timeout exceeded": func(b *common.Build, _ *common.Trace) func() {
		// 	b.RunnerInfo.Timeout = 10
		// 	return func() {}
		// },
		"RUNNER_SCRIPT_TIMEOUT exceeded": func(b *common.Build, _ *common.Trace) func() {
			b.Variables = append(b.Variables, spec.Variable{
				Key:   "RUNNER_SCRIPT_TIMEOUT",
				Value: "2s",
			})
			return func() {}
		},
		"job cancelled": func(build *common.Build, tr *common.Trace) func() {
			return buildtest.OnStage(build, "step_", func() {
				time.Sleep(2 * time.Second)
				assert.True(t, tr.Cancel())
			})
		},
		"job aborted": func(build *common.Build, tr *common.Trace) func() {
			return buildtest.OnStage(build, "step_", func() {
				time.Sleep(2 * time.Second)
				assert.True(t, tr.Abort())
			})
		},
	}

	for name, testSetup := range tests {
		t.Run(name, func(t *testing.T) {
			successfulBuild, err := common.GetRemoteBuildResponse("./long-script-with-cleanup.sh")
			assert.NoError(t, err)
			// pin the SHA that contains long-script-with-cleanup.sh
			successfulBuild.GitInfo.Sha = "6353879af977aed75f7f75b7f8084a5cb6f1177a"
			build := &common.Build{
				Job: successfulBuild,
				Runner: &common.RunnerConfig{
					RunnerSettings: common.RunnerSettings{
						Executor: "docker",
						Docker: &common.DockerConfig{
							Image:      "alpine:latest",
							PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent},
						},
					},
				},
				ExecutorProvider: docker_executor.NewProvider(),
			}
			if useInit {
				build.Variables = append(build.Variables, spec.Variable{
					Key:
						"FF_USE_INIT_WITH_DOCKER_EXECUTOR",
					Value: "true",
				})
			}
			out := bytes.NewBuffer(nil)
			trace := common.Trace{Writer: out}
			// arm the termination trigger; the returned func is the cleanup
			defer testSetup(build, &trace)()
			err = build.Run(&common.Config{}, &trace)
			assert.Error(t, err)
			// the cleanup script's markers are timestamp-prefixed log lines
			assert.EventuallyWithT(t, func(t *assert.CollectT) {
				assert.Regexp(t, `(?m)^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z\s+\d{2}O\s+Starting [0-9]{1,2}`, out.String())
				assert.Regexp(t, `(?m)^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z\s+\d{2}O\s+Caught SIGTERM`, out.String())
				assert.Regexp(t, `(?m)^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z\s+\d{2}O\s+Exiting [0-9]{1,2}`, out.String())
			}, 5*time.Second, 1*time.Second)
		})
	}
}

// Test_FF_USE_INIT_WITH_DOCKER_EXECUTOR verifies that the feature flag controls
// whether /sbin/docker-init runs as PID 1 in the build container (checked via `ps -A`).
func Test_FF_USE_INIT_WITH_DOCKER_EXECUTOR(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	tests := map[string]bool{
		"use init":        true,
		"do not use init": false,
	}

	for name, useInit := range tests {
		t.Run(name, func(t *testing.T) {
			successfulBuild, err := common.GetRemoteBuildResponse("ps -A")
			assert.NoError(t, err)
			build := &common.Build{
				Job: successfulBuild,
				Runner: &common.RunnerConfig{
					RunnerSettings: common.RunnerSettings{
						Executor: "docker",
						Docker: &common.DockerConfig{
							Image:      "alpine:latest",
							PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent},
						},
					},
				},
				ExecutorProvider: docker_executor.NewProvider(),
			}
			if useInit {
				build.Variables = append(build.Variables, spec.Variable{
					Key:   "FF_USE_INIT_WITH_DOCKER_EXECUTOR",
					Value: "true",
				})
			}
			out := bytes.NewBuffer(nil)
			assert.NoError(t, build.Run(&common.Config{}, &common.Trace{Writer: out}))
			if useInit {
				assert.Regexp(t, "1 root 0:00 /sbin/docker-init --", out.String())
			} else {
				assert.NotRegexp(t, "1 root 0:00 /sbin/docker-init --", out.String())
			}
		})
	}
}

// Test_ServiceLabels verifies that service containers get the default
// com.gitlab.gitlab-runner.* labels plus user-configured container_labels (and
// their legacy prefixed duplicates). A goroutine inspects the short-lived service
// container while the build's `sleep 3` keeps it alive.
func Test_ServiceLabels(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	client, err := docker.New(docker.Credentials{})
	require.NoError(t, err)
	defer client.Close()

	wg := sync.WaitGroup{}
	wg.Add(1)

	expectedLabels := map[string]string{
		// default labels
		"com.gitlab.gitlab-runner.job.before_sha":    "1ea27a9695f80d7816d9e8ce025d9b2df83d0dd7",
		"com.gitlab.gitlab-runner.job.id":            "0",
		"com.gitlab.gitlab-runner.job.ref":           "main",
		"com.gitlab.gitlab-runner.job.sha":           "69b18e5ed3610cf646119c3e38f462c64ec462b7",
		"com.gitlab.gitlab-runner.job.timeout":       "2h0m0s",
		"com.gitlab.gitlab-runner.job.url":           "https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-pipeline-tests/gitlab-test/-/jobs/0",
		"com.gitlab.gitlab-runner.managed":           "true",
		"com.gitlab.gitlab-runner.pipeline.id":       "",
		"com.gitlab.gitlab-runner.project.id":        "0",
		"com.gitlab.gitlab-runner.project.runner_id": "0",
		"com.gitlab.gitlab-runner.runner.id":         "",
		"com.gitlab.gitlab-runner.runner.local_id":   "0",
		"com.gitlab.gitlab-runner.runner.system_id":  "",
		"com.gitlab.gitlab-runner.service":           "redis",
		"com.gitlab.gitlab-runner.service.version":   "7.0",
		"com.gitlab.gitlab-runner.type":              "service",
		// from user-defined config
		"FOO":                 "FOO",
		"my.custom.label.BAR": "BAR",
		// NOTE: these are only here for backwards-compatibility
		// see https://gitlab.com/gitlab-org/gitlab-runner/-/issues/39048
		"com.gitlab.gitlab-runner.FOO":                 "FOO",
		"com.gitlab.gitlab-runner.my.custom.label.BAR": "BAR",
	}

	go func() {
		defer wg.Done()

		// wait for service container to appear and get its name
		nameFilter := filters.NewArgs(filters.Arg("name", "redis-0"))
		containerList := container.ListOptions{Filters: nameFilter}
		// NOTE: this local shadows the imported `container` package inside the closure
		var container string
		require.Eventually(t, func() bool {
			list, err := client.ContainerList(context.Background(), containerList)
			require.NoError(t, err)
			if len(list) != 1 {
				return false
			}
			container = list[0].ID
			return true
		}, time.Second*10, time.Millisecond*500)

		// inspect container and assert expected labels exist...
		info, err := client.ContainerInspect(context.Background(), container)
		require.NoError(t, err)
		assert.Equal(t, expectedLabels, info.Config.Labels)
	}()

	successfulBuild, err := common.GetRemoteBuildResponse("sleep 3")
	successfulBuild.Services = spec.Services{{Name: "redis:7.0", Alias: "service-1"}}
	assert.NoError(t, err)
	build := &common.Build{
		Job: successfulBuild,
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				Executor: "docker",
				Docker: &common.DockerConfig{
					Image: "alpine:latest",
					// user-defined labels expected on the service container too
					ContainerLabels: map[string]string{
						"FOO":                 "FOO",
						"my.custom.label.BAR": "BAR",
					},
				},
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}

	err = build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout})
	assert.NoError(t, err)
	// wait for the inspection goroutine before finishing the test
	wg.Wait()
}

// Test_ServiceVolumeMounts verifies that services_devices bindings (matched by
// image glob) are mounted into the service container: the service's entrypoint
// lists /test, and the job log shows either the device contents or an ls error.
func Test_ServiceVolumeMounts(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")

	tests := map[string]struct {
		devices []string
		assert  func(string)
	}{
		"no device bindings": {
			assert: func(out string) {
				assert.Contains(t, out, "ls: /test: No such file or directory")
			},
		},
		"with device bindings": {
			devices: []string{"/dev/:/test/:ro"},
			assert: func(out string) {
				assert.NotContains(t, out, "ls: /test: No such file or directory")
				// /dev contents should be visible under the bind target
				assert.Contains(t, out, "tty")
				assert.Contains(t, out, "cpu")
			},
		},
	}

	// shared build; subtests toggle ServicesDevices below
	build := getBuildForOS(t, common.GetRemoteSuccessfulBuild)
	build.Services = append(build.Services, spec.Image{
		Name:       "alpine:latest",
		Entrypoint: []string{"ls", "/test"},
	})

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			build.Runner.Docker.ServicesDevices = nil
			if len(tt.devices) != 0 {
				build.Runner.Docker.ServicesDevices = map[string][]string{
					"alpine:*": tt.devices,
				}
			}

			out, err := buildtest.RunBuildReturningOutput(t, &build)
			assert.NoError(t, err)

			tt.assert(out)
		})
	}
}

// TestDockerCommandWithPlatform verifies that a per-service docker platform option
// selects the image architecture, and that an unset platform defaults to the host's.
func TestDockerCommandWithPlatform(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")
	test.SkipIfDockerDaemonAPIVersionNotAtLeast(t,
		minDockerDaemonVersion)

	successfulBuild, err := common.GetRemoteSuccessfulBuild()
	assert.NoError(t, err)

	// leave platform empty
	successfulBuild.Image.Name = common.TestAlpineImage
	successfulBuild.Services = spec.Services{
		{
			Name: "redis:7.0",
			ExecutorOptions: spec.ImageExecutorOptions{
				Docker: spec.ImageDockerOptions{Platform: "amd64"},
			},
		},
		{
			Name: "postgres:14.4",
			ExecutorOptions: spec.ImageExecutorOptions{
				Docker: spec.ImageDockerOptions{Platform: "arm64"}, // this image will download but fail to run, which is OK.
			},
		},
	}

	build := &common.Build{
		Job: successfulBuild,
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				Executor: "docker",
				Docker:   &common.DockerConfig{},
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}
	err = build.Run(&common.Config{}, &common.Trace{Writer: &bytes.Buffer{}})
	require.NoError(t, err)

	images := map[string]string{
		"redis:7.0":     "amd64",
		"postgres:14.4": "arm64",
		// unspecified platform defaults to host arch
		common.TestAlpineImage: runtime.GOARCH,
	}

	// verify the pulled images' architectures directly via the docker API
	client, err := docker.New(docker.Credentials{})
	require.NoError(t, err)
	defer client.Close()

	for img, arch := range images {
		info, _, err := client.ImageInspectWithRaw(context.Background(), img)
		require.NoError(t, err)
		assert.Equal(t, arch, info.Architecture)
	}
}

// TestDockerCommandWithUser verifies that the per-image docker `user` executor
// option runs the build step as that user (checked via `whoami`).
func TestDockerCommandWithUser(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")
	test.SkipIfDockerDaemonAPIVersionNotAtLeast(t, minDockerDaemonVersion)

	successfulBuild, err := common.GetRemoteBuildResponse("whoami")
	require.NoError(t, err)
	successfulBuild.Steps[0].Name = "wait"
	successfulBuild.Image.Name = common.TestAlpineImage
	successfulBuild.Image.ExecutorOptions.Docker.User = "squid"

	build := &common.Build{
		Job: successfulBuild,
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				Executor: "docker",
				Docker:   &common.DockerConfig{},
			},
		},
		ExecutorProvider: docker_executor.NewProvider(),
	}

	var buffer bytes.Buffer
	require.NoError(t,
		build.Run(&common.Config{}, &common.Trace{Writer: &buffer}))
	// the line after the echoed `whoami` command should be the configured user
	assert.Regexp(t, "whoami.*\n.*squid", buffer.String())
}

// TestGitCredHelper assert that the git cred helper works with the docker executor, with the container images we ship
// with. It clones a repo with mixed (private relative + public ssh) submodules,
// forcing HTTPS and tokenless git URLs, so submodule fetches must go through the
// credential helper.
func TestGitCredHelper(t *testing.T) {
	helpers.SkipIntegrationTests(t, "docker", "info")

	const (
		repoURLWithSubmodules = "https://gitlab.com/gitlab-org/ci-cd/gitlab-runner-pipeline-tests/submodules/mixed-submodules-test"
		repoShaWithSubmodules = "0a1093ff08de939dbd1625689d86deef18126a74"
	)
	submodules := []string{"private-repo-relative", "public-repo-ssh"}

	build := getBuildForOS(t, func() (spec.Job, error) {
		jobResponse, err := common.GetRemoteSuccessfulBuild()

		jobResponse.GitInfo.RepoURL = repoURLWithSubmodules
		jobResponse.GitInfo.Sha = repoShaWithSubmodules
		jobResponse.Variables.Set(
			spec.Variable{Key: "GIT_SUBMODULE_PATHS", Value: strings.Join(submodules, " ")},
			spec.Variable{Key: "GIT_SUBMODULE_STRATEGY", Value: string(common.SubmoduleRecursive)},
			spec.Variable{Key: "GIT_SUBMODULE_FORCE_HTTPS", Value: "1"},
			spec.Variable{Key: "CI_SERVER_HOST", Value: "gitlab.com"},
		)
		// real job token comes from the test environment
		buildtest.InjectJobTokenFromEnv(t, &jobResponse)

		return jobResponse, err
	})
	buildtest.SetBuildFeatureFlag(&build, featureflags.GitURLsWithoutTokens, true)
	build.Runner.RunnerCredentials.URL = "https://gitlab.com/"

	_, err := buildtest.RunBuildReturningOutput(t, &build)
	assert.NoError(t, err)
}

// TestPwshGitCredHelper ensures that the git credential helper, rendered by the shellwriter, works correctly across
// different versions of pwsh, specifically the ones we have special implementation for.
// We use the plain upstream powershell images. This has the side effect, that we have to install git as part of the
// build.
func TestPwshGitCredHelper(t *testing.T) {
	helpers.SkipIntegrationTests(t, "docker", "info")

	const (
		// run the "main" test script with debugging enabled
		debug = false

		// for windows: where to get MinGit
		minGitURL = "https://github.com/git-for-windows/git/releases/download/v2.49.0.windows.1/MinGit-2.49.0-64-bit.zip"
	)

	tests := map[string]struct {
		image                string
		withNativeArgPassing bool
	}{
		"7.1":                  {image: "mcr.microsoft.com/powershell:7.1.5-%s"},
		"7.2":                  {image: "mcr.microsoft.com/powershell:7.2-%s"},
		"7.2-nativeArgPassing": {image: "mcr.microsoft.com/powershell:7.2-%s", withNativeArgPassing: true},
		"7.3":                  {image: "mcr.microsoft.com/powershell:7.3-%s"},
	}

	// linux defaults; overridden below when running on windows
	gitInstaller := "&{ apt-get update -y ; apt-get install -y git } | Out-Null"
	basePath := `/tmp/foo`
	imageMapper := func(i string) string { return fmt.Sprintf(i, "debian-11") }

	if runtime.GOOS == test.OSWindows {
		// The tests on windows take ages, because of the huge images, see:
		// https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5525#note_2493164643
		// As a middle ground we only run the windows tests across those pwsh versions.
		// Code still left in, in case we want to enable those eventually.
		t.Skip("Windows tests disabled in favour of pipeline performance, see: https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/5525#note_2493487328")

		gitInstaller = `&{` +
			`$dest = "C:\Program Files\Git"; $ProgressPreference = 'SilentlyContinue'; ` +
			`Invoke-WebRequest -Uri "${minGitURL}" -OutFile "$env:TEMP\mingit.zip"; ` +
			`Expand-Archive -Path "$env:TEMP\mingit.zip" -DestinationPath "$dest" -Force; ` +
			`$env:Path += ";${dest}\cmd"; ` +
			`[Environment]::SetEnvironmentVariable("Path", $env:Path, [System.EnvironmentVariableTarget]::User); ` +
			`$env:GIT_CONFIG_NOSYSTEM=1; git config --system --unset-all include.path; ` +
			`}`
		basePath = `c:\tmp\foo`
		imageMapper = func(i string) string {
			return getWindowsImage(t, i, map[string]string{
				windows.V1809: "nanoserver-1809",
				windows.V21H2: "windowsservercore-ltsc2022",
			})
		}
	}

	for name, tc := range tests {
		t.Run(name, func(t *testing.T) {
			t.Parallel()

			// build a pwsh script via the shell writer: init a repo, poison the
			// global cred store with invalid creds, install our cred helper, then
			// dump what `git credential fill` resolves.
			var w shells.ShellWriter = &shells.PsWriter{Shell: shells.SNPwsh, EOL: "\n"}

			confFile := w.Join(basePath, "cred.conf")

			// setup empty repo
			w.MkDir(basePath)
			w.Cd(basePath)
			w.Command("git", "init", "--quiet")

			// setup global caching git cred helper
			w.Command("git", "config", "--global", "credential.helper", "store")
			// inject invalid creds into global cred helper
			w.Line(`echo "url=https://invalidUser:invalidPass@foo.bar/repo" | git credential approve`)

			// configure the custom cred helper and include it locally
			w.SetupGitCredHelper(confFile, "credential", "some-user")
			w.Command("git", "config", "include.path", confFile)

			// dump out the creds
			w.Line(`echo "url=https://foo.bar/repo" | git credential fill`)

			script := w.Finish(debug)

			build := getBuildForOS(t, func() (spec.Job, error) {
				cmds := []string{gitInstaller}
				if debug {
					cmds = append(cmds, "Set-PSDebug -Trace 2", "$env:GIT_TRACE=2")
				}
				cmds = append(cmds, script)
				return common.GetRemoteBuildResponse(cmds...)
			})
			build.Runner.Docker.Image = imageMapper(tc.image)
			build.Runner.Docker.DisableCache = true
			build.Runner.Shell = shells.SNPwsh
			build.Variables = append(build.Variables,
				spec.Variable{Key: "GIT_STRATEGY", Value: "none"},
				spec.Variable{Key: "minGitURL", Value: minGitURL},
			)

			// with native arg passing, we need to enable the experimental feature in a separate shell session,
			// thus we prepend a step enabling the feature and run the actual script in a separate step
			if tc.withNativeArgPassing {
				build.Steps = append([]spec.Step{{
					Name:   "enable_experimental_feature",
					Script: spec.StepScript{`Enable-ExperimentalFeature -Name PSNativeCommandArgumentPassing`},
				}}, build.Steps...)
			}

			out, err := buildtest.RunBuildReturningOutput(t, &build)
			require.NoError(t, err)

			// the custom helper must win over the poisoned global store
			usernamePattern := regexp.MustCompile(`\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z\s+\S+\s+username=some-user\n`)
			assert.Regexp(t, usernamePattern, out)
			passwordPattern := regexp.MustCompile(`\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z\s+\S+\s+password=test-job-token\n`)
			assert.Regexp(t, passwordPattern, out)

			if tc.withNativeArgPassing {
				assert.Contains(t, out,
					"WARNING: Enabling and disabling experimental features do not take effect until next start of PowerShell.",
					"expected the experimental feature 'PSNativeCommandArgumentPassing' to be enabled",
				)
			}
		})
	}
}

// TestDockerCommand_MacAddressConfig verifies how the runner's configured
// mac_address interacts with the various network_mode values and the
// FF_NETWORK_PER_BUILD feature flag, by inspecting the created build container.
func TestDockerCommand_MacAddressConfig(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)
	helpers.SkipIntegrationTests(t, "docker", "info")
	test.SkipIfDockerDaemonAPIVersionNotAtLeast(t, minDockerDaemonVersion)

	macAddress := "92:d0:c6:0a:29:33"

	// daemon API >= 1.44 changes MAC handling for host networking (see cases below)
	apiVersionAtLeast1_44, err := test.IsDockerDaemonAPIVersionAtLeast("1.44")
	require.NoError(t, err)

	type testCase struct {
		networkMode     string
		networkPerBuild bool
		expectedRunErr  bool
		validate        func(*testing.T, types.ContainerJSON)
	}

	tests := map[string]testCase{
		"empty (user defined), network per build enabled": {networkMode: "", networkPerBuild: true,
			validate: func(t *testing.T, info types.ContainerJSON) {
				assert.Equal(t, "", info.NetworkSettings.MacAddress, "net settings")
				assert.Len(t, info.NetworkSettings.Networks, 1)
				for k, v := range info.NetworkSettings.Networks {
					// per-build network name carries the runner- prefix
					assert.Contains(t, k, "runner-")
					assert.Equal(t, macAddress, v.MacAddress, k+" network")
				}
			}},
		"empty (user defined), network per build disabled": {networkMode: "", networkPerBuild: false,
			validate: func(t *testing.T, info types.ContainerJSON) {
				assert.Equal(t, macAddress, info.NetworkSettings.MacAddress, "net settings")
				assert.Len(t, info.NetworkSettings.Networks, 1)
				for k, v := range info.NetworkSettings.Networks {
					assert.Equal(t, "bridge", k)
					assert.Equal(t, macAddress, v.MacAddress, k+" network")
				}
			}},
		"default, network per build enabled": {networkMode: "default", networkPerBuild: true,
			validate: func(t *testing.T, info types.ContainerJSON) {
				assert.Equal(t, macAddress, info.NetworkSettings.MacAddress, "net settings")
				assert.Len(t, info.NetworkSettings.Networks, 1)
				for k, v := range info.NetworkSettings.Networks {
					assert.Equal(t, "bridge", k)
					assert.Equal(t, macAddress, v.MacAddress, k+" network")
				}
			}},
		"default, network per build disabled": {networkMode: "default", networkPerBuild: false,
			validate: func(t *testing.T, info types.ContainerJSON) {
				assert.Equal(t, macAddress, info.NetworkSettings.MacAddress, "net settings")
				assert.Len(t, info.NetworkSettings.Networks, 1)
				for k, v := range info.NetworkSettings.Networks {
					assert.Equal(t, "bridge", k)
					assert.Equal(t, macAddress, v.MacAddress, k+" network")
				}
			}},
		"bridge, network per build enabled": {networkMode: "bridge", networkPerBuild: true,
			validate: func(t *testing.T, info types.ContainerJSON) {
				assert.Equal(t, macAddress, info.NetworkSettings.MacAddress, "net settings")
				assert.Len(t, info.NetworkSettings.Networks, 1)
				for k, v := range info.NetworkSettings.Networks {
					assert.Equal(t, "bridge", k)
					assert.Equal(t, macAddress, v.MacAddress, k+" network")
				}
			}},
		"bridge, network per build disabled": {networkMode: "bridge", networkPerBuild: false,
			validate: func(t *testing.T, info types.ContainerJSON) {
				assert.Equal(t, macAddress, info.NetworkSettings.MacAddress, "net settings")
				assert.Len(t, info.NetworkSettings.Networks, 1)
				for k, v := range info.NetworkSettings.Networks {
					assert.Equal(t, "bridge", k)
					assert.Equal(t, macAddress, v.MacAddress, k+" network")
				}
			}},

		// the cases below fail with "exit code 1" when run in a CI pipeline, and "conflicting options: mac-address and
		// the network mode" when run locally.
		"none, network per build enabled":  {networkMode: "none", networkPerBuild: true, expectedRunErr: true},
		"none, network per build disabled": {networkMode: "none", networkPerBuild: false, expectedRunErr: true},

		"host, network per build enabled": {
			networkMode:     "host",
			networkPerBuild: true,
			expectedRunErr:  !apiVersionAtLeast1_44,
			validate: func(t *testing.T, info types.ContainerJSON) {
				assert.Equal(t, "", info.NetworkSettings.MacAddress, "net settings")
				assert.Len(t, info.NetworkSettings.Networks, 1)
				for k, v := range info.NetworkSettings.Networks {
					assert.Equal(t, "host", k)
					assert.Equal(t, macAddress, v.MacAddress, k+" network")
				}
			},
		},
		"host, network per build disabled": {
			networkMode:     "host",
			networkPerBuild: false,
			expectedRunErr:  !apiVersionAtLeast1_44,
			validate: func(t *testing.T, info types.ContainerJSON) {
				assert.Equal(t, "", info.NetworkSettings.MacAddress, "net settings")
				assert.Len(t, info.NetworkSettings.Networks, 1)
				for k, v := range info.NetworkSettings.Networks {
					assert.Equal(t, "host", k)
					assert.Equal(t, macAddress, v.MacAddress, k+" network")
				}
			},
		},
	}

	// we'll make some direct docker API calls in this tests...
	client, err := docker.New(docker.Credentials{})
	require.NoError(t, err, "creating docker client")
	defer client.Close()
	ctx := context.Background()

	for name, tc := range tests {
		t.Run(name, func(t *testing.T) {
			// distinctive ID so we can find this subtest's container by name
			runnerID := 987654321

			// make a build according to the test case parameters...
			rc := getRunnerConfigForOS(t)
			rc.Docker.MacAddress = macAddress
			rc.Docker.NetworkMode = tc.networkMode
			build := getBuildForOS(t, func() (spec.Job, error) {
				// keep the container alive long enough to inspect it
				return common.GetRemoteBuildResponse("sleep 3")
			})
			build.Runner = rc
			build.ProjectRunnerID = runnerID
			build.Variables = append(build.Variables, spec.Variable{
				Key:   featureflags.NetworkPerBuild,
				Value: strconv.FormatBool(tc.networkPerBuild),
			})

			wg := sync.WaitGroup{}
			wg.Add(1)
			defer wg.Wait() // wait for build job to finish

			go func(t *testing.T, tc testCase) {
				defer wg.Done()
				// run the build...
				err := build.Run(&common.Config{}, &common.Trace{Writer: &bytes.Buffer{}})
				if tc.expectedRunErr {
					assert.Error(t, err, "running build")
				} else {
					require.NoError(t, err, "running build")
				}
			}(t, tc)

			if tc.expectedRunErr {
				// we expect build.Run to fail so there's nothing else to do...
				return
			}

			re := regexp.MustCompile("runner-.*-project-0-concurrent-" + strconv.Itoa(runnerID) + "-.*-build")
			var ctr types.Container

			// wait for the build container to be created...
require.Eventually(t, func() bool { list, err := client.ContainerList(ctx, container.ListOptions{}) assert.NoError(t, err, "listing containers") for _, l := range list { for _, n := range l.Names { if re.MatchString(n) { ctr = l return true } } } return false }, time.Second*10, time.Millisecond*500) // inspect the build container to examine the MacAddress configuration info, err := client.ContainerInspect(ctx, ctr.ID) assert.NoError(t, err, "inspecting container %q", ctr.ID) tc.validate(t, info) }) } } func Test_CacheVolumeProtected(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") tests := map[string]struct { protectedRef bool cacheKey string expectProtectedVolume bool }{ "not protected ref, not protected cache key": {false, "blammo", false}, "not protected ref, non_protected cache key": {false, "blammo-non_protected", false}, "protected ref, not protected cache key": {true, "blammo", true}, "not protected ref, protected cache key": {false, "blammo-protected", true}, "protected ref, protected cache key": {true, "blammo-protected", true}, } for name, tt := range tests { t.Run(name, func(t *testing.T) { successfulBuild, err := common.GetRemoteSuccessfulBuild() assert.NoError(t, err) successfulBuild.GitInfo.Protected = &tt.protectedRef successfulBuild.JobInfo.ProjectID = time.Now().Unix() successfulBuild.Cache = spec.Caches{ spec.Cache{ Key: tt.cacheKey, Paths: spec.ArtifactPaths{"cached/*"}, }, } build := &common.Build{ Job: successfulBuild, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ Image: common.TestAlpineImage, PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, Volumes: []string{"/cache"}, }, Cache: &cacheconfig.Config{}, }, }, ExecutorProvider: docker_executor.NewProvider(), } // Run a job. We only care that the cache volume is created. 
_, err = buildtest.RunBuildReturningOutput(t, build) require.NoError(t, err) client, err := docker.New(docker.Credentials{}) require.NoError(t, err, "creating docker client") defer client.Close() // Inspect the created cache volume vols, err := client.VolumeList(context.Background(), volume.ListOptions{ Filters: filters.NewArgs(filters.KeyValuePair{Key: "name", Value: build.ProjectRealUniqueName()}), }) require.NoError(t, err) assert.Len(t, vols.Volumes, 1) vol := vols.Volumes[0] assert.Equal(t, vol.Labels["com.gitlab.gitlab-runner.type"], "cache", "volume label 'com.gitlab.gitlab-runner.type' should be 'cache'") assert.Equal(t, vol.Labels["com.gitlab.gitlab-runner.destination"], "/cache", "volume label 'com.gitlab.gitlab-runner.destination' should be '/cache'") if tt.expectProtectedVolume { assert.True(t, strings.HasSuffix(vol.Name, "-protected"), "volume name should end in '-protected'") assert.Equal(t, vol.Labels["com.gitlab.gitlab-runner.protected"], "true", "volume label 'com.gitlab.gitlab-runner.protected' should be 'true'") } else { assert.False(t, strings.HasSuffix(vol.Name, "-protected"), "volume name should NOT end in '-protected'") assert.Equal(t, vol.Labels["com.gitlab.gitlab-runner.protected"], "false", "volume label 'com.gitlab.gitlab-runner.protected' should be 'false'") } }) } } func setupExecutor(t *testing.T, build *common.Build) { build.ExecutorProvider = docker_executor.NewProvider() } ================================================ FILE: executors/docker/docker_log_options_integration_test.go ================================================ //go:build integration package docker_test import ( "os" "runtime" "testing" "github.com/stretchr/testify/assert" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers" ) func TestDockerLogOptions(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") tests := map[string]struct { skip map[bool]string logOptions 
map[string]string services spec.Services expectedErrRE string }{ "invalid key rejected early": { logOptions: map[string]string{ "max-size": "10m", }, expectedErrRE: "invalid log options: only \\[\"env\" \"labels\"] are allowed, but found: \\[\"max-size\"\\]", }, "multiple invalid keys rejected early": { logOptions: map[string]string{ "max-size": "10m", "max-file": "3", "invalid-option-1": "value1", }, expectedErrRE: "invalid log options: only \\[\"env\" \"labels\"] are allowed, but found: \\[\"invalid-option-1\" \"max-file\" \"max-size\"\\]", }, "valid env configuration": { logOptions: map[string]string{ "env": "GITLAB_CI_JOB_ID,GITLAB_CI_JOB_NAME", }, }, "valid labels configuration": { logOptions: map[string]string{ "labels": "com.gitlab.gitlab-runner.type", }, }, "valid env and labels configuration": { logOptions: map[string]string{ "env": "GITLAB_CI_JOB_ID,GITLAB_CI_JOB_NAME", "labels": "com.gitlab.gitlab-runner.type", }, }, "empty configuration": { logOptions: map[string]string{}, }, "service container with invalid options": { skip: map[bool]string{ runtime.GOOS == "windows": "Service containers work differently on Windows", }, logOptions: map[string]string{ "max-size": "10m", }, services: spec.Services{ spec.Image{ Name: common.TestAlpineImage, }, }, expectedErrRE: "invalid log options: only \\[\"env\" \"labels\"] are allowed, but found: \\[\"max-size\"\\]", }, } for name, test := range tests { t.Run(name, func(t *testing.T) { // Check if test should be skipped for condition, reason := range test.skip { if condition { t.Skip(reason) } } build := getBuildForOS(t, common.GetSuccessfulBuild) build.Runner.Docker.LogOptions = test.logOptions // Configure services if specified if len(test.services) > 0 { build.Job.Services = test.services } build.Job.Variables = append( build.Job.Variables, spec.Variable{Key: "GIT_STRATEGY", Value: "none"}, ) err := build.Run(&common.Config{}, &common.Trace{Writer: os.Stdout}) if test.expectedErrRE == "" { assert.NoError(t, err) } 
else { var eerr *common.BuildError assert.ErrorAs(t, err, &eerr) assert.Equal(t, common.RunnerSystemFailure, eerr.FailureReason) assert.Regexp(t, test.expectedErrRE, eerr.Inner.Error()) } }) } } ================================================ FILE: executors/docker/docker_steps_integration_test.go ================================================ //go:build integration package docker_test import ( "testing" "github.com/stretchr/testify/assert" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/buildtest" "gitlab.com/gitlab-org/gitlab-runner/common/spec" docker_executor "gitlab.com/gitlab-org/gitlab-runner/executors/docker" "gitlab.com/gitlab-org/gitlab-runner/helpers" "gitlab.com/gitlab-org/gitlab-runner/helpers/test" ) var successAlwaysWantOut = []string{ `Executing "step_run" stage of the job script`, "Job succeeded", } func Test_StepsIntegration(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) helpers.SkipIntegrationTests(t, "docker", "info") test.SkipIfVariable(t, "CI_SKIP_STEPS_TESTS") tests := map[string]struct { steps string variables spec.Variables services spec.Services wantOut []string wantErr bool }{ "script": { steps: `- name: echo script: echo foo bar baz - name: ls script: ls -lh - name: env script: env`, wantOut: []string{ "foo bar baz", "PWD=/builds/gitlab-org/ci-cd/gitlab-runner-pipeline-tests/gitlab-test", }, }, "remote step": { steps: `- name: echo step: "https://gitlab.com/gitlab-org/ci-cd/runner-tools/echo-step@v5" inputs: echo: foo bar baz`, wantOut: []string{"foo bar baz"}, }, "local step": { steps: `- name: localecho step: "./steps/echo" inputs: message: foo bar baz`, wantOut: []string{"foo bar baz"}, }, "file variable": { steps: `- name: cat script: cat ${{ job.A_FILE_VAR }}`, variables: spec.Variables{{Key: "A_FILE_VAR", Value: "oh this is soo secret", File: true}}, wantOut: []string{"oh this is soo secret"}, }, "job variables should not appear in environment": { steps: `- name: echo script: 
echo ${{ env.FLIN_FLAN_FLON }}`, variables: spec.Variables{{Key: "FLIN_FLAN_FLON", Value: "flin, flan, flon"}}, wantOut: []string{ "ERROR: Job failed:", `evaluating expression failed at ".FLIN_FLAN_FLON": attribute not found`, }, wantErr: true, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { successfulBuild, err := common.GetRemoteStepsBuildResponse(tt.steps) assert.NoError(t, err) successfulBuild.Services = tt.services successfulBuild.Variables = append(successfulBuild.Variables, tt.variables...) build := &common.Build{ Job: successfulBuild, Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Executor: "docker", Docker: &common.DockerConfig{ Image: "fedora:latest", PullPolicy: common.StringOrArray{common.PullPolicyIfNotPresent}, Privileged: true, }, }, }, ExecutorProvider: docker_executor.NewProvider(), } wantOut := tt.wantOut out, err := buildtest.RunBuildReturningOutput(t, build) if !tt.wantErr { assert.NoError(t, err) wantOut = append(wantOut, successAlwaysWantOut...) 
} else { assert.Error(t, err) } for _, want := range wantOut { assert.Contains(t, out, want) } }) } } ================================================ FILE: executors/docker/docker_test.go ================================================ //go:build !integration package docker import ( "bufio" "context" "errors" "fmt" "io" "net" "os" "path/filepath" "regexp" "runtime" "strings" "testing" "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/image" "github.com/docker/docker/api/types/network" "github.com/docker/docker/api/types/system" "github.com/docker/docker/api/types/volume" "github.com/docker/go-units" "github.com/hashicorp/go-version" v1 "github.com/opencontainers/image-spec/specs-go/v1" "github.com/sirupsen/logrus" logrustest "github.com/sirupsen/logrus/hooks/test" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/step-runner/schema/v1" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/buildlogger" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/executors" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/networks" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/prebuilt" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/pull" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/user" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/volumes" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/volumes/parser" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/volumes/permission" "gitlab.com/gitlab-org/gitlab-runner/helpers/container/helperimage" "gitlab.com/gitlab-org/gitlab-runner/helpers/docker" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" "gitlab.com/gitlab-org/gitlab-runner/helpers/test" "gitlab.com/gitlab-org/gitlab-runner/shells" ) 
// TestParseDeviceStringOne verifies that a bare device path maps the same
// path on host and in container, with the default "rwm" cgroup permissions.
func TestParseDeviceStringOne(t *testing.T) {
	e := new(executor)
	device, err := e.parseDeviceString("/dev/kvm")
	assert.NoError(t, err)
	assert.Equal(t, "/dev/kvm", device.PathOnHost)
	assert.Equal(t, "/dev/kvm", device.PathInContainer)
	assert.Equal(t, "rwm", device.CgroupPermissions)
}

// TestParseDeviceStringTwo verifies the two-part "host:container" form,
// still defaulting the permissions to "rwm".
func TestParseDeviceStringTwo(t *testing.T) {
	e := new(executor)
	device, err := e.parseDeviceString("/dev/kvm:/devices/kvm")
	assert.NoError(t, err)
	assert.Equal(t, "/dev/kvm", device.PathOnHost)
	assert.Equal(t, "/devices/kvm", device.PathInContainer)
	assert.Equal(t, "rwm", device.CgroupPermissions)
}

// TestParseDeviceStringThree verifies the full "host:container:permissions"
// form, where explicit permissions override the default.
func TestParseDeviceStringThree(t *testing.T) {
	e := new(executor)
	device, err := e.parseDeviceString("/dev/kvm:/devices/kvm:r")
	assert.NoError(t, err)
	assert.Equal(t, "/dev/kvm", device.PathOnHost)
	assert.Equal(t, "/devices/kvm", device.PathInContainer)
	assert.Equal(t, "r", device.CgroupPermissions)
}

// TestParseDeviceStringFour verifies that a device string with more than
// three colon-separated parts is rejected with an error.
func TestParseDeviceStringFour(t *testing.T) {
	e := new(executor)
	_, err := e.parseDeviceString("/dev/kvm:/devices/kvm:r:oops")
	assert.Error(t, err)
}

// TestBindDeviceRequests checks translation of the `gpus` runner setting into
// Docker container device requests: "all" yields one request covering every
// GPU, the empty string yields none, and an unparsable value is an error.
func TestBindDeviceRequests(t *testing.T) {
	tests := []struct {
		gpus                  string
		expectedDeviceRequest []container.DeviceRequest
		expectedErr           bool
	}{
		{
			gpus: "all",
			expectedDeviceRequest: []container.DeviceRequest{
				{
					Driver:       "",
					Count:        -1, // -1 requests all available devices
					DeviceIDs:    nil,
					Capabilities: [][]string{{"gpu"}},
					Options:      map[string]string{},
				},
			},
		},
		{
			gpus:                  "",
			expectedDeviceRequest: nil,
		},
		{
			gpus:                  "somestring=thatshouldtriggeranerror",
			expectedDeviceRequest: nil,
			expectedErr:           true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.gpus, func(t *testing.T) {
			e := executor{
				AbstractExecutor: executors.AbstractExecutor{
					Config: common.RunnerConfig{
						RunnerSettings: common.RunnerSettings{
							Docker: &common.DockerConfig{
								Gpus: tt.gpus,
							},
						},
					},
				},
			}

			err := e.bindDeviceRequests()
			if tt.expectedErr {
				require.Error(t, err)
				return
			}

			require.NoError(t, err)
			require.Equal(t, tt.expectedDeviceRequest, e.deviceRequests)
		})
	}
}

// testAllowedImageDescription is one row of the allowed-image matching matrix
// shared by TestVerifyAllowedImage and TestIsInAllowedPrivilegedImages.
// (struct declaration continues on the next source line)
type testAllowedImageDescription struct {
	allowed bool
	image   string
	allowedImages []string
}

// testAllowedImages is the shared image/pattern matching matrix. Each row
// states whether `image` should match the wildcard patterns in
// `allowedImages`. As exercised by the rows below, `*` matches a single path
// segment while `**` spans multiple (possibly zero) segments, and a trailing
// `:*` matches any tag.
var testAllowedImages = []testAllowedImageDescription{
	{true, "ruby", []string{"*"}},
	{true, "ruby:3.3", []string{"*"}},
	{true, "ruby:latest", []string{"*"}},
	{true, "library/ruby", []string{"*/*"}},
	{true, "library/ruby:3.3", []string{"*/*"}},
	{true, "library/ruby:3.3", []string{"*/*:*"}},
	{true, "my.registry.tld/library/ruby", []string{"my.registry.tld/*/*"}},
	{true, "my.registry.tld/library/ruby:3.3", []string{"my.registry.tld/*/*:*"}},
	{true, "my.registry.tld/group/subgroup/ruby", []string{"my.registry.tld/*/*/*"}},
	{true, "my.registry.tld/group/subgroup/ruby:3.3", []string{"my.registry.tld/*/*/*:*"}},
	{true, "ruby", []string{"**/*"}},
	{true, "ruby:3.3", []string{"**/*"}},
	{true, "ruby:latest", []string{"**/*"}},
	{true, "library/ruby", []string{"**/*"}},
	{true, "library/ruby:3.3", []string{"**/*"}},
	{true, "library/ruby:3.3", []string{"**/*:*"}},
	{true, "my.registry.tld/library/ruby", []string{"my.registry.tld/**/*"}},
	{true, "my.registry.tld/library/ruby:3.3", []string{"my.registry.tld/**/*:*"}},
	{true, "my.registry.tld/group/subgroup/ruby", []string{"my.registry.tld/**/*"}},
	{true, "my.registry.tld/group/subgroup/ruby:3.3", []string{"my.registry.tld/**/*:*"}},
	{false, "library/ruby", []string{"*"}},
	{false, "library/ruby:3.3", []string{"*"}},
	{false, "my.registry.tld/ruby", []string{"*"}},
	{false, "my.registry.tld/ruby:3.3", []string{"*"}},
	{false, "my.registry.tld/library/ruby", []string{"*"}},
	{false, "my.registry.tld/library/ruby:3.3", []string{"*"}},
	{false, "my.registry.tld/group/subgroup/ruby", []string{"*"}},
	{false, "my.registry.tld/group/subgroup/ruby:3.3", []string{"*"}},
	{false, "library/ruby", []string{"*/*:*"}},
	{false, "my.registry.tld/group/subgroup/ruby", []string{"my.registry.tld/*/*"}},
	{false, "my.registry.tld/group/subgroup/ruby:3.3", []string{"my.registry.tld/*/*:*"}},
	{false, "library/ruby", []string{"**/*:*"}},
}

// TestVerifyAllowedImage runs the matrix through executor.verifyAllowedImage.
// (function body continues on the next source line)
func TestVerifyAllowedImage(t *testing.T) {
	e := new(executor)
	e.BuildLogger = buildlogger.New(nil,
		// (continuation of the buildlogger.New call started on the previous line)
		logrus.WithFields(logrus.Fields{}), buildlogger.Options{})

	// Each matrix row must be accepted or rejected per its `allowed` flag;
	// verifyAllowedImage signals rejection via a non-nil error.
	for _, test := range testAllowedImages {
		err := e.verifyAllowedImage(test.image, "", test.allowedImages, []string{})
		if err != nil && test.allowed {
			t.Errorf("%q must be allowed by %q", test.image, test.allowedImages)
		} else if err == nil && !test.allowed {
			t.Errorf("%q must not be allowed by %q", test.image, test.allowedImages)
		}
	}
}

// TestIsInAllowedPrivilegedImages runs the same allowed-image matrix against
// the privileged-image matcher, which reports a boolean instead of an error.
func TestIsInAllowedPrivilegedImages(t *testing.T) {
	for _, test := range testAllowedImages {
		res := isInAllowedPrivilegedImages(test.image, test.allowedImages)
		if !res && test.allowed {
			t.Errorf("%q must be allowed by %q", test.image, test.allowedImages)
		} else if res && !test.allowed {
			t.Errorf("%q must not be allowed by %q", test.image, test.allowedImages)
		}
	}
}

// executorWithMockClient builds an executor whose docker connector is stubbed
// to hand back the given mock client, reporting a Linux OS type, with a fresh
// empty build attached.
func executorWithMockClient(c *docker.MockClient) *executor {
	mockConnector := func(ctx context.Context, options common.ExecutorPrepareOptions, e *executor) error {
		e.dockerConn = &dockerConnection{Client: c}
		e.info = system.Info{OSType: helperimage.OSTypeLinux}
		return nil
	}
	e := &executor{
		dockerConnector: mockConnector,
	}
	e.Context = context.Background()
	e.Build = new(common.Build)
	return e
}

// TestHelperImageWithVariable checks that ${CI_RUNNER_REVISION} inside the
// helper_image setting is expanded before the helper image is resolved via
// the pull manager.
// (function body continues on the next source line)
func TestHelperImageWithVariable(t *testing.T) {
	c := docker.NewMockClient(t)
	p := pull.NewMockManager(t)

	runnerImageTag := "gitlab/gitlab-runner:" + common.AppVersion.Revision
	p.On("GetDockerImage", runnerImageTag, spec.ImageDockerOptions{}, []common.DockerPullPolicy(nil)).
		Return(&image.InspectResponse{ID: "helper-image"}, nil).
Once() e := executorWithMockClient(c) e.pullManager = p e.Config = common.RunnerConfig{} e.Config.Docker = &common.DockerConfig{ HelperImage: "gitlab/gitlab-runner:${CI_RUNNER_REVISION}", } img, err := e.getHelperImage() assert.NoError(t, err) require.NotNil(t, img) assert.Equal(t, "helper-image", img.ID) } func TestPrepareBuildsDir(t *testing.T) { tests := map[string]struct { dontSetupVolumeParser bool rootDir string volumes []string expectedSharedBuildsDir bool expectedError string }{ "rootDir mounted as host based volume": { rootDir: "/build", volumes: []string{"/build:/build"}, expectedSharedBuildsDir: true, }, "rootDir mounted as container based volume": { rootDir: "/build", volumes: []string{"/build"}, expectedSharedBuildsDir: false, }, "rootDir not mounted as volume": { rootDir: "/build", volumes: []string{"/folder:/folder"}, expectedSharedBuildsDir: false, }, "rootDir's parent mounted as volume": { rootDir: "/build/other/directory", volumes: []string{"/build/:/build"}, expectedSharedBuildsDir: true, }, "rootDir is not an absolute path": { rootDir: "builds", expectedError: "build directory needs to be an absolute path", }, "rootDir is /": { rootDir: "/", expectedError: "build directory needs to be a non-root path", }, "error on volume parsing": { rootDir: "/build", volumes: []string{""}, expectedError: "invalid volume specification", }, "error on volume parser creation": { dontSetupVolumeParser: true, expectedError: `missing volume parser`, }, } for testName, test := range tests { t.Run(testName, func(t *testing.T) { c := common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ BuildsDir: test.rootDir, Docker: &common.DockerConfig{ Volumes: test.volumes, }, }, } build := &common.Build{} build.Variables = spec.Variables{} options := common.ExecutorPrepareOptions{ Config: &c, } e := &executor{ AbstractExecutor: executors.AbstractExecutor{ Build: build, Config: c, }, } if !test.dontSetupVolumeParser { e.volumeParser = parser.NewLinuxParser(e.ExpandValue) } 
err := e.prepareBuildsDir(options) if test.expectedError != "" { assert.Error(t, err) assert.Contains(t, err.Error(), test.expectedError) return } assert.NoError(t, err) assert.Equal(t, test.expectedSharedBuildsDir, e.SharedBuildsDir) }) } } type volumesTestCase struct { volumes []string buildsDir string gitStrategy string adjustConfiguration func(e *executor) volumesManagerAssertions func(*volumes.MockManager) clientAssertions func(*docker.MockClient) createVolumeManager bool expectedError error } var ( volumesTestsDefaultBuildsDir = "/default-builds-dir" volumesTestsDefaultCacheDir = "/default-cache-dir" ) func getExecutorForVolumesTests(t *testing.T, test volumesTestCase) *executor { e := &executor{} e.serverAPIVersion = version.Must(version.NewVersion("1.43")) clientMock := docker.NewMockClient(t) clientMock.On("Close").Return(nil).Once() dockerConn := &dockerConnection{Client: clientMock} e.dockerConn = dockerConn volumesManagerMock := volumes.NewMockManager(t) if !errors.Is(test.expectedError, errVolumesManagerUndefined) { volumesManagerMock.On("RemoveTemporary", mock.Anything).Return(nil).Once() } oldCreateVolumesManager := createVolumesManager t.Cleanup(func() { e.Cleanup() createVolumesManager = oldCreateVolumesManager }) createVolumesManager = func(_ *executor) (volumes.Manager, error) { return volumesManagerMock, nil } if test.volumesManagerAssertions != nil { test.volumesManagerAssertions(volumesManagerMock) } if test.clientAssertions != nil { test.clientAssertions(clientMock) } c := common.RunnerConfig{ RunnerCredentials: common.RunnerCredentials{ Token: "abcdef1234567890", }, RunnerSettings: common.RunnerSettings{ BuildsDir: test.buildsDir, Docker: &common.DockerConfig{ Volumes: test.volumes, }, }, } logger, _ := logrustest.NewNullLogger() e.AbstractExecutor = executors.AbstractExecutor{ BuildLogger: buildlogger.New(&common.Trace{Writer: io.Discard}, logger.WithField("test", t.Name()), buildlogger.Options{}), Build: &common.Build{ ProjectRunnerID: 0, 
Runner: &c, Job: spec.Job{ JobInfo: spec.JobInfo{ ProjectID: 0, }, GitInfo: spec.GitInfo{ RepoURL: "https://gitlab.example.com/group/project.git", }, }, }, Config: c, ExecutorOptions: executors.ExecutorOptions{ DefaultBuildsDir: volumesTestsDefaultBuildsDir, DefaultCacheDir: volumesTestsDefaultCacheDir, }, } e.dockerConn = &dockerConnection{Client: clientMock} e.info = system.Info{ OSType: helperimage.OSTypeLinux, } e.Build.Variables = append(e.Build.Variables, spec.Variable{ Key: "GIT_STRATEGY", Value: test.gitStrategy, }) if test.adjustConfiguration != nil { test.adjustConfiguration(e) } err := e.Build.StartBuild( e.RootDir(), e.CacheDir(), e.CustomBuildEnabled(), e.SharedBuildsDir, false, ) require.NoError(t, err) if test.createVolumeManager { err = e.createVolumesManager() require.NoError(t, err) } return e } func TestCreateVolumes(t *testing.T) { tests := map[string]volumesTestCase{ "volumes manager not created": { expectedError: errVolumesManagerUndefined, }, "no volumes defined, empty buildsDir, clone strategy, no errors": { gitStrategy: "clone", createVolumeManager: true, }, "no volumes defined, defined buildsDir, clone strategy, no errors": { buildsDir: "/builds", gitStrategy: "clone", createVolumeManager: true, }, "no volumes defined, defined buildsDir, fetch strategy, no errors": { buildsDir: "/builds", gitStrategy: "fetch", createVolumeManager: true, }, "volumes defined, empty buildsDir, clone strategy, no errors on user volume": { volumes: []string{"/volume"}, gitStrategy: "clone", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("Create", mock.Anything, "/volume"). Return(nil). Once() }, createVolumeManager: true, }, "volumes defined, empty buildsDir, clone strategy, duplicated error on user volume": { volumes: []string{"/volume"}, gitStrategy: "clone", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("Create", mock.Anything, "/volume"). Return(volumes.NewErrVolumeAlreadyDefined("/volume")). 
Once() }, createVolumeManager: true, expectedError: volumes.NewErrVolumeAlreadyDefined("/volume"), }, "volumes defined, empty buildsDir, clone strategy, other error on user volume": { volumes: []string{"/volume"}, gitStrategy: "clone", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("Create", mock.Anything, "/volume"). Return(errors.New("test-error")). Once() }, createVolumeManager: true, expectedError: errors.New("test-error"), }, } for testName, test := range tests { t.Run(testName, func(t *testing.T) { e := getExecutorForVolumesTests(t, test) err := e.createVolumes() assert.Equal(t, test.expectedError, err) }) } } func TestCreateBuildVolume(t *testing.T) { tests := map[string]volumesTestCase{ "volumes manager not created": { expectedError: errVolumesManagerUndefined, }, "git strategy clone, empty buildsDir, no error": { gitStrategy: "clone", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("CreateTemporary", mock.Anything, volumesTestsDefaultBuildsDir). Return(nil). Once() }, createVolumeManager: true, }, "git strategy clone, empty buildsDir, duplicated error": { gitStrategy: "clone", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("CreateTemporary", mock.Anything, volumesTestsDefaultBuildsDir). Return(volumes.NewErrVolumeAlreadyDefined(volumesTestsDefaultBuildsDir)). Once() }, createVolumeManager: true, }, "git strategy clone, empty buildsDir, other error": { gitStrategy: "clone", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("CreateTemporary", mock.Anything, volumesTestsDefaultBuildsDir). Return(errors.New("test-error")). Once() }, createVolumeManager: true, expectedError: errors.New("test-error"), }, "git strategy clone, non-empty buildsDir, no error": { gitStrategy: "clone", buildsDir: "/builds", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("CreateTemporary", mock.Anything, "/builds"). Return(nil). 
Once() }, createVolumeManager: true, }, "git strategy clone, non-empty buildsDir, duplicated error": { gitStrategy: "clone", buildsDir: "/builds", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("CreateTemporary", mock.Anything, "/builds"). Return(volumes.NewErrVolumeAlreadyDefined("/builds")). Once() }, createVolumeManager: true, }, "git strategy clone, non-empty buildsDir, other error": { gitStrategy: "clone", buildsDir: "/builds", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("CreateTemporary", mock.Anything, "/builds"). Return(errors.New("test-error")). Once() }, createVolumeManager: true, expectedError: errors.New("test-error"), }, "git strategy fetch, empty buildsDir, no error": { gitStrategy: "fetch", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("Create", mock.Anything, volumesTestsDefaultBuildsDir). Return(nil). Once() }, createVolumeManager: true, }, "git strategy fetch, empty buildsDir, duplicated error": { gitStrategy: "fetch", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("Create", mock.Anything, volumesTestsDefaultBuildsDir). Return(volumes.NewErrVolumeAlreadyDefined(volumesTestsDefaultBuildsDir)). Once() }, createVolumeManager: true, }, "git strategy fetch, empty buildsDir, other error": { gitStrategy: "fetch", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("Create", mock.Anything, volumesTestsDefaultBuildsDir). Return(errors.New("test-error")). Once() }, createVolumeManager: true, expectedError: errors.New("test-error"), }, "git strategy fetch, non-empty buildsDir, no error": { gitStrategy: "fetch", buildsDir: "/builds", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("Create", mock.Anything, "/builds"). Return(nil). 
Once() }, createVolumeManager: true, }, "git strategy fetch, non-empty buildsDir, duplicated error": { gitStrategy: "fetch", buildsDir: "/builds", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("Create", mock.Anything, "/builds"). Return(volumes.NewErrVolumeAlreadyDefined("/builds")). Once() }, createVolumeManager: true, }, "git strategy fetch, non-empty buildsDir, wrapped duplicated error": { gitStrategy: "fetch", buildsDir: "/builds", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("Create", mock.Anything, "/builds"). Return(fmt.Errorf("wrap: %w", volumes.NewErrVolumeAlreadyDefined("/builds"))). Once() }, createVolumeManager: true, }, "git strategy fetch, non-empty buildsDir, other error": { gitStrategy: "fetch", buildsDir: "/builds", volumesManagerAssertions: func(vm *volumes.MockManager) { vm.On("Create", mock.Anything, "/builds"). Return(errors.New("test-error")). Once() }, createVolumeManager: true, expectedError: errors.New("test-error"), }, } for testName, test := range tests { t.Run(testName, func(t *testing.T) { e := getExecutorForVolumesTests(t, test) err := e.createBuildVolume() assert.Equal(t, test.expectedError, err) }) } } func TestCreateDependencies(t *testing.T) { const containerID = "container-ID" containerNameRegex, err := regexp.Compile("runner-abcdef123-project-0-concurrent-0-[^-]+-alpine-0") require.NoError(t, err) containerNameMatcher := mock.MatchedBy(containerNameRegex.MatchString) testError := errors.New("test-error") testCase := volumesTestCase{ buildsDir: "/builds", volumes: []string{"/volume"}, adjustConfiguration: func(e *executor) { e.Build.Services = append(e.Build.Services, spec.Image{ Name: "alpine:latest", }) e.BuildShell = &common.ShellConfiguration{} }, volumesManagerAssertions: func(vm *volumes.MockManager) { binds := make([]string, 0) vm.On("CreateTemporary", mock.Anything, "/builds"). Return(nil). Run(func(args mock.Arguments) { binds = append(binds, args.Get(1).(string)) }). 
Once() vm.On("Create", mock.Anything, "/volume"). Return(nil). Run(func(args mock.Arguments) { binds = append(binds, args.Get(1).(string)) }). Once() vm.On("Binds"). Return(func() []string { return binds }). Once() }, clientAssertions: func(c *docker.MockClient) { hostConfigMatcher := mock.MatchedBy(func(conf *container.HostConfig) bool { return assert.Equal(t, []string{"/volume", "/builds"}, conf.Binds) }) c.On("ImageInspectWithRaw", mock.Anything, "alpine:latest"). Return(image.InspectResponse{}, nil, nil). Once() c.On("NetworkList", mock.Anything, mock.Anything). Return(nil, nil). Times(2) c.On("ContainerRemove", mock.Anything, containerNameMatcher, mock.Anything). Return(nil). Once() c.On("ContainerRemove", mock.Anything, containerID, mock.Anything). Return(nil). Once() c.On( "ContainerCreate", mock.Anything, mock.Anything, hostConfigMatcher, mock.Anything, mock.AnythingOfType("*v1.Platform"), containerNameMatcher, ). Return(container.CreateResponse{ID: containerID}, nil). Once() c.On("ContainerStart", mock.Anything, containerID, mock.Anything). Return(testError). 
Once() }, } e := getExecutorForVolumesTests(t, testCase) err = e.createDependencies() assert.Equal(t, testError, err) } type containerConfigExpectations func(*testing.T, *container.Config, *container.HostConfig, *network.NetworkingConfig) type dockerConfigurationTestFakeDockerClient struct { *docker.MockClient cce containerConfigExpectations t *testing.T } func (c *dockerConfigurationTestFakeDockerClient) ContainerCreate( ctx context.Context, config *container.Config, hostConfig *container.HostConfig, networkingConfig *network.NetworkingConfig, platform *v1.Platform, containerName string, ) (container.CreateResponse, error) { c.cce(c.t, config, hostConfig, networkingConfig) return container.CreateResponse{ID: "abc"}, nil } func createExecutorForTestDockerConfiguration( t *testing.T, dockerConfig *common.DockerConfig, cce containerConfigExpectations, ) (*dockerConfigurationTestFakeDockerClient, *executor) { c := &dockerConfigurationTestFakeDockerClient{ cce: cce, t: t, } c.MockClient = docker.NewMockClient(t) e := new(executor) e.dockerConn = &dockerConnection{Client: c} e.info = system.Info{ OSType: helperimage.OSTypeLinux, Architecture: "amd64", } e.BuildLogger = buildlogger.New(nil, logrus.WithFields(logrus.Fields{}), buildlogger.Options{}) e.Config.Docker = dockerConfig e.Build = &common.Build{ Runner: &common.RunnerConfig{}, } e.Build.Token = "abcd123456" e.BuildShell = &common.ShellConfiguration{} var err error e.helperImageInfo, err = helperimage.Get(common.AppVersion.Version, helperimage.Config{ OSType: e.info.OSType, Architecture: e.info.Architecture, KernelVersion: e.info.KernelVersion, }) require.NoError(t, err) err = e.createLabeler() require.NoError(t, err) e.serverAPIVersion = version.Must(version.NewVersion("1.43")) return c, e } func prepareTestDockerConfiguration( t *testing.T, dockerConfig *common.DockerConfig, cce containerConfigExpectations, expectedInspectImage string, expectedPullImage string, //nolint:unparam ) 
(*dockerConfigurationTestFakeDockerClient, *executor) {
	// (continuation of prepareTestDockerConfiguration's signature from the
	// previous line) Stubs the docker-client calls that every configuration
	// test below triggers: image inspect (twice), a blocking pull, a network
	// listing, and the cleanup container removal.
	c, e := createExecutorForTestDockerConfiguration(t, dockerConfig, cce)

	c.On("ImageInspectWithRaw", mock.Anything, expectedInspectImage).
		Return(image.InspectResponse{ID: "123"}, []byte{}, nil).Twice()
	c.On("ImagePullBlocking", mock.Anything, expectedPullImage, mock.Anything).
		Return(nil).Once()
	c.On("NetworkList", mock.Anything, mock.Anything).
		Return([]network.Summary{}, nil).Once()
	c.On("ContainerRemove", mock.Anything, mock.Anything, mock.Anything).
		Return(nil).Once()

	return c, e
}

// testDockerConfigurationWithJobContainer creates a build ("job") container
// with the given docker configuration and lets `cce` assert on the
// container/host/network configs captured by the fake client's
// ContainerCreate override.
func testDockerConfigurationWithJobContainer(
	t *testing.T,
	dockerConfig *common.DockerConfig,
	cce containerConfigExpectations,
) {
	c, e := prepareTestDockerConfiguration(t, dockerConfig, cce, "alpine", "alpine:latest")
	// "abc" is the container ID the fake ContainerCreate always returns.
	c.On("ContainerInspect", mock.Anything, "abc").
		Return(container.InspectResponse{}, nil).Once()

	err := e.createVolumesManager()
	require.NoError(t, err)

	err = e.createPullManager()
	require.NoError(t, err)

	imageConfig := spec.Image{Name: "alpine"}

	cfgTor := newDefaultContainerConfigurator(e, buildContainerType, imageConfig, []string{"/bin/sh"}, []string{})
	_, err = e.createContainer(buildContainerType, imageConfig, []string{}, cfgTor)
	assert.NoError(t, err, "Should create container without errors")
}

// testDockerConfigurationWithPredefinedContainer is the predefined-container
// variant of the helper above.
// (function body continues on the next source line)
func testDockerConfigurationWithPredefinedContainer(
	t *testing.T,
	dockerConfig *common.DockerConfig,
	cce containerConfigExpectations,
) {
	c, e := prepareTestDockerConfiguration(t, dockerConfig, cce, "alpine", "alpine:latest")
	c.On("ContainerInspect", mock.Anything, "abc").
Return(container.InspectResponse{}, nil).Once() err := e.createVolumesManager() require.NoError(t, err) err = e.createPullManager() require.NoError(t, err) imageConfig := spec.Image{Name: "alpine"} cfgTor := newDefaultContainerConfigurator(e, predefinedContainerType, imageConfig, []string{"/bin/sh"}, []string{}) _, err = e.createContainer(buildContainerType, imageConfig, []string{}, cfgTor) assert.NoError(t, err, "Should create container without errors") } func TestDockerMemorySetting(t *testing.T) { dockerConfig := &common.DockerConfig{ Memory: "42m", } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, int64(44040192), hostConfig.Memory) } testDockerConfigurationWithJobContainer(t, dockerConfig, cce) } func TestDockerMemorySwapSetting(t *testing.T) { dockerConfig := &common.DockerConfig{ MemorySwap: "2g", } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, int64(2147483648), hostConfig.MemorySwap) } testDockerConfigurationWithJobContainer(t, dockerConfig, cce) } func TestDockerMemoryReservationSetting(t *testing.T) { dockerConfig := &common.DockerConfig{ MemoryReservation: "64m", } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, int64(67108864), hostConfig.MemoryReservation) } testDockerConfigurationWithJobContainer(t, dockerConfig, cce) } func TestDockerCPUSSetting(t *testing.T) { examples := []struct { cpus string nanocpus int64 }{ {"0.5", 500000000}, {"0.25", 250000000}, {"1/3", 333333333}, {"1/8", 125000000}, {"0.0001", 100000}, } for _, example := range examples { t.Run(example.cpus, func(t *testing.T) { dockerConfig := &common.DockerConfig{ CPUS: example.cpus, } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, example.nanocpus, 
hostConfig.NanoCPUs) } testDockerConfigurationWithJobContainer(t, dockerConfig, cce) }) } } func TestDockerIsolationWithCorrectValues(t *testing.T) { isolations := []string{"default", ""} if runtime.GOOS == helperimage.OSTypeWindows { isolations = append(isolations, "hyperv", "process") } for _, isolation := range isolations { t.Run(isolation, func(t *testing.T) { dockerConfig := &common.DockerConfig{ Isolation: isolation, } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, container.Isolation(isolation), hostConfig.Isolation) } testDockerConfigurationWithJobContainer(t, dockerConfig, cce) }) } } func TestDockerIsolationWithIncorrectValue(t *testing.T) { dockerConfig := &common.DockerConfig{ Isolation: "someIncorrectValue", } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { } _, executor := createExecutorForTestDockerConfiguration(t, dockerConfig, cce) _, err := executor.createHostConfig(false, false) assert.Contains(t, err.Error(), `the isolation value "someIncorrectValue" is not valid`) } func TestDockerServiceContainerConfigIncludesDockerLabels(t *testing.T) { dockerConfig := &common.DockerConfig{ HelperImage: "gitlab/gitlab-runner:${CI_RUNNER_REVISION}", ContainerLabels: map[string]string{"my.custom.dockerConfigLabel": "dockerConfigLabelValue"}, } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { } _, executor := createExecutorForTestDockerConfiguration(t, dockerConfig, cce) containerConfig := executor.createServiceContainerConfig("postgres", "15-alpine", "abc123def456", spec.Image{Name: "postgres:15-alpine"}) expectedLabels := map[string]string{ // default labels "com.gitlab.gitlab-runner.job.before_sha": "", "com.gitlab.gitlab-runner.job.id": "0", "com.gitlab.gitlab-runner.job.ref": "", "com.gitlab.gitlab-runner.job.sha": "", 
"com.gitlab.gitlab-runner.job.timeout": "2h0m0s", "com.gitlab.gitlab-runner.job.url": "/-/jobs/0", "com.gitlab.gitlab-runner.managed": "true", "com.gitlab.gitlab-runner.pipeline.id": "", "com.gitlab.gitlab-runner.project.id": "0", "com.gitlab.gitlab-runner.project.runner_id": "0", "com.gitlab.gitlab-runner.runner.id": "", "com.gitlab.gitlab-runner.runner.local_id": "0", "com.gitlab.gitlab-runner.runner.system_id": "", "com.gitlab.gitlab-runner.service": "postgres", "com.gitlab.gitlab-runner.service.version": "15-alpine", "com.gitlab.gitlab-runner.type": "service", // from user-defined config "my.custom.dockerConfigLabel": "dockerConfigLabelValue", // NOTE: this is only here for backwards-compatibility // see https://gitlab.com/gitlab-org/gitlab-runner/-/issues/39048 "com.gitlab.gitlab-runner.my.custom.dockerConfigLabel": "dockerConfigLabelValue", } assert.Equal(t, expectedLabels, containerConfig.Labels) } func TestDockerMacAddress(t *testing.T) { dockerConfig := &common.DockerConfig{ MacAddress: "92:d0:c6:0a:29:33", } cce := func(t *testing.T, _ *container.Config, _ *container.HostConfig, netConfig *network.NetworkingConfig) { for _, ec := range netConfig.EndpointsConfig { assert.Equal(t, "92:d0:c6:0a:29:33", ec.MacAddress) } } testDockerConfigurationWithJobContainer(t, dockerConfig, cce) } func TestDockerCgroupParentSetting(t *testing.T) { dockerConfig := &common.DockerConfig{ CgroupParent: "test-docker-cgroup", } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, "test-docker-cgroup", hostConfig.CgroupParent) } testDockerConfigurationWithJobContainer(t, dockerConfig, cce) } func TestDockerCPUSetCPUsSetting(t *testing.T) { dockerConfig := &common.DockerConfig{ CPUSetCPUs: "1-3,5", } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, "1-3,5", hostConfig.CpusetCpus) } 
testDockerConfigurationWithJobContainer(t, dockerConfig, cce)
}

// TestDockerCPUSetMemsSetting verifies cpuset_mems is passed through to the
// host config.
func TestDockerCPUSetMemsSetting(t *testing.T) {
	dockerConfig := &common.DockerConfig{
		CPUSetMems: "1-3,5",
	}

	cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
		assert.Equal(t, "1-3,5", hostConfig.CpusetMems)
	}

	testDockerConfigurationWithJobContainer(t, dockerConfig, cce)
}

// TestDockerServiceSettings verifies that the service_* resource settings
// (memory, swap, cgroup parent, cpusets, cpus) are applied to service
// containers' host configs.
func TestDockerServiceSettings(t *testing.T) {
	tests := map[string]struct {
		dockerConfig common.DockerConfig
		verifyFn     func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig)
	}{
		"memory": {
			dockerConfig: common.DockerConfig{
				ServiceMemory: "42m",
			},
			verifyFn: func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
				// Compare against the same parser the runner uses.
				value, err := units.RAMInBytes("42m")
				require.NoError(t, err)
				assert.Equal(t, value, hostConfig.Memory)
			},
		},
		"memory reservation": {
			dockerConfig: common.DockerConfig{
				ServiceMemoryReservation: "64m",
			},
			verifyFn: func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
				value, err := units.RAMInBytes("64m")
				require.NoError(t, err)
				assert.Equal(t, value, hostConfig.MemoryReservation)
			},
		},
		"swap": {
			dockerConfig: common.DockerConfig{
				ServiceMemorySwap: "2g",
			},
			verifyFn: func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
				value, err := units.RAMInBytes("2g")
				require.NoError(t, err)
				assert.Equal(t, value, hostConfig.MemorySwap)
			},
		},
		"CgroupParent": {
			dockerConfig: common.DockerConfig{
				ServiceCgroupParent: "test-docker-cgroup",
			},
			verifyFn: func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
				assert.Equal(t, "test-docker-cgroup", hostConfig.CgroupParent)
			},
		},
		"CPUSetCPUs": {
			dockerConfig: common.DockerConfig{
				ServiceCPUSetCPUs: "1-3,5",
			},
			verifyFn: func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
				assert.Equal(t, "1-3,5", hostConfig.CpusetCpus)
			},
		},
		"cpus_0.5": {
			dockerConfig: common.DockerConfig{
				ServiceCPUS: "0.5",
			},
			verifyFn: func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
				assert.Equal(t, int64(500000000), hostConfig.NanoCPUs)
			},
		},
		"cpus_0.25": {
			dockerConfig: common.DockerConfig{
				ServiceCPUS: "0.25",
			},
			verifyFn: func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
				assert.Equal(t, int64(250000000), hostConfig.NanoCPUs)
			},
		},
		"cpus_1/3": {
			dockerConfig: common.DockerConfig{
				ServiceCPUS: "1/3",
			},
			verifyFn: func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
				assert.Equal(t, int64(333333333), hostConfig.NanoCPUs)
			},
		},
		"cpus_1/8": {
			dockerConfig: common.DockerConfig{
				ServiceCPUS: "1/8",
			},
			verifyFn: func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
				assert.Equal(t, int64(125000000), hostConfig.NanoCPUs)
			},
		},
		"cpus_0.0001": {
			dockerConfig: common.DockerConfig{
				ServiceCPUS: "0.0001",
			},
			verifyFn: func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
				assert.Equal(t, int64(100000), hostConfig.NanoCPUs)
			},
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			testDockerConfigurationWithServiceContainer(t, &tt.dockerConfig, tt.verifyFn)
		})
	}
}

// TestDockerContainerLabelsSetting verifies user-configured container labels
// are merged with the runner's default labels on the build container.
func TestDockerContainerLabelsSetting(t *testing.T) {
	dockerConfig := &common.DockerConfig{
		ContainerLabels: map[string]string{"my.custom.label": "my.custom.value"},
	}

	cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
		expected := map[string]string{
			"com.gitlab.gitlab-runner.job.before_sha": "",
			"com.gitlab.gitlab-runner.job.id":         "0",
			"com.gitlab.gitlab-runner.job.ref":        "",
"com.gitlab.gitlab-runner.job.sha": "", "com.gitlab.gitlab-runner.job.url": "/-/jobs/0", "com.gitlab.gitlab-runner.job.timeout": "2h0m0s", "com.gitlab.gitlab-runner.managed": "true", "com.gitlab.gitlab-runner.pipeline.id": "", "com.gitlab.gitlab-runner.project.id": "0", "com.gitlab.gitlab-runner.project.runner_id": "0", "com.gitlab.gitlab-runner.runner.id": "", "com.gitlab.gitlab-runner.runner.local_id": "0", "com.gitlab.gitlab-runner.runner.system_id": "", "com.gitlab.gitlab-runner.type": "build", "my.custom.label": "my.custom.value", } assert.Equal(t, expected, config.Labels) } testDockerConfigurationWithJobContainer(t, dockerConfig, cce) } func TestDockerTmpfsSetting(t *testing.T) { dockerConfig := &common.DockerConfig{ Tmpfs: map[string]string{ "/tmpfs": "rw,noexec", }, } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { require.NotEmpty(t, hostConfig.Tmpfs) } testDockerConfigurationWithJobContainer(t, dockerConfig, cce) } func TestDockerServicesDevicesSetting(t *testing.T) { tests := map[string]struct { devices map[string][]string expectedDeviceMappings []container.DeviceMapping }{ "same host and container path": { devices: map[string][]string{ "alpine:*": {"/dev/usb:/dev/usb:ro"}, "alp*": {"/dev/kvm", "/dev/dri"}, "nomatch": {"/dev/null"}, }, expectedDeviceMappings: []container.DeviceMapping{ { PathOnHost: "/dev/usb", PathInContainer: "/dev/usb", CgroupPermissions: "ro", }, { PathOnHost: "/dev/kvm", PathInContainer: "/dev/kvm", CgroupPermissions: "rwm", }, { PathOnHost: "/dev/dri", PathInContainer: "/dev/dri", CgroupPermissions: "rwm", }, }, }, "different host and container path": { devices: map[string][]string{ "alpine:*": {"/dev/usb:/dev/xusb:ro"}, "alp*": {"/dev/kvm:/dev/xkvm", "/dev/dri"}, "nomatch": {"/dev/null"}, }, expectedDeviceMappings: []container.DeviceMapping{ { PathOnHost: "/dev/usb", PathInContainer: "/dev/xusb", CgroupPermissions: "ro", }, { PathOnHost: "/dev/kvm", 
PathInContainer: "/dev/xkvm", CgroupPermissions: "rwm", }, { PathOnHost: "/dev/dri", PathInContainer: "/dev/dri", CgroupPermissions: "rwm", }, }, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { dockerConfig := &common.DockerConfig{ ServicesDevices: tt.devices, } cce := func(ttt *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { require.NotEmpty(ttt, hostConfig.Resources.Devices) assert.ElementsMatch(ttt, tt.expectedDeviceMappings, hostConfig.Resources.Devices) } testDockerConfigurationWithServiceContainer(t, dockerConfig, cce) }) } } func TestDockerGetServicesDevices(t *testing.T) { tests := map[string]struct { image string devices map[string][]string expectedDeviceMappings []container.DeviceMapping expectedErrorSubstr string }{ "matching image": { image: "alpine:latest", devices: map[string][]string{ "alpine:*": {"/dev/null"}, }, expectedDeviceMappings: []container.DeviceMapping{ { PathOnHost: "/dev/null", PathInContainer: "/dev/null", CgroupPermissions: "rwm", }, }, expectedErrorSubstr: "", }, "one matching image": { image: "alpine:latest", devices: map[string][]string{ "alpine:*": {"/dev/null"}, "fedora:*": {"/dev/usb"}, }, expectedDeviceMappings: []container.DeviceMapping{ { PathOnHost: "/dev/null", PathInContainer: "/dev/null", CgroupPermissions: "rwm", }, }, expectedErrorSubstr: "", }, "multiple matching images": { image: "alpine:latest", devices: map[string][]string{ "alpine:*": {"/dev/null"}, "alpine:latest": {"/dev/usb"}, }, expectedDeviceMappings: []container.DeviceMapping{ { PathOnHost: "/dev/null", PathInContainer: "/dev/null", CgroupPermissions: "rwm", }, { PathOnHost: "/dev/usb", PathInContainer: "/dev/usb", CgroupPermissions: "rwm", }, }, expectedErrorSubstr: "", }, "no devices": { image: "alpine:latest", devices: map[string][]string{ "alpine:*": {}, }, expectedDeviceMappings: nil, expectedErrorSubstr: "", }, "no matching image": { image: "alpine:latest", devices: 
map[string][]string{ "ubuntu:*": {"/dev/null"}, }, expectedDeviceMappings: nil, expectedErrorSubstr: "", }, "devices is nil": { image: "alpine:latest", devices: nil, expectedDeviceMappings: nil, expectedErrorSubstr: "", }, "multiple devices": { image: "private.registry:5000/emulator/OSv7:26", devices: map[string][]string{ "private.registry:5000/emulator/*": {"/dev/kvm", "/dev/dri"}, }, expectedDeviceMappings: []container.DeviceMapping{ { PathOnHost: "/dev/kvm", PathInContainer: "/dev/kvm", CgroupPermissions: "rwm", }, { PathOnHost: "/dev/dri", PathInContainer: "/dev/dri", CgroupPermissions: "rwm", }, }, expectedErrorSubstr: "", }, "parseDeviceString error": { image: "alpine:latest", devices: map[string][]string{ "alpine:*": {"/dev/null::::"}, }, expectedDeviceMappings: nil, expectedErrorSubstr: "too many colons", }, "bad glob pattern": { image: "alpine:latest", devices: map[string][]string{ "alpin[e:*": {"/dev/usb:/dev/usb:ro"}, }, expectedErrorSubstr: "invalid service device image pattern: alpin[e", }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { e := &executor{} e.Config.Docker = &common.DockerConfig{ ServicesDevices: tt.devices, } mappings, err := e.getServicesDevices(tt.image) if tt.expectedErrorSubstr != "" { assert.Contains(t, fmt.Sprintf("%+v", err), tt.expectedErrorSubstr) return } assert.ElementsMatch(t, tt.expectedDeviceMappings, mappings) }) } } func TestDockerServicesDeviceRequestsSetting(t *testing.T) { tests := map[string]struct { gpus string expectedDeviceRequests []container.DeviceRequest }{ "request all GPUs": { gpus: "all", expectedDeviceRequests: []container.DeviceRequest{ { Driver: "", Count: -1, DeviceIDs: nil, Capabilities: [][]string{{"gpu"}}, Options: map[string]string{}, }, }, }, "gpus is empty string": { gpus: "", expectedDeviceRequests: nil, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { dockerConfig := &common.DockerConfig{ ServiceGpus: tt.gpus, } cce := func(ttt *testing.T, config *container.Config, 
hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
				assert.ElementsMatch(ttt, tt.expectedDeviceRequests, hostConfig.Resources.DeviceRequests)
			}

			testDockerConfigurationWithServiceContainer(t, dockerConfig, cce)
		})
	}
}

// TestDockerGetServicesDeviceRequests exercises getServicesDeviceRequests
// directly: "all", explicit device IDs, a numeric count, empty string, and a
// malformed gpus string.
func TestDockerGetServicesDeviceRequests(t *testing.T) {
	tests := map[string]struct {
		gpus                   string
		expectedDeviceRequests []container.DeviceRequest
		expectedErrorSubstr    string
	}{
		"request all GPUs": {
			gpus: "all",
			expectedDeviceRequests: []container.DeviceRequest{
				{
					Driver:       "",
					Count:        -1,
					DeviceIDs:    nil,
					Capabilities: [][]string{{"gpu"}},
					Options:      map[string]string{},
				},
			},
			expectedErrorSubstr: "",
		},
		"request GPUs by device ID": {
			gpus: "\"device=1,2\"",
			expectedDeviceRequests: []container.DeviceRequest{
				{
					Driver:       "",
					Count:        0,
					DeviceIDs:    []string{"1", "2"},
					Capabilities: [][]string{{"gpu"}},
					Options:      map[string]string{},
				},
			},
			expectedErrorSubstr: "",
		},
		"request GPUs by count": {
			gpus: "2",
			expectedDeviceRequests: []container.DeviceRequest{
				{
					Driver:       "",
					Count:        2,
					DeviceIDs:    nil,
					Capabilities: [][]string{{"gpu"}},
					Options:      map[string]string{},
				},
			},
			expectedErrorSubstr: "",
		},
		"gpus is empty string": {
			gpus:                   "",
			expectedDeviceRequests: nil,
			expectedErrorSubstr:    "",
		},
		"parse gpus string error": {
			gpus:                   "somestring=thatshouldtriggeranerror",
			expectedDeviceRequests: nil,
			expectedErrorSubstr:    "unexpected key 'somestring' in 'somestring=thatshouldtriggeranerror'",
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			e := &executor{}
			e.Config.Docker = &common.DockerConfig{
				ServiceGpus: tt.gpus,
			}

			deviceRequests, err := e.getServicesDeviceRequests()

			if tt.expectedErrorSubstr != "" {
				assert.Contains(t, fmt.Sprintf("%+v", err), tt.expectedErrorSubstr)
				return
			}

			require.NoError(t, err)
			require.Equal(t, tt.expectedDeviceRequests, deviceRequests)
		})
	}
}

// TestDockerUserSetting verifies the configured user is applied to the build
// container but not to the predefined (helper) container.
func TestDockerUserSetting(t *testing.T) {
	dockerConfig := &common.DockerConfig{
		User: "www",
	}

	cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
		assert.Equal(t, "www", config.User)
	}
	ccePredefined := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
		assert.Equal(t, "", config.User)
	}

	testDockerConfigurationWithJobContainer(t, dockerConfig, cce)
	testDockerConfigurationWithPredefinedContainer(t, dockerConfig, ccePredefined)
}

// TestDockerUserNSSetting verifies userns_mode is passed through (and empty by
// default).
func TestDockerUserNSSetting(t *testing.T) {
	dockerConfig := &common.DockerConfig{}
	dockerConfigWithHostUsernsMode := &common.DockerConfig{
		UsernsMode: "host",
	}

	cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
		assert.Equal(t, container.UsernsMode(""), hostConfig.UsernsMode)
	}
	cceWithHostUsernsMode := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
		assert.Equal(t, container.UsernsMode("host"), hostConfig.UsernsMode)
	}

	testDockerConfigurationWithJobContainer(t, dockerConfig, cce)
	testDockerConfigurationWithJobContainer(t, dockerConfigWithHostUsernsMode, cceWithHostUsernsMode)
}

// TestDockerRuntimeSetting verifies the configured runtime is passed through
// to the host config.
func TestDockerRuntimeSetting(t *testing.T) {
	dockerConfig := &common.DockerConfig{
		Runtime: "runc",
	}

	cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
		assert.Equal(t, "runc", hostConfig.Runtime)
	}

	testDockerConfigurationWithJobContainer(t, dockerConfig, cce)
}

// TestDockerSysctlsSetting verifies configured sysctls are passed through to
// the host config.
func TestDockerSysctlsSetting(t *testing.T) {
	dockerConfig := &common.DockerConfig{
		SysCtls: map[string]string{
			"net.ipv4.ip_forward": "1",
		},
	}

	cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
		assert.Equal(t, "1", hostConfig.Sysctls["net.ipv4.ip_forward"])
	}

	testDockerConfigurationWithJobContainer(t, dockerConfig, cce)
}

// TestDockerUlimitSetting verifies GetUlimits parses "soft:hard" and single
// values, and rejects non-numeric limits.
func TestDockerUlimitSetting(t *testing.T) {
	dockerConfig := &common.DockerConfig{}

	tests := map[string]struct {
		ulimit         map[string]string
		expectedUlimit []*units.Ulimit
		expectedError  bool
	}{
"soft and hard values": { ulimit: map[string]string{ "nofile": "1024:2048", }, expectedUlimit: []*units.Ulimit{ { Name: "nofile", Soft: 1024, Hard: 2048, }, }, expectedError: false, }, "single limit value": { ulimit: map[string]string{ "nofile": "1024", }, expectedUlimit: []*units.Ulimit{ { Name: "nofile", Soft: 1024, Hard: 1024, }, }, expectedError: false, }, "invalid limit value": { ulimit: map[string]string{ "nofile": "a", }, expectedError: true, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { dockerConfig.Ulimit = test.ulimit ulimits, err := dockerConfig.GetUlimits() if test.expectedError { assert.Error(t, err) return } assert.Equal(t, ulimits, test.expectedUlimit) }) } } type testAllowedPrivilegedJobDescription struct { expectedPrivileged bool privileged bool allowedImages []string } var testAllowedPrivilegedJob = []testAllowedPrivilegedJobDescription{ {true, true, []string{}}, {true, true, []string{"*"}}, {false, true, []string{"*:*"}}, {false, true, []string{"*/*"}}, {false, true, []string{"*/*:*"}}, {true, true, []string{"**/*"}}, {false, true, []string{"**/*:*"}}, {true, true, []string{"alpine"}}, {false, true, []string{"debian"}}, {true, true, []string{"alpi*"}}, {true, true, []string{"*alpi*"}}, {true, true, []string{"*alpi*"}}, {true, true, []string{"debian", "alpine"}}, {true, true, []string{"debian", "*"}}, {false, false, []string{}}, {false, false, []string{"*"}}, {false, false, []string{"*:*"}}, {false, false, []string{"*/*"}}, {false, false, []string{"*/*:*"}}, {false, false, []string{"**/*"}}, {false, false, []string{"**/*:*"}}, {false, false, []string{"alpine"}}, {false, false, []string{"debian"}}, {false, false, []string{"alpi*"}}, {false, false, []string{"*alpi*"}}, {false, false, []string{"*alpi*"}}, {false, false, []string{"debian", "alpine"}}, {false, false, []string{"debian", "*"}}, } func TestDockerPrivilegedJobSetting(t *testing.T) { for _, test := range testAllowedPrivilegedJob { dockerConfig := 
&common.DockerConfig{ Privileged: test.privileged, AllowedPrivilegedImages: test.allowedImages, } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { var message string if test.expectedPrivileged { message = "%q must be allowed by %q" } else { message = "%q must not be allowed by %q" } assert.Equal(t, test.expectedPrivileged, hostConfig.Privileged, message, "alpine", test.allowedImages) } testDockerConfigurationWithJobContainer(t, dockerConfig, cce) } } type networksTestCase struct { clientAssertions func(*docker.MockClient) networksManagerAssertions func(*networks.MockManager) createNetworkManager bool networkPerBuild string expectedBuildError error expectedCleanError error } func TestDockerCreateNetwork(t *testing.T) { testErr := errors.New("test-err") tests := map[string]networksTestCase{ "networks manager not created": { networkPerBuild: "false", expectedBuildError: errNetworksManagerUndefined, expectedCleanError: errNetworksManagerUndefined, }, "network not created": { createNetworkManager: true, networkPerBuild: "false", networksManagerAssertions: func(nm *networks.MockManager) { nm.On("Create", mock.Anything, mock.Anything, mock.Anything). Return(container.NetworkMode("test"), nil). Once() nm.On("Inspect", mock.Anything). Return(network.Inspect{}, nil). Once() nm.On("Cleanup", mock.Anything). Return(nil). Once() }, }, "network created": { createNetworkManager: true, networkPerBuild: "true", networksManagerAssertions: func(nm *networks.MockManager) { nm.On("Create", mock.Anything, mock.Anything, mock.Anything). Return(container.NetworkMode("test"), nil). Once() nm.On("Inspect", mock.Anything). Return(network.Inspect{}, nil). Once() nm.On("Cleanup", mock.Anything). Return(nil). Once() }, }, "network creation failed": { createNetworkManager: true, networkPerBuild: "true", networksManagerAssertions: func(nm *networks.MockManager) { nm.On("Create", mock.Anything, mock.Anything, mock.Anything). 
Return(container.NetworkMode("fail"), testErr). Once() }, expectedBuildError: testErr, }, "network inspect failed": { createNetworkManager: true, networkPerBuild: "true", networksManagerAssertions: func(nm *networks.MockManager) { nm.On("Create", mock.Anything, mock.Anything, mock.Anything). Return(container.NetworkMode("test"), nil). Once() nm.On("Inspect", mock.Anything). Return(network.Inspect{}, testErr). Once() }, expectedCleanError: nil, }, "removing container failed": { createNetworkManager: true, networkPerBuild: "true", clientAssertions: func(c *docker.MockClient) { c.On("NetworkList", mock.Anything, mock.Anything). Return([]network.Summary{}, nil). Once() c.On("ContainerRemove", mock.Anything, mock.Anything, mock.Anything). Return(testErr). Once() }, networksManagerAssertions: func(nm *networks.MockManager) { nm.On("Create", mock.Anything, mock.Anything, mock.Anything). Return(container.NetworkMode("test"), nil). Once() nm.On("Inspect", mock.Anything). Return( network.Inspect{ Containers: map[string]network.EndpointResource{ "abc": {}, }, }, nil, ). Once() nm.On("Cleanup", mock.Anything). Return(nil). Once() }, expectedCleanError: nil, }, "network cleanup failed": { createNetworkManager: true, networkPerBuild: "true", networksManagerAssertions: func(nm *networks.MockManager) { nm.On("Create", mock.Anything, mock.Anything, mock.Anything). Return(container.NetworkMode("test"), nil). Once() nm.On("Inspect", mock.Anything). Return(network.Inspect{}, nil). Once() nm.On("Cleanup", mock.Anything). Return(testErr). 
Once() }, expectedCleanError: testErr, }, } for testName, test := range tests { t.Run(testName, func(t *testing.T) { e := getExecutorForNetworksTests(t, test) err := e.createBuildNetwork() assert.ErrorIs(t, err, test.expectedBuildError) err = e.cleanupNetwork(t.Context()) assert.ErrorIs(t, err, test.expectedCleanError) }) } } func getExecutorForNetworksTests(t *testing.T, test networksTestCase) *executor { t.Helper() clientMock := docker.NewMockClient(t) networksManagerMock := networks.NewMockManager(t) oldCreateNetworksManager := createNetworksManager t.Cleanup(func() { createNetworksManager = oldCreateNetworksManager }) createNetworksManager = func(_ *executor) (networks.Manager, error) { return networksManagerMock, nil } if test.networksManagerAssertions != nil { test.networksManagerAssertions(networksManagerMock) } if test.clientAssertions != nil { test.clientAssertions(clientMock) } c := common.RunnerConfig{ RunnerCredentials: common.RunnerCredentials{ Token: "abcdef1234567890", }, } c.Docker = &common.DockerConfig{ NetworkMode: "", } e := &executor{ AbstractExecutor: executors.AbstractExecutor{ BuildLogger: buildlogger.New(nil, logrus.WithFields(logrus.Fields{}), buildlogger.Options{}), Build: &common.Build{ ProjectRunnerID: 0, Runner: &c, Job: spec.Job{ JobInfo: spec.JobInfo{ ProjectID: 0, }, GitInfo: spec.GitInfo{ RepoURL: "https://gitlab.example.com/group/project.git", }, }, }, Config: c, ExecutorOptions: executors.ExecutorOptions{ DefaultBuildsDir: volumesTestsDefaultBuildsDir, DefaultCacheDir: volumesTestsDefaultCacheDir, }, }, dockerConn: &dockerConnection{Client: clientMock}, info: system.Info{ OSType: helperimage.OSTypeLinux, }, } e.Context = t.Context() e.Build.Variables = append(e.Build.Variables, spec.Variable{ Key: featureflags.NetworkPerBuild, Value: test.networkPerBuild, }) if test.createNetworkManager { err := e.createNetworksManager() require.NoError(t, err) } return e } func TestCheckOSType(t *testing.T) { cases := map[string]struct { 
dockerInfoOSType string expectedErr string }{ "linux type": { dockerInfoOSType: osTypeLinux, }, "windows type": { dockerInfoOSType: osTypeWindows, }, "freebsd type": { dockerInfoOSType: osTypeFreeBSD, }, "unknown": { dockerInfoOSType: "foobar", expectedErr: "unsupported os type: foobar", }, } for name, c := range cases { t.Run(name, func(t *testing.T) { e := executor{ info: system.Info{ OSType: c.dockerInfoOSType, }, AbstractExecutor: executors.AbstractExecutor{}, } err := validateOSType(e.info) if c.expectedErr == "" { assert.NoError(t, err) return } assert.EqualError(t, err, c.expectedErr) }) } } func TestHelperImageRegistry(t *testing.T) { tests := map[string]struct { config *common.DockerConfig // We only validate the name because we only care if the right image is // used. We don't want to end up having this test as a "spellcheck" to // make sure tags and commands are generated correctly since that is // done at a unit level already and we would be duplicating internal // logic and leaking abstractions. expectedHelperImageName string }{ "Default helper image": { config: &common.DockerConfig{}, expectedHelperImageName: helperimage.GitLabRegistryName, }, "helper image overridden still use default helper image in prepare": { config: &common.DockerConfig{ HelperImage: "private.registry.com/helper", }, // We expect the default image to still be chosen since the check of // the override happens at a later stage. 
expectedHelperImageName: helperimage.GitLabRegistryName, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { e := &executor{ AbstractExecutor: executors.AbstractExecutor{ ExecutorOptions: executors.ExecutorOptions{}, }, newVolumePermissionSetter: func() (permission.Setter, error) { return nil, nil }, } e.Build = &common.Build{} e.info = system.Info{ OSType: "linux", } e.Config.Docker = tt.config helperImageInfo, err := e.prepareHelperImage() require.NoError(t, err) assert.Equal(t, tt.expectedHelperImageName, helperImageInfo.Name) }) } } func TestLocalHelperImage(t *testing.T) { imageName := func(prefix, suffix string) string { return fmt.Sprintf("%s:%s%s%s", helperimage.GitLabRegistryName, prefix, "x86_64-latest", suffix) } createFakePrebuiltImages(t, "x86_64") tests := map[string]struct { jobVariables spec.Variables config helperimage.Config clientAssertions func(*docker.MockClient) expectedImage *image.InspectResponse }{ "docker import using registry.gitlab.com name": { config: helperimage.Config{ Architecture: "amd64", OSType: osTypeLinux, }, clientAssertions: func(c *docker.MockClient) { c.On( "ImageImportBlocking", mock.Anything, mock.Anything, helperimage.GitLabRegistryName, image.ImportOptions{ Tag: "x86_64-latest", Changes: []string{ `ENTRYPOINT ["/usr/bin/dumb-init", "/entrypoint"]`, }, }, ).Return(nil) imageInspect := image.InspectResponse{ RepoTags: []string{ imageName("", ""), }, } c.On( "ImageInspectWithRaw", mock.Anything, imageName("", ""), ).Return(imageInspect, []byte{}, nil) }, expectedImage: &image.InspectResponse{ RepoTags: []string{ imageName("", ""), }, }, }, "docker import nil is returned if error": { config: helperimage.Config{ Architecture: "amd64", OSType: osTypeLinux, }, clientAssertions: func(c *docker.MockClient) { c.On( "ImageImportBlocking", mock.Anything, mock.Anything, mock.Anything, mock.Anything, ).Return(errors.New("error")) }, expectedImage: nil, }, "docker import nil is returned if error on inspect": { config: 
helperimage.Config{ Architecture: "amd64", OSType: osTypeLinux, }, clientAssertions: func(c *docker.MockClient) { c.On( "ImageImportBlocking", mock.Anything, mock.Anything, mock.Anything, mock.Anything, ).Return(nil) c.On( "ImageInspectWithRaw", mock.Anything, mock.Anything, ).Return(image.InspectResponse{}, []byte{}, errors.New("error")) }, expectedImage: nil, }, "powershell image is used when shell is pwsh": { config: helperimage.Config{ Architecture: "amd64", OSType: osTypeLinux, Shell: shells.SNPwsh, }, clientAssertions: func(c *docker.MockClient) { c.On( "ImageImportBlocking", mock.Anything, mock.MatchedBy(func(source image.ImportSource) bool { return assert.IsType(t, new(os.File), source.Source) && assert.Equal( t, "prebuilt-alpine-x86_64-pwsh.tar.xz", filepath.Base((source.Source.(*os.File)).Name()), ) }), helperimage.GitLabRegistryName, mock.Anything, ).Return(nil) imageInspect := image.InspectResponse{ RepoTags: []string{ imageName("", "-pwsh"), }, } c.On( "ImageInspectWithRaw", mock.Anything, imageName("", "-pwsh"), ).Return(imageInspect, []byte{}, nil) }, expectedImage: &image.InspectResponse{ RepoTags: []string{ imageName("", "-pwsh"), }, }, }, "powershell image is used when shell is pwsh and flavor ubuntu": { config: helperimage.Config{ Architecture: "amd64", OSType: osTypeLinux, Flavor: "ubuntu", Shell: shells.SNPwsh, }, clientAssertions: func(c *docker.MockClient) { c.On( "ImageImportBlocking", mock.Anything, mock.MatchedBy(func(source image.ImportSource) bool { return assert.IsType(t, new(os.File), source.Source) && assert.Equal( t, "prebuilt-ubuntu-x86_64-pwsh.tar.xz", filepath.Base((source.Source.(*os.File)).Name()), ) }), helperimage.GitLabRegistryName, mock.Anything, ).Return(nil) imageInspect := image.InspectResponse{ RepoTags: []string{ imageName("ubuntu-", "-pwsh"), }, } c.On( "ImageInspectWithRaw", mock.Anything, imageName("ubuntu-", "-pwsh"), ).Return(imageInspect, []byte{}, nil) }, expectedImage: &image.InspectResponse{ RepoTags: []string{ 
imageName("ubuntu-", "-pwsh"), }, }, }, "docker load docker image": { config: helperimage.Config{ Architecture: "amd64", OSType: osTypeLinux, Flavor: "ubuntu", }, clientAssertions: func(c *docker.MockClient) { c.On( "ImageLoad", mock.Anything, mock.Anything, true, ).Return(image.LoadResponse{JSON: true, Body: io.NopCloser(strings.NewReader(`{"stream": "Loaded image ID: 1234"}`))}, nil) c.On( "ImageTag", mock.Anything, "1234", imageName("ubuntu-", ""), ).Return(nil) imageInspect := image.InspectResponse{ RepoTags: []string{ imageName("ubuntu-", ""), }, } c.On( "ImageInspectWithRaw", mock.Anything, imageName("ubuntu-", ""), ).Return(imageInspect, []byte{}, nil) }, expectedImage: &image.InspectResponse{ RepoTags: []string{ imageName("ubuntu-", ""), }, }, }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { c := docker.NewMockClient(t) info, err := helperimage.Get("", tt.config) require.NoError(t, err) e := &executor{ AbstractExecutor: executors.AbstractExecutor{ Build: &common.Build{ Job: spec.Job{ Variables: tt.jobVariables, }, Runner: &common.RunnerConfig{}, }, Config: common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Shell: tt.config.Shell, Docker: &common.DockerConfig{ HelperImageFlavor: tt.config.Flavor, }, }, }, }, dockerConn: &dockerConnection{Client: c}, helperImageInfo: info, } tt.clientAssertions(c) image := e.getLocalHelperImage() assert.Equal(t, tt.expectedImage, image) }) } } func createFakePrebuiltImages(t *testing.T, architecture string) { t.Helper() // Create fake image files so that tests do not need helper images built tempImgDir := t.TempDir() prevPrebuiltImagesPaths := prebuilt.PrebuiltImagesPaths t.Cleanup(func() { prebuilt.PrebuiltImagesPaths = prevPrebuiltImagesPaths }) prebuilt.PrebuiltImagesPaths = []string{tempImgDir} for _, fakeImgName := range []string{ fmt.Sprintf("prebuilt-alpine-%s.tar.xz", architecture), fmt.Sprintf("prebuilt-alpine-%s-pwsh.tar.xz", architecture), fmt.Sprintf("prebuilt-ubuntu-%s.tar.xz", 
		architecture),
		fmt.Sprintf("prebuilt-ubuntu-%s-pwsh.tar.xz", architecture),
		fmt.Sprintf("prebuilt-ubuntu-%s.docker.tar.zst", architecture),
		fmt.Sprintf("prebuilt-windows-nanoserver-ltsc2019-%s.docker.tar.zst", architecture),
	} {
		// Empty files are enough: the tests only need the files to exist at
		// the prebuilt-image lookup paths, not to contain a real image.
		require.NoError(t, os.WriteFile(filepath.Join(tempImgDir, fakeImgName), nil, 0666))
	}
}

// TestGetUIDandGID verifies that getUIDandGID returns the UID/GID reported by
// the container user inspector, and that an error from either the UID or the
// GID lookup is propagated with zero values returned for both IDs.
func TestGetUIDandGID(t *testing.T) {
	ctx := t.Context()

	testContainerID := "test-ID"
	testImageSHA := "test-SHA"
	testUID := 456
	testGID := 789

	tests := map[string]struct {
		mockInspect   func(t *testing.T, i *user.MockInspect)
		expectedError error
	}{
		"UID check returns error": {
			mockInspect: func(t *testing.T, i *user.MockInspect) {
				i.On("UID", ctx, testContainerID).Return(0, assert.AnError).Once()
			},
			expectedError: assert.AnError,
		},
		"UID check succeeds, GID check returns error": {
			mockInspect: func(t *testing.T, i *user.MockInspect) {
				i.On("UID", ctx, testContainerID).Return(testUID, nil).Once()
				i.On("GID", ctx, testContainerID).Return(0, assert.AnError).Once()
			},
			expectedError: assert.AnError,
		},
		"both checks succeed": {
			mockInspect: func(t *testing.T, i *user.MockInspect) {
				i.On("UID", ctx, testContainerID).Return(testUID, nil).Once()
				i.On("GID", ctx, testContainerID).Return(testGID, nil).Once()
			},
			expectedError: nil,
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			inspectMock := user.NewMockInspect(t)
			tt.mockInspect(t, inspectMock)

			log, _ := logrustest.NewNullLogger()
			uid, gid, err := getUIDandGID(ctx, log, inspectMock, testContainerID, testImageSHA)
			if tt.expectedError != nil {
				// On failure both IDs are expected to be zero.
				assert.Equal(t, 0, uid)
				assert.Equal(t, 0, gid)
				assert.ErrorIs(t, err, tt.expectedError)
				return
			}

			assert.NoError(t, err)
			assert.Equal(t, testUID, uid)
			assert.Equal(t, testGID, gid)
		})
	}
}

// TestExpandingDockerImageWithImagePullPolicyAlways checks that a build
// container is created without error for an image using the "always" pull
// policy, and that the docker memory limit ("42m") is applied to the
// container's HostConfig.
func TestExpandingDockerImageWithImagePullPolicyAlways(t *testing.T) {
	dockerConfig := &common.DockerConfig{
		Memory: "42m",
	}

	imageConfig := spec.Image{
		Name:         "alpine",
		PullPolicies: []spec.PullPolicy{common.PullPolicyAlways},
	}

	cce := func(t *testing.T, config
*container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, int64(44040192), hostConfig.Memory) } c, e := prepareTestDockerConfiguration(t, dockerConfig, cce, "alpine", "alpine:latest") c.On("ContainerInspect", mock.Anything, "abc"). Return(container.InspectResponse{}, nil).Once() err := e.createVolumesManager() require.NoError(t, err) err = e.createPullManager() require.NoError(t, err) cfgTor := newDefaultContainerConfigurator(e, buildContainerType, imageConfig, []string{"/bin/sh"}, []string{}) _, err = e.createContainer(buildContainerType, imageConfig, []string{}, cfgTor) assert.NoError(t, err, "Should create container without errors") } func TestExpandingDockerImageWithImagePullPolicyNever(t *testing.T) { dockerConfig := &common.DockerConfig{ Memory: "42m", } imageConfig := spec.Image{ Name: "alpine", PullPolicies: []spec.PullPolicy{common.PullPolicyNever}, } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, int64(44040192), hostConfig.Memory) } _, e := createExecutorForTestDockerConfiguration(t, dockerConfig, cce) err := e.createVolumesManager() require.NoError(t, err) err = e.createPullManager() require.NoError(t, err) cfgTor := newDefaultContainerConfigurator(e, buildContainerType, imageConfig, []string{"/bin/sh"}, []string{}) _, err = e.createContainer(buildContainerType, imageConfig, []string{}, cfgTor) assert.Contains( t, err.Error(), `invalid pull policy for image "alpine"`, ) assert.Contains( t, err.Error(), fmt.Sprintf("pull_policy (%v) defined in %s is not one of the allowed_pull_policies (%v)", "[never]", "GitLab pipeline config", "[always]"), ) } func TestDockerImageWithVariablePlatform(t *testing.T) { // Test with and without setting the platform to make sure that variable expansion works in both cases for _, platform := range []string{"linux/amd64", ""} { c := docker.NewMockClient(t) p := pull.NewMockManager(t) // Ensure that 
the pull manager gets called with the expanded platform p.On("GetDockerImage", mock.Anything, spec.ImageDockerOptions{Platform: platform}, mock.Anything). Return(nil, nil). Once() e := executorWithMockClient(c) e.pullManager = p e.Config.Docker = &common.DockerConfig{} imageConfig := spec.Image{ Name: "alpine", ExecutorOptions: spec.ImageExecutorOptions{ Docker: spec.ImageDockerOptions{ Platform: "${PLATFORM}", }, }, PullPolicies: []spec.PullPolicy{common.PullPolicyAlways}, } e.Build.Variables = append(e.Build.Variables, spec.Variable{ Key: "PLATFORM", Value: platform, }) _, err := e.expandAndGetDockerImage(imageConfig.Name, []string{}, imageConfig.ExecutorOptions.Docker, imageConfig.PullPolicies) assert.NoError(t, err) } } func TestExpandingVolumeDestination(t *testing.T) { dockerClient := docker.NewMockClient(t) executor := executorWithMockClient(dockerClient) executor.Build = &common.Build{ Job: spec.Job{ Variables: spec.Variables{ spec.Variable{Key: "JOB_VAR_1", Value: "1"}, spec.Variable{Key: "JOB_VAR_2", Value: "2"}, spec.Variable{Key: "COMBINED_VAR", Value: "${JOB_VAR_1}-${JOB_VAR_2}-3"}, }, JobInfo: spec.JobInfo{ ProjectID: 1234, }, }, Runner: &common.RunnerConfig{ RunnerCredentials: common.RunnerCredentials{ Token: "theToken", }, SystemID: "some-system-id", }, ProjectRunnerID: 5678, } executor.Config = common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Docker: &common.DockerConfig{ CacheDir: "", Volumes: []string{ // source should not be expanded, destination should be expanded "/host/${COMBINED_VAR}:/tmp/${COMBINED_VAR}", // a new volume for the expanded destination should be created "/new/cache/vol-${COMBINED_VAR}-foo", // expected to be passed on as is "/${:/tmp", "/host:/tmp/foo/$", }, }, }, } // We need to explicitly connect, as we don't run Prepare where this would usually happen. // In this context, this is only used to create a connection based on the mock client, and slap that onto the executor // struct for later use. 
err := executor.dockerConnector.Connect(t.Context(), common.ExecutorPrepareOptions{}, executor) assert.NoError(t, err, "connecting connector") executor.volumeParser = parser.NewLinuxParser(executor.ExpandValue) err = executor.createLabeler() assert.NoError(t, err, "creating labeler") err = executor.createVolumesManager() assert.NoError(t, err, "creating volumes manager") // for the cache volume we expect a volume creation call expectedVolume := func(co volume.CreateOptions) bool { // name build from hashed runner/build stuff & the md5sum of the (expanded) destination ("/new/cache/vol-1-2-3-foo") isExpected := assert.Equal(t, "runner-cb27ac1df55ad5c5857ef343b03639cf-cache-bffb7fe32becf1f1e4d6c9604d09f9d7", co.Name) // check for some labels, specifically the ones that moved from the volume name to metadata expectedLabels := map[string]string{ "com.gitlab.gitlab-runner.project.id": "1234", "com.gitlab.gitlab-runner.project.runner_id": "5678", "com.gitlab.gitlab-runner.runner.id": "theToken", "com.gitlab.gitlab-runner.runner.system_id": "some-system-id", } for expectedKey, expectedValue := range expectedLabels { actualValue, exists := co.Labels[expectedKey] isExpected = isExpected && assert.True(t, exists, "expected volume label %q, but got none", expectedKey) && assert.Equal(t, expectedValue, actualValue, "volume label %q", expectedKey) } return isExpected } dockerClient.On("VolumeCreate", mock.Anything, mock.MatchedBy(expectedVolume)). Return(volume.Volume{}, nil). 
Once() err = executor.createVolumes() assert.NoError(t, err, "creating volumes") // the volume manager is expected to have some binds set up expectedBinds := []string{ // expansion only in the destination "/host/${COMBINED_VAR}:/tmp/1-2-3", // var ref in the middle of the string "/new/cache/vol-1-2-3-foo", // invalid var refs are passed on (to fail later, if really invalid) "/${:/tmp", "/host:/tmp/foo/$", } assert.ElementsMatch(t, expectedBinds, executor.volumesManager.Binds()) } func TestDockerImageWithUser(t *testing.T) { tests := map[string]struct { jobUser spec.StringOrInt64 runnerUser, want string allowedUsers []string wantErr bool }{ "no allowed users, neither specified": {}, "no allowed users, runner user specified": {runnerUser: "baba", want: "baba"}, "no allowed users, job user specified": {jobUser: "baba", want: "baba"}, "no allowed users, both specified": {runnerUser: "baba", jobUser: "yaga", want: "baba"}, "ok allowed users, neither specified": {allowedUsers: []string{"baba"}}, "ok allowed users, runner user specified": {allowedUsers: []string{"baba"}, runnerUser: "baba", want: "baba"}, "ok allowed users, job user specified": {allowedUsers: []string{"baba"}, jobUser: "baba", want: "baba"}, "ok allowed users, both specified": {allowedUsers: []string{"baba"}, runnerUser: "baba", jobUser: "yaga", want: "baba"}, "ok allowed users, job user as variable": {allowedUsers: []string{"baba"}, jobUser: "${TTUSER}", want: "baba"}, "bad allowed users, runner user specified": {allowedUsers: []string{"yaga"}, runnerUser: "baba", want: "", wantErr: true}, "bad allowed users, job user specified": {allowedUsers: []string{"yaga"}, jobUser: "baba", want: "", wantErr: true}, "bad allowed users, both specified": {allowedUsers: []string{"blammo"}, runnerUser: "baba", jobUser: "yaga", want: "", wantErr: true}, } for name, tt := range tests { t.Run(name, func(t *testing.T) { dockerConfig := &common.DockerConfig{ User: tt.runnerUser, AllowedUsers: tt.allowedUsers, } imageConfig 
:= spec.Image{ Name: "alpine", ExecutorOptions: spec.ImageExecutorOptions{ Docker: spec.ImageDockerOptions{ User: tt.jobUser, }, }, } cce := func(t *testing.T, config *container.Config, _ *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, tt.want, config.User) } c, e := createExecutorForTestDockerConfiguration(t, dockerConfig, cce) c.On("ImageInspectWithRaw", mock.Anything, mock.Anything). Return(image.InspectResponse{ID: "123"}, []byte{}, nil).Maybe() c.On("ImagePullBlocking", mock.Anything, mock.Anything, mock.Anything). Return(nil).Maybe() c.On("NetworkList", mock.Anything, mock.Anything). Return([]network.Summary{}, nil).Maybe() c.On("ContainerRemove", mock.Anything, mock.Anything, mock.Anything). Return(nil).Maybe() c.On("ContainerInspect", mock.Anything, "abc"). Return(container.InspectResponse{}, nil).Maybe() e.Build.Variables = append(e.Build.Variables, spec.Variable{ Key: "TTUSER", Value: tt.want, }) err := e.createVolumesManager() require.NoError(t, err) err = e.createPullManager() require.NoError(t, err) cfgTor := newDefaultContainerConfigurator(e, buildContainerType, imageConfig, []string{"/bin/sh"}, []string{}) _, err = e.createContainer(buildContainerType, imageConfig, []string{}, cfgTor) if !tt.wantErr { require.NoError(t, err) } else { require.Contains(t, err.Error(), "is not an allowed user") } }) } } func TestDockerConfigGetLogConfig(t *testing.T) { tests := []struct { name string logOptions map[string]string expectedConfig map[string]string expectedError string }{ { name: "empty log options", }, { name: "with env option", logOptions: map[string]string{"env": "CI_JOB_ID,CI_JOB_NAME,CI_PROJECT_ID"}, expectedConfig: map[string]string{"env": "CI_JOB_ID,CI_JOB_NAME,CI_PROJECT_ID"}, }, { name: "with labels and env options", logOptions: map[string]string{"labels": "com.gitlab.gitlab-runner.job.id,com.gitlab.gitlab-runner.project.id", "env": "CI_JOB_ID,CI_JOB_NAME,CI_PROJECT_ID"}, expectedConfig: map[string]string{"labels": 
"com.gitlab.gitlab-runner.job.id,com.gitlab.gitlab-runner.project.id", "env": "CI_JOB_ID,CI_JOB_NAME,CI_PROJECT_ID"}, }, { name: "invalid key", logOptions: map[string]string{"foo": "bar"}, expectedError: `creating docker log configuration: invalid log options: only ["env" "labels"] are allowed, but found: ["foo"]`, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { c := docker.NewMockClient(t) if tt.expectedError == "" { mockExecutorPrepareInteraction(t, c) } e := executorWithMockClient(c) build := &common.Build{ Runner: &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Docker: &common.DockerConfig{ Image: "some-image", LogOptions: tt.logOptions, }, }, }, } err := e.Prepare(common.ExecutorPrepareOptions{ Context: t.Context(), Build: build, BuildLogger: buildlogger.New(&common.Trace{Writer: io.Discard}, logrus.WithField("test", t.Name()), buildlogger.Options{}), Config: build.Runner, }) if tt.expectedError != "" { var buildErr *common.BuildError assert.ErrorAs(t, err, &buildErr, "expected error to be a *common.BuildError") assert.Equal(t, common.RunnerSystemFailure, buildErr.FailureReason, "expected a system failure") assert.Equal(t, tt.expectedError, buildErr.Error()) return // when prepare fails, we can bail out } else { require.NoError(t, err) } hasExpectedLogConfig := func(t *testing.T, hostConfig *container.HostConfig) { t.Helper() assert.Equal(t, "json-file", hostConfig.LogConfig.Type) assert.Equal(t, tt.logOptions, hostConfig.LogConfig.Config) } t.Run("build container", func(t *testing.T) { buildContainerHostConfig, err := e.createHostConfig(true, false) assert.NoError(t, err, "creating build container's host config") hasExpectedLogConfig(t, buildContainerHostConfig) }) t.Run("service container", func(t *testing.T) { serviceContainerHostConfig, err := e.createHostConfigForService(false, nil, nil) assert.NoError(t, err, "creating service container's host config") hasExpectedLogConfig(t, serviceContainerHostConfig) }) }) } } // 
mockExecutorPrepareInteraction mocks out interactions the executor does with the docker client, so that Prepare can // succeed. func mockExecutorPrepareInteraction(t *testing.T, c *docker.MockClient) { waitResponseCh := make(chan container.WaitResponse) errCh := make(chan error) tCtx := t.Context() go func() { for { select { case waitResponseCh <- container.WaitResponse{}: // noop, just send out case errCh <- nil: // noop, just send out case <-tCtx.Done(): return } } }() c.EXPECT(). ImageInspectWithRaw(mock.Anything, mock.Anything). Return(image.InspectResponse{}, []byte{}, nil). Once() c.EXPECT(). VolumeCreate(mock.Anything, mock.Anything). Return(volume.Volume{Name: ""}, nil). Once() c.EXPECT(). ContainerCreate(mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything). Return(container.CreateResponse{}, nil). Once() c.EXPECT(). ContainerStart(mock.Anything, mock.Anything, mock.Anything). Return(nil). Once() c.EXPECT(). ContainerWait(mock.Anything, mock.Anything, mock.Anything). Return(waitResponseCh, errCh). Once() c.EXPECT(). ContainerRemove(mock.Anything, mock.Anything, mock.Anything). Return(nil). 
	Once()
}

// Compile-time checks that the test doubles below satisfy the executors
// interfaces they stand in for.
var _ executors.Environment = (*env)(nil)

// env is a stub executors.Environment; Prepare hands out an envClient so the
// test can observe whether the docker executor dialed through it.
type env struct {
	client *envClient
}

var _ executors.Client = &envClient{}

// envClient is a stub executors.Client that records whether any dial method
// was invoked and always fails dialing with assert.AnError.
type envClient struct {
	dialed bool // set by Dial/DialRun
}

func (c *envClient) Dial(n string, addr string) (net.Conn, error) {
	c.dialed = true
	return nil, assert.AnError
}

func (c *envClient) Run(ctx context.Context, options executors.RunOptions) error {
	return nil
}

func (c *envClient) DialRun(ctx context.Context, command string) (net.Conn, error) {
	c.dialed = true
	return nil, assert.AnError
}

func (c *envClient) Close() error {
	return nil
}

func (e *env) WithContext(ctx context.Context) (context.Context, context.CancelFunc) {
	return context.WithCancel(ctx)
}

// Prepare returns a fresh envClient and remembers it so the test can later
// inspect whether the executor dialed through it.
func (e *env) Prepare(
	ctx context.Context,
	logger buildlogger.Logger,
	options common.ExecutorPrepareOptions,
) (executors.Client, error) {
	e.client = &envClient{}
	return e.client, nil
}

// TestConnectEnvironment verifies that when the build carries an
// executors.Environment as ExecutorData, the executor's Prepare obtains a
// client from it and dials through it. The stub client always fails dialing
// with assert.AnError, which Prepare must propagate.
func TestConnectEnvironment(t *testing.T) {
	test.SkipIfGitLabCIOn(t, test.OSWindows)

	e := &executor{
		AbstractExecutor: executors.AbstractExecutor{
			ExecutorOptions: executors.ExecutorOptions{},
		},
	}
	e.volumeParser = parser.NewLinuxParser(e.ExpandValue)

	env := &env{}
	build := &common.Build{
		Job: spec.Job{
			Image: spec.Image{
				Name: "test",
			},
		},
		Runner: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				Docker: &common.DockerConfig{},
			},
		},
		ExecutorData: env,
	}

	err := e.Prepare(common.ExecutorPrepareOptions{
		Config: &common.RunnerConfig{
			RunnerSettings: common.RunnerSettings{
				BuildsDir: "/tmp",
				CacheDir:  "/tmp",
				Shell:     "bash",
				Docker:    build.Runner.Docker,
			},
		},
		Build:   build,
		Context: t.Context(),
	})
	// The stub's dial error must surface, and the environment's client must
	// have been created and dialed.
	require.ErrorIs(t, err, assert.AnError)
	require.NotNil(t, env.client)
	require.True(t, env.client.dialed)
}

// TestTooManyServicesRequestedError checks the Is comparison of
// tooManyServicesRequestedError: two values match only when both the allowed
// and requested counts are equal.
func TestTooManyServicesRequestedError(t *testing.T) {
	t.Parallel()
	t.Run(".Is()", func(t *testing.T) {
		tests := map[string]struct {
			err1 tooManyServicesRequestedError
			err2 tooManyServicesRequestedError
			want bool
		}{
			"matching errors": {
				err1: tooManyServicesRequestedError{allowed: 1, requested: 2},
				err2:
tooManyServicesRequestedError{allowed: 1, requested: 2}, want: true, }, "mismatching allowed field": { err1: tooManyServicesRequestedError{allowed: 1, requested: 2}, err2: tooManyServicesRequestedError{allowed: 10, requested: 2}, want: false, }, "mismatching requested field": { err1: tooManyServicesRequestedError{allowed: 1, requested: 2}, err2: tooManyServicesRequestedError{allowed: 1, requested: 20}, want: false, }, } for testName, test := range tests { t.Run(testName, func(t *testing.T) { have := test.err1.Is(&test.err2) assert.Equal(t, test.want, have) }) } }) } func Test_bootstrap(t *testing.T) { type testCase struct { setup func(*volumes.MockManager, *docker.MockClient, *common.Build) []string expectedBinds []string wantStage common.ExecutorStage } tests := map[string]map[string]testCase{ "linux": { "native steps enabled": { expectedBinds: []string{"/opt/gitlab-runner"}, wantStage: ExecutorStageBootstrap, setup: func(vm *volumes.MockManager, c *docker.MockClient, b *common.Build) []string { binds := make([]string, 1) name := "blablabla" b.Job.Run = []schema.Step{{Name: &name}} c.EXPECT().ImageInspectWithRaw(mock.Anything, mock.Anything).Return(image.InspectResponse{ ID: "helper-id", }, nil, nil) c.EXPECT().ContainerCreate(mock.Anything, &container.Config{ Image: "helper-id", Cmd: []string{"gitlab-runner-helper", "steps", "bootstrap", bootstrappedBinary}, Tty: false, AttachStdin: false, AttachStdout: true, AttachStderr: true, OpenStdin: false, StdinOnce: true, NetworkDisabled: true, }, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(container.CreateResponse{ID: "container-id"}, nil) c.EXPECT().ContainerAttach(mock.Anything, "container-id", mock.Anything).Return(types.HijackedResponse{ Reader: bufio.NewReader(strings.NewReader("")), Conn: &net.UnixConn{}, }, nil) c.EXPECT().ContainerRemove(mock.Anything, "container-id", mock.Anything).Return(nil) bodyCh := make(chan container.WaitResponse, 1) bodyCh <- container.WaitResponse{StatusCode: 0} 
c.EXPECT().ContainerWait(mock.Anything, "container-id", container.WaitConditionNextExit). Return((<-chan container.WaitResponse)(bodyCh), nil) c.EXPECT().ContainerStart(mock.Anything, "container-id", mock.Anything).Return(nil) vm.EXPECT().CreateTemporary(mock.Anything, "/opt/gitlab-runner"). Return(nil). Run(func(ctx context.Context, destination string) { binds[0] = destination }). Once() vm.EXPECT().Binds().Return(binds).Once() return binds }, }, "native steps not enabled": { setup: func(vm *volumes.MockManager, c *docker.MockClient, b *common.Build) []string { b.Variables = append(b.Variables, spec.Variable{ Key: "FF_SCRIPT_TO_STEP_MIGRATION", Value: "false", }) return nil }, }, }, "windows": { "native steps enabled": {}, "native steps not enabled": {}, }, } for name, tt := range tests[runtime.GOOS] { t.Run(name, func(t *testing.T) { c := docker.NewMockClient(t) vm := volumes.NewMockManager(t) e := executor{ volumesManager: vm, dockerConn: &dockerConnection{Client: c}, AbstractExecutor: executors.AbstractExecutor{ Context: t.Context(), Build: &common.Build{ ExecutorFeatures: common.FeaturesInfo{ NativeStepsIntegration: true, }, }, Config: common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ Docker: &common.DockerConfig{}, }, }, }, } var binds []string if tt.setup != nil { binds = tt.setup(vm, c, e.Build) } assert.NoError(t, e.bootstrap()) assert.Equal(t, tt.expectedBinds, binds) assert.Equal(t, tt.wantStage, e.GetCurrentStage()) }) } } // TestDockerSlotCgroupSettings verifies that slot-based cgroup settings // are actually applied to container HostConfig when creating containers func TestDockerSlotCgroupSettings(t *testing.T) { t.Run("Build container with slot cgroups enabled", func(t *testing.T) { runnerConfig := &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ UseSlotCgroups: true, SlotCgroupTemplate: "runner/slot-${slot}", Docker: &common.DockerConfig{ CgroupParent: "should-not-use-this", }, }, } // Verify HostConfig.CgroupParent is set to 
slot-based value cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, "runner/slot-5", hostConfig.CgroupParent, "HostConfig.CgroupParent should be set to slot-based value") } testDockerConfigurationWithSlotCgroups(t, runnerConfig, &mockAutoscalerExecutorData{slot: 5}, cce) }) t.Run("Build container with slot cgroups enabled using default template", func(t *testing.T) { runnerConfig := &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ UseSlotCgroups: true, Docker: &common.DockerConfig{ CgroupParent: "fallback-cgroup", }, }, } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, "gitlab-runner/slot-10", hostConfig.CgroupParent, "HostConfig.CgroupParent should use default template") } testDockerConfigurationWithSlotCgroups(t, runnerConfig, &mockAutoscalerExecutorData{slot: 10}, cce) }) t.Run("Build container with slot cgroups disabled", func(t *testing.T) { runnerConfig := &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ UseSlotCgroups: false, Docker: &common.DockerConfig{ CgroupParent: "static-build-cgroup", }, }, } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, "static-build-cgroup", hostConfig.CgroupParent, "HostConfig.CgroupParent should use static value when slot cgroups disabled") } testDockerConfigurationWithSlotCgroups(t, runnerConfig, &mockAutoscalerExecutorData{slot: 5}, cce) }) t.Run("Build container with slot cgroups enabled but no slot available", func(t *testing.T) { runnerConfig := &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ UseSlotCgroups: true, Docker: &common.DockerConfig{ CgroupParent: "fallback-build-cgroup", }, }, } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { assert.Equal(t, 
"fallback-build-cgroup", hostConfig.CgroupParent, "HostConfig.CgroupParent should fallback when no slot available") } testDockerConfigurationWithSlotCgroups(t, runnerConfig, nil, cce) }) t.Run("Service container with slot cgroups enabled", func(t *testing.T) { runnerConfig := &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ UseSlotCgroups: true, Docker: &common.DockerConfig{ ServiceCgroupParent: "should-not-use-this", ServiceSlotCgroupTemplate: "runner/service-${slot}", }, }, } testDockerServiceContainerCgroup(t, runnerConfig, &mockAutoscalerExecutorData{slot: 7}, "runner/service-7") }) t.Run("Service container with slot cgroups enabled using default template", func(t *testing.T) { runnerConfig := &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ UseSlotCgroups: true, Docker: &common.DockerConfig{ ServiceCgroupParent: "fallback-service", }, }, } testDockerServiceContainerCgroup(t, runnerConfig, &mockAutoscalerExecutorData{slot: 3}, "gitlab-runner/slot-3") }) t.Run("Service container with slot cgroups disabled", func(t *testing.T) { runnerConfig := &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ UseSlotCgroups: false, Docker: &common.DockerConfig{ ServiceCgroupParent: "static-service-cgroup", }, }, } testDockerServiceContainerCgroup(t, runnerConfig, &mockAutoscalerExecutorData{slot: 5}, "static-service-cgroup") }) t.Run("Service container with slot cgroups enabled but no slot available", func(t *testing.T) { runnerConfig := &common.RunnerConfig{ RunnerSettings: common.RunnerSettings{ UseSlotCgroups: true, Docker: &common.DockerConfig{ ServiceCgroupParent: "fallback-service-cgroup", }, }, } testDockerServiceContainerCgroup(t, runnerConfig, nil, "fallback-service-cgroup") }) } // Mock ExecutorData for testing slot functionality type mockAutoscalerExecutorData struct { slot int } func (m *mockAutoscalerExecutorData) AcquisitionSlot() int { return m.slot } // testDockerConfigurationWithSlotCgroups tests that build containers are 
created with slot-based cgroups func testDockerConfigurationWithSlotCgroups( t *testing.T, runnerConfig *common.RunnerConfig, executorData interface{}, cce containerConfigExpectations, ) { c, e := prepareTestDockerConfiguration(t, runnerConfig.Docker, cce, "alpine", "alpine:latest") c.On("ContainerInspect", mock.Anything, "abc"). Return(container.InspectResponse{}, nil).Once() // Set the executor data for slot testing e.Build.ExecutorData = executorData // Set the runner config for slot testing e.Config = *runnerConfig err := e.createVolumesManager() require.NoError(t, err) err = e.createPullManager() require.NoError(t, err) imageConfig := spec.Image{Name: "alpine"} cfgTor := newDefaultContainerConfigurator(e, buildContainerType, imageConfig, []string{"/bin/sh"}, []string{}) _, err = e.createContainer(buildContainerType, imageConfig, []string{}, cfgTor) assert.NoError(t, err, "Should create container without errors") } // testDockerServiceContainerCgroup tests that service containers are created with the expected cgroup parent func testDockerServiceContainerCgroup( t *testing.T, runnerConfig *common.RunnerConfig, executorData interface{}, expectedCgroup string, ) { // Create mock docker client c := docker.NewMockClient(t) // Create mock volumes manager vm := volumes.NewMockManager(t) vm.On("Binds").Return([]string{}) e := new(executor) e.dockerConn = &dockerConnection{Client: c} e.Config = *runnerConfig e.Build = &common.Build{ ExecutorData: executorData, } e.volumesManager = vm // Call createHostConfigForService and verify the cgroup is set correctly hostConfig, err := e.createHostConfigForService(false, nil, nil) require.NoError(t, err) assert.Equal(t, expectedCgroup, hostConfig.CgroupParent, "Service container HostConfig.CgroupParent should be set correctly") } func TestPrepareContainerEnvVariables(t *testing.T) { test.SkipIfGitLabCIOn(t, test.OSWindows) tests := map[string]struct { featureFlagEnabled bool jobVariables spec.Variables expectedVarNames []string 
		shouldHaveRunnerVarNames bool
	}{
		"feature flag disabled returns variables unchanged": {
			featureFlagEnabled: false,
			jobVariables: spec.Variables{
				{Key: "VAR1", Value: "value1"},
				{Key: "VAR2", Value: "value2"},
			},
			shouldHaveRunnerVarNames: false,
		},
		"feature flag enabled compresses variable names": {
			featureFlagEnabled: true,
			jobVariables: spec.Variables{
				{Key: "VAR1", Value: "value1"},
				{Key: "VAR2", Value: "value2"},
				{Key: "VAR3", Value: "value3"},
			},
			expectedVarNames:         []string{"VAR1", "VAR2", "VAR3"},
			shouldHaveRunnerVarNames: true,
		},
		"feature flag enabled with empty variables": {
			featureFlagEnabled:       true,
			jobVariables:             spec.Variables{},
			shouldHaveRunnerVarNames: true,
		},
		"feature flag enabled with many variables": {
			featureFlagEnabled: true,
			jobVariables: spec.Variables{
				{Key: "LONG_VARIABLE_NAME_1", Value: "value1"},
				{Key: "LONG_VARIABLE_NAME_2", Value: "value2"},
				{Key: "LONG_VARIABLE_NAME_3", Value: "value3"},
			},
			expectedVarNames:         []string{"LONG_VARIABLE_NAME_1", "LONG_VARIABLE_NAME_2", "LONG_VARIABLE_NAME_3"},
			shouldHaveRunnerVarNames: true,
		},
	}

	for testName, test := range tests {
		t.Run(testName, func(t *testing.T) {
			e := &executor{
				AbstractExecutor: executors.AbstractExecutor{
					Build: &common.Build{
						Job: spec.Job{
							Variables: test.jobVariables,
						},
					},
				},
			}

			// Set the feature flag
			if test.featureFlagEnabled {
				e.Build.ExecutorFeatures.NativeStepsIntegration = test.featureFlagEnabled
				e.Build.Variables = append(e.Build.Variables, spec.Variable{
					Key:   featureflags.UseScriptToStepMigration,
					Value: "true",
				})
			}

			result, err := e.prepareContainerEnvVariables()
			require.NoError(t, err)
			require.NotNil(t, result)
			// With the flag enabled the prepared variables are expected to
			// contain the runner job-variable-names entry; without it, not.
			require.Equal(t, test.shouldHaveRunnerVarNames, checkVariable(result, runnerJobVarsNames))
		})
	}
}

// checkVariable reports whether vars contains a variable with the given key.
func checkVariable(vars spec.Variables, key string) bool {
	for i := range vars {
		if vars[i].Key == key {
			return true
		}
	}
	return false
}

// TestRemoveContainerVolumeKeep verifies that removeContainer sets the
// RemoveVolumes field of the docker ContainerRemove options according to the
// VolumeKeep setting: volumes are removed unless VolumeKeep is true.
func TestRemoveContainerVolumeKeep(t *testing.T) {
	tests := []struct {
		name                  string
		volumeKeep            bool
		expectedRemoveVolumes bool
	}{
		{
			name:                  "VolumeKeep=false removes volumes",
			volumeKeep:            false,
			expectedRemoveVolumes: true,
		},
		{
			name:                  "VolumeKeep=true preserves volumes",
			volumeKeep:            true,
			expectedRemoveVolumes: false,
		},
	}

	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			c := docker.NewMockClient(t)

			e := &executor{}
			e.dockerConn = &dockerConnection{Client: c}
			e.Config.Docker = &common.DockerConfig{VolumeKeep: tc.volumeKeep}
			e.BuildLogger = buildlogger.New(nil, logrus.WithFields(logrus.Fields{}), buildlogger.Options{})

			c.On("NetworkList", mock.Anything, mock.Anything).
				Return([]network.Summary{}, nil).Once()

			// removeContainer must always force-remove, and must drop the
			// container's volumes only when VolumeKeep is disabled.
			expectedOptions := container.RemoveOptions{
				RemoveVolumes: tc.expectedRemoveVolumes,
				Force:         true,
			}
			c.On("ContainerRemove", mock.Anything, "test-container-id", expectedOptions).
				Return(nil).Once()

			err := e.removeContainer(t.Context(), "test-container-id")
			assert.NoError(t, err)
		})
	}
}

// TestProcessSecurityOpt verifies that processSecurityOpt inlines
// "seccomp=<file path>" options by loading the profile file's JSON contents,
// while passing every other security option (including the special seccomp
// values "unconfined" and "builtin", bare "seccomp", and inline JSON)
// through unchanged, and that a missing profile file yields an error.
func TestProcessSecurityOpt(t *testing.T) {
	// Create a temporary seccomp profile file
	seccompProfile := `{"defaultAction":"SCMP_ACT_ERRNO"}`
	seccompProfilePath := filepath.Join(t.TempDir(), "seccomp-profile.json")
	require.NoError(t, os.WriteFile(seccompProfilePath, []byte(seccompProfile), 0644))

	tests := map[string]struct {
		securityOpts  []string
		expectedOpts  []string
		expectedError string
	}{
		"empty security opts": {
			securityOpts: []string{},
			expectedOpts: []string{},
		},
		"nil security opts": {
			securityOpts: nil,
			expectedOpts: nil,
		},
		"non-seccomp options pass through": {
			securityOpts: []string{"apparmor=unconfined", "no-new-privileges"},
			expectedOpts: []string{"apparmor=unconfined", "no-new-privileges"},
		},
		"seccomp=unconfined passes through": {
			securityOpts: []string{"seccomp=unconfined"},
			expectedOpts: []string{"seccomp=unconfined"},
		},
		"seccomp=builtin passes through": {
			securityOpts: []string{"seccomp=builtin"},
			expectedOpts: []string{"seccomp=builtin"},
		},
		"bare seccomp without value passes through": {
			securityOpts: []string{"seccomp"},
			expectedOpts: []string{"seccomp"},
		},
		"inline seccomp JSON passes through": {
			securityOpts: []string{`seccomp={"defaultAction":"SCMP_ACT_ERRNO"}`},
			expectedOpts: []string{`seccomp={"defaultAction":"SCMP_ACT_ERRNO"}`},
		},
		"seccomp profile path is loaded": {
			securityOpts: []string{fmt.Sprintf("seccomp=%s", seccompProfilePath)},
			expectedOpts: []string{fmt.Sprintf("seccomp=%s", seccompProfile)},
		},
		"mixed security options": {
			// Only the path-style seccomp entry is rewritten; ordering and
			// the surrounding options are preserved.
			securityOpts: []string{
				"apparmor=unconfined",
				fmt.Sprintf("seccomp=%s", seccompProfilePath),
				"no-new-privileges",
			},
			expectedOpts: []string{
				"apparmor=unconfined",
				fmt.Sprintf("seccomp=%s", seccompProfile),
				"no-new-privileges",
			},
		},
		"non-existent file returns error": {
			securityOpts:  []string{"seccomp=/nonexistent/profile.json"},
			expectedError: "failed to read seccomp profile from /nonexistent/profile.json",
		},
	}

	for testName, tt := range tests {
		t.Run(testName, func(t *testing.T) {
			logger, _ := logrustest.NewNullLogger()
			e := &executor{
				AbstractExecutor: executors.AbstractExecutor{
					BuildLogger: buildlogger.New(nil, logger.WithField("test", t.Name()), buildlogger.Options{}),
				},
			}

			result, err := e.processSecurityOpt(tt.securityOpts)
			if tt.expectedError != "" {
				assert.Error(t, err)
				assert.Contains(t, err.Error(), tt.expectedError)
				return
			}

			require.NoError(t, err)
			assert.Equal(t, tt.expectedOpts, result)
		})
	}
}

// TestDockerSecurityOptSetting verifies that SecurityOpt entries from the
// runner configuration are processed (seccomp profile paths inlined) and end
// up on the job container's HostConfig.
func TestDockerSecurityOptSetting(t *testing.T) {
	// Create a temporary seccomp profile file
	seccompProfile := `{"defaultAction":"SCMP_ACT_ERRNO"}`
	seccompProfilePath := filepath.Join(t.TempDir(), "seccomp-profile.json")
	require.NoError(t, os.WriteFile(seccompProfilePath, []byte(seccompProfile), 0644))

	dockerConfig := &common.DockerConfig{
		SecurityOpt: []string{
			fmt.Sprintf("seccomp=%s", seccompProfilePath),
			"apparmor=unconfined",
		},
	}

	cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) {
		expected := []string{
			fmt.Sprintf("seccomp=%s", seccompProfile),
			"apparmor=unconfined",
		}
		assert.Equal(t, expected, hostConfig.SecurityOpt)
	}
testDockerConfigurationWithJobContainer(t, dockerConfig, cce) } func TestDockerServicesSecurityOptSetting(t *testing.T) { // Create a temporary seccomp profile file seccompProfile := `{"defaultAction":"SCMP_ACT_ERRNO"}` seccompProfilePath := filepath.Join(t.TempDir(), "seccomp-profile.json") require.NoError(t, os.WriteFile(seccompProfilePath, []byte(seccompProfile), 0644)) dockerConfig := &common.DockerConfig{ ServicesSecurityOpt: []string{ fmt.Sprintf("seccomp=%s", seccompProfilePath), "apparmor=unconfined", }, } cce := func(t *testing.T, config *container.Config, hostConfig *container.HostConfig, _ *network.NetworkingConfig) { expected := []string{ fmt.Sprintf("seccomp=%s", seccompProfile), "apparmor=unconfined", } assert.Equal(t, expected, hostConfig.SecurityOpt) } testDockerConfigurationWithServiceContainer(t, dockerConfig, cce) } ================================================ FILE: executors/docker/internal/exec/exec.go ================================================ package exec import ( "context" "errors" "io" "net" "github.com/docker/docker/api/types/container" "github.com/docker/docker/pkg/stdcopy" "github.com/sirupsen/logrus" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/wait" "gitlab.com/gitlab-org/gitlab-runner/helpers/docker" ) // conn is an interface wrapper used to generate mocks that are next used for tests // nolint:deadcode type conn interface { net.Conn } // reader is an interface wrapper used to generate mocks that are next used for tests // nolint:deadcode type reader interface { io.Reader } type IOStreams struct { Stdin io.Reader Stdout io.Writer Stderr io.Writer } type Docker interface { Exec(ctx context.Context, containerID string, streams IOStreams, gracefulExitFunc wait.GracefulExitFunc) error } // NewDocker returns a client for starting a new container and running a // command inside of it. // // The context passed is used to wait for any created container to stop. This // is likely an executor's context. 
// This means that waits to stop are only ever
// canceled should the job be aborted (either manually, or by exceeding the
// build time).
func NewDocker(ctx context.Context, c docker.Client, waiter wait.KillWaiter, logger logrus.FieldLogger) Docker {
	return &defaultDocker{
		ctx:    ctx,
		c:      c,
		waiter: waiter,
		logger: logger,
	}
}

// defaultDocker is the production Docker implementation. ctx is the
// long-lived (executor) context captured at construction; see Exec for why it
// is used instead of the per-call context when stopping containers.
type defaultDocker struct {
	ctx    context.Context
	c      docker.Client
	waiter wait.KillWaiter
	logger logrus.FieldLogger
}

// Exec attaches to containerID, starts it, streams streams.Stdin into the
// container and its demultiplexed output into streams.Stdout/Stderr, then
// stops (gracefully, falling back to kill) the container via the KillWaiter.
// It returns the first error from attach/start, the I/O copy goroutines, or
// the final StopKillWait.
func (d *defaultDocker) Exec(ctx context.Context, containerID string, streams IOStreams, gracefulExitFunc wait.GracefulExitFunc) error {
	d.logger.Debugln("Attaching to container", containerID, "...")

	hijacked, err := d.c.ContainerAttach(ctx, containerID, attachOptions())
	if err != nil {
		return err
	}
	defer hijacked.Close()

	d.logger.Debugln("Starting container", containerID, "...")
	err = d.c.ContainerStart(ctx, containerID, container.StartOptions{})
	if err != nil {
		return err
	}

	// stdout/stdin error channels, buffered intentionally so that if select{}
	// below exits, the go routines don't block forever upon container exit.
	stdoutErrCh := make(chan error, 1)
	stdinErrCh := make(chan error, 1)

	// Copy any output to the build trace
	go func() {
		_, errCopy := stdcopy.StdCopy(streams.Stdout, streams.Stderr, hijacked.Reader)

		// this goroutine can continue even whilst StopKillWait is in flight,
		// allowing a graceful stop. If reading stdout returns, we must close
		// attached connection, otherwise kills can be interfered with and
		// block indefinitely.
		hijacked.Close()
		stdoutErrCh <- errCopy
	}()

	// Write the input to the container and close its STDIN to get it to finish
	go func() {
		_, errCopy := io.Copy(hijacked.Conn, streams.Stdin)
		_ = hijacked.CloseWrite()
		// Only a failed copy is reported; a clean EOF lets the select below
		// be decided by stdout ending or the context being canceled.
		if errCopy != nil {
			stdinErrCh <- errCopy
		}
	}()

	// Wait until either:
	// - the job is aborted/cancelled/deadline exceeded
	// - stdin has an error
	// - stdout returns an error or nil, indicating the stream has ended and
	//   the container has exited
	select {
	case <-ctx.Done():
		err = errors.New("aborted")
	case err = <-stdinErrCh:
	case err = <-stdoutErrCh:
	}

	if err != nil {
		d.logger.Debugln("Container", containerID, "finished with", err)
	}

	// Try to gracefully stop, then kill and wait for the exit.
	// Containers are stopped so that they can be reused by the job.
	//
	// It's very likely that at this point, the context passed to Exec has
	// been cancelled, so is unable to be used. Instead, we use the context
	// passed to NewDocker.
	return d.waiter.StopKillWait(d.ctx, containerID, nil, gracefulExitFunc)
}

// attachOptions returns the attach configuration used for every exec:
// streaming mode with all three stdio channels enabled.
func attachOptions() container.AttachOptions {
	return container.AttachOptions{
		Stream: true,
		Stdin:  true,
		Stdout: true,
		Stderr: true,
	}
}

================================================
FILE: executors/docker/internal/exec/exec_test.go
================================================
//go:build !integration

package exec

import (
	"bufio"
	"bytes"
	"context"
	"errors"
	"fmt"
	"io"
	"sync"
	"testing"

	"github.com/docker/docker/api/types"
	"github.com/docker/docker/api/types/container"
	"github.com/docker/docker/pkg/stdcopy"
	"github.com/sirupsen/logrus"
	"github.com/sirupsen/logrus/hooks/test"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/wait"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/docker"
)

// TestDefaultDocker_Exec exercises defaultDocker.Exec against mocked Docker
// client, connection and kill-waiter, covering attach/start failures, context
// cancellation, stdin/stdout errors and output demultiplexing.
func TestDefaultDocker_Exec(t *testing.T) {
	id := "container-id"

	// input builds a stdin reader whose Read always yields the given error.
	input := func(t *testing.T, err error) io.Reader {
		r := newMockReader(t)
		r.On("Read",
			mock.Anything).
			Return(0, err).
			Maybe()
		return r
	}

	// mockWorkingClient wires a successful ContainerAttach/ContainerStart
	// pair, with the hijacked connection's stdout fed from the given reader.
	mockWorkingClient := func(
		t *testing.T,
		clientMock *docker.MockClient,
		reader io.Reader,
		expectedCtx context.Context,
	) {
		conn := newMockConn(t)
		conn.On("Close").Return(nil).Maybe()
		conn.On("Write", mock.Anything).Return(0, nil).Maybe()

		hijacked := types.HijackedResponse{
			Conn:   conn,
			Reader: bufio.NewReader(reader),
		}

		clientMock.On("ContainerAttach", expectedCtx, id, attachOptions()).
			Return(hijacked, nil).
			Once()
		clientMock.On("ContainerStart", expectedCtx, id, container.StartOptions{}).
			Return(nil).
			Once()
	}

	tests := map[string]struct {
		input             io.Reader
		cancelContext     bool
		setupDockerClient func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context)
		setupKillWaiter   func(t *testing.T, waiterMock *wait.MockKillWaiter, expectedCtx context.Context)
		assertLogOutput   func(t *testing.T, logOutput string)
		expectedError     error
		expectedStdOut    string
		expectedStdErr    string
	}{
		"ContainerAttach error": {
			cancelContext: false,
			setupDockerClient: func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context) {
				clientMock.On("ContainerAttach", expectedCtx, id, attachOptions()).
					Return(types.HijackedResponse{}, assert.AnError).
					Once()
			},
			setupKillWaiter: func(t *testing.T, waiterMock *wait.MockKillWaiter, expectedCtx context.Context) {},
			assertLogOutput: func(t *testing.T, logOutput string) {},
			expectedError:   assert.AnError,
		},
		"ContainerStart error": {
			cancelContext: false,
			setupDockerClient: func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context) {
				conn := newMockConn(t)
				conn.On("Close").Return(nil).Once()

				hijacked := types.HijackedResponse{
					Conn: conn,
				}

				clientMock.On("ContainerAttach", expectedCtx, id, attachOptions()).
					Return(hijacked, nil).
					Once()
				clientMock.On("ContainerStart", expectedCtx, id, container.StartOptions{}).
					Return(assert.AnError).
					Once()
			},
			setupKillWaiter: func(t *testing.T, waiterMock *wait.MockKillWaiter, expectedCtx context.Context) {},
			assertLogOutput: func(t *testing.T, logOutput string) {},
			expectedError:   assert.AnError,
		},
		"context done": {
			input:         input(t, io.EOF),
			cancelContext: true,
			setupDockerClient: func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context) {
				reader := newMockReader(t)
				reader.On("Read", mock.Anything).
					Return(0, nil).Maybe()

				mockWorkingClient(t, clientMock, reader, expectedCtx)
			},
			setupKillWaiter: func(t *testing.T, waiterMock *wait.MockKillWaiter, expectedCtx context.Context) {
				waiterMock.On("StopKillWait", expectedCtx, id, mock.Anything,
					mock.AnythingOfType("wait.GracefulExitFunc")).Return(nil).Once()
			},
			assertLogOutput: func(t *testing.T, logOutput string) {
				assert.Contains(t, logOutput, "finished with aborted")
			},
			expectedError: nil,
		},
		"input error": {
			input:         input(t, errors.New("input error")),
			cancelContext: false,
			setupDockerClient: func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context) {
				reader := newMockReader(t)
				reader.On("Read", mock.Anything).
					Return(0, nil).Maybe()

				mockWorkingClient(t, clientMock, reader, expectedCtx)
			},
			setupKillWaiter: func(t *testing.T, waiterMock *wait.MockKillWaiter, expectedCtx context.Context) {
				waiterMock.On("StopKillWait", expectedCtx, id, mock.Anything,
					mock.AnythingOfType("wait.GracefulExitFunc")).Return(nil).Once()
			},
			assertLogOutput: func(t *testing.T, logOutput string) {
				assert.Contains(t, logOutput, "finished with input error")
			},
			expectedError: nil,
		},
		"output error": {
			input:         input(t, io.EOF),
			cancelContext: false,
			setupDockerClient: func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context) {
				reader := newMockReader(t)
				reader.On("Read", mock.Anything).
					Return(0, errors.New("output error"))

				mockWorkingClient(t, clientMock, reader, expectedCtx)
			},
			setupKillWaiter: func(t *testing.T, waiterMock *wait.MockKillWaiter, expectedCtx context.Context) {
				waiterMock.On("StopKillWait", expectedCtx, id, mock.Anything,
					mock.AnythingOfType("wait.GracefulExitFunc")).Return(nil).Once()
			},
			assertLogOutput: func(t *testing.T, logOutput string) {
				assert.Contains(t, logOutput, "finished with output error")
			},
			expectedError: nil,
		},
		"killWaiter error": {
			input:         input(t, io.EOF),
			cancelContext: false,
			setupDockerClient: func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context) {
				reader := newMockReader(t)
				reader.On("Read", mock.Anything).
					Return(0, io.EOF).
					Once()

				mockWorkingClient(t, clientMock, reader, expectedCtx)
			},
			setupKillWaiter: func(t *testing.T, waiterMock *wait.MockKillWaiter, expectedCtx context.Context) {
				waiterMock.On("StopKillWait", expectedCtx, id, mock.Anything,
					mock.AnythingOfType("wait.GracefulExitFunc")).Return(assert.AnError).Once()
			},
			assertLogOutput: func(t *testing.T, logOutput string) {},
			expectedError:   assert.AnError,
		},
		"output passed to the writers": {
			input:         input(t, io.EOF),
			cancelContext: false,
			setupDockerClient: func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context) {
				// Feed interleaved stdout/stderr frames through a pipe using
				// Docker's stdcopy multiplexing, so Exec must demultiplex them.
				pr, pw := io.Pipe()
				outWriter := stdcopy.NewStdWriter(pw, stdcopy.Stdout)
				errWriter := stdcopy.NewStdWriter(pw, stdcopy.Stderr)

				var wg sync.WaitGroup
				t.Cleanup(wg.Wait)

				wg.Add(1)
				go func() {
					defer wg.Done()

					var err error
					_, err = fmt.Fprintln(outWriter, "out line 1")
					require.NoError(t, err)
					_, err = fmt.Fprintln(errWriter, "err line 1")
					require.NoError(t, err)
					_, err = fmt.Fprintln(outWriter, "out line 2")
					require.NoError(t, err)
					_, err = fmt.Fprintln(errWriter, "err line 2")
					require.NoError(t, err)

					err = pw.Close()
					require.NoError(t, err)
				}()

				mockWorkingClient(t, clientMock, pr, expectedCtx)
			},
			setupKillWaiter: func(t *testing.T, waiterMock *wait.MockKillWaiter, expectedCtx context.Context) {
				waiterMock.On("StopKillWait", expectedCtx, id, mock.Anything,
					mock.AnythingOfType("wait.GracefulExitFunc")).Return(nil).Once()
			},
			assertLogOutput: func(t *testing.T, logOutput string) {},
			expectedError:   nil,
			expectedStdOut:  "out line 1\nout line 2\n",
			expectedStdErr:  "err line 1\nerr line 2\n",
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			clientMock := docker.NewMockClient(t)
			waiterMock := wait.NewMockKillWaiter(t)

			logger, hook := test.NewNullLogger()
			logger.SetLevel(logrus.DebugLevel)

			// Two contexts mirror production: executorCtx is the long-lived
			// context given to NewDocker (used by StopKillWait); ctx is the
			// per-call context given to Exec.
			executorCtx, executorCancelFn := context.WithCancel(t.Context())
			defer executorCancelFn()

			ctx, cancelFn := context.WithCancel(executorCtx)
			defer cancelFn()

			outBuf := new(bytes.Buffer)
			errBuf := new(bytes.Buffer)

			tt.setupDockerClient(t, clientMock, ctx)
			tt.setupKillWaiter(t, waiterMock, executorCtx)

			if tt.cancelContext {
				cancelFn()
			}

			streams := IOStreams{
				Stdin:  tt.input,
				Stdout: outBuf,
				Stderr: errBuf,
			}

			dockerExec := NewDocker(executorCtx, clientMock, waiterMock, logger)
			err := dockerExec.Exec(ctx, id, streams, nil)

			logOutput := ""
			for _, entry := range hook.AllEntries() {
				line, e := entry.String()
				require.NoError(t, e)
				logOutput += line
			}
			tt.assertLogOutput(t, logOutput)

			if tt.expectedError != nil {
				assert.ErrorIs(t, err, tt.expectedError)
				return
			}

			assert.NoError(t, err)
			assert.Equal(t, tt.expectedStdOut, outBuf.String())
			assert.Equal(t, tt.expectedStdErr, errBuf.String())
		})
	}
}

================================================
FILE: executors/docker/internal/exec/mocks.go
================================================
// Code generated by mockery; DO NOT EDIT.
// github.com/vektra/mockery
// template: testify

package exec

import (
	"context"
	"net"
	"time"

	mock "github.com/stretchr/testify/mock"

	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/wait"
)

// newMockConn creates a new instance of mockConn. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value. func newMockConn(t interface { mock.TestingT Cleanup(func()) }) *mockConn { mock := &mockConn{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // mockConn is an autogenerated mock type for the conn type type mockConn struct { mock.Mock } type mockConn_Expecter struct { mock *mock.Mock } func (_m *mockConn) EXPECT() *mockConn_Expecter { return &mockConn_Expecter{mock: &_m.Mock} } // Close provides a mock function for the type mockConn func (_mock *mockConn) Close() error { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for Close") } var r0 error if returnFunc, ok := ret.Get(0).(func() error); ok { r0 = returnFunc() } else { r0 = ret.Error(0) } return r0 } // mockConn_Close_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Close' type mockConn_Close_Call struct { *mock.Call } // Close is a helper method to define mock.On call func (_e *mockConn_Expecter) Close() *mockConn_Close_Call { return &mockConn_Close_Call{Call: _e.mock.On("Close")} } func (_c *mockConn_Close_Call) Run(run func()) *mockConn_Close_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *mockConn_Close_Call) Return(err error) *mockConn_Close_Call { _c.Call.Return(err) return _c } func (_c *mockConn_Close_Call) RunAndReturn(run func() error) *mockConn_Close_Call { _c.Call.Return(run) return _c } // LocalAddr provides a mock function for the type mockConn func (_mock *mockConn) LocalAddr() net.Addr { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for LocalAddr") } var r0 net.Addr if returnFunc, ok := ret.Get(0).(func() net.Addr); ok { r0 = returnFunc() } else { if ret.Get(0) != nil { r0 = ret.Get(0).(net.Addr) } } return r0 } // mockConn_LocalAddr_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'LocalAddr' type mockConn_LocalAddr_Call struct { *mock.Call 
} // LocalAddr is a helper method to define mock.On call func (_e *mockConn_Expecter) LocalAddr() *mockConn_LocalAddr_Call { return &mockConn_LocalAddr_Call{Call: _e.mock.On("LocalAddr")} } func (_c *mockConn_LocalAddr_Call) Run(run func()) *mockConn_LocalAddr_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *mockConn_LocalAddr_Call) Return(addr net.Addr) *mockConn_LocalAddr_Call { _c.Call.Return(addr) return _c } func (_c *mockConn_LocalAddr_Call) RunAndReturn(run func() net.Addr) *mockConn_LocalAddr_Call { _c.Call.Return(run) return _c } // Read provides a mock function for the type mockConn func (_mock *mockConn) Read(b []byte) (int, error) { ret := _mock.Called(b) if len(ret) == 0 { panic("no return value specified for Read") } var r0 int var r1 error if returnFunc, ok := ret.Get(0).(func([]byte) (int, error)); ok { return returnFunc(b) } if returnFunc, ok := ret.Get(0).(func([]byte) int); ok { r0 = returnFunc(b) } else { r0 = ret.Get(0).(int) } if returnFunc, ok := ret.Get(1).(func([]byte) error); ok { r1 = returnFunc(b) } else { r1 = ret.Error(1) } return r0, r1 } // mockConn_Read_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Read' type mockConn_Read_Call struct { *mock.Call } // Read is a helper method to define mock.On call // - b []byte func (_e *mockConn_Expecter) Read(b interface{}) *mockConn_Read_Call { return &mockConn_Read_Call{Call: _e.mock.On("Read", b)} } func (_c *mockConn_Read_Call) Run(run func(b []byte)) *mockConn_Read_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 []byte if args[0] != nil { arg0 = args[0].([]byte) } run( arg0, ) }) return _c } func (_c *mockConn_Read_Call) Return(n int, err error) *mockConn_Read_Call { _c.Call.Return(n, err) return _c } func (_c *mockConn_Read_Call) RunAndReturn(run func(b []byte) (int, error)) *mockConn_Read_Call { _c.Call.Return(run) return _c } // RemoteAddr provides a mock function for the type mockConn func (_mock 
*mockConn) RemoteAddr() net.Addr { ret := _mock.Called() if len(ret) == 0 { panic("no return value specified for RemoteAddr") } var r0 net.Addr if returnFunc, ok := ret.Get(0).(func() net.Addr); ok { r0 = returnFunc() } else { if ret.Get(0) != nil { r0 = ret.Get(0).(net.Addr) } } return r0 } // mockConn_RemoteAddr_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RemoteAddr' type mockConn_RemoteAddr_Call struct { *mock.Call } // RemoteAddr is a helper method to define mock.On call func (_e *mockConn_Expecter) RemoteAddr() *mockConn_RemoteAddr_Call { return &mockConn_RemoteAddr_Call{Call: _e.mock.On("RemoteAddr")} } func (_c *mockConn_RemoteAddr_Call) Run(run func()) *mockConn_RemoteAddr_Call { _c.Call.Run(func(args mock.Arguments) { run() }) return _c } func (_c *mockConn_RemoteAddr_Call) Return(addr net.Addr) *mockConn_RemoteAddr_Call { _c.Call.Return(addr) return _c } func (_c *mockConn_RemoteAddr_Call) RunAndReturn(run func() net.Addr) *mockConn_RemoteAddr_Call { _c.Call.Return(run) return _c } // SetDeadline provides a mock function for the type mockConn func (_mock *mockConn) SetDeadline(t time.Time) error { ret := _mock.Called(t) if len(ret) == 0 { panic("no return value specified for SetDeadline") } var r0 error if returnFunc, ok := ret.Get(0).(func(time.Time) error); ok { r0 = returnFunc(t) } else { r0 = ret.Error(0) } return r0 } // mockConn_SetDeadline_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetDeadline' type mockConn_SetDeadline_Call struct { *mock.Call } // SetDeadline is a helper method to define mock.On call // - t time.Time func (_e *mockConn_Expecter) SetDeadline(t interface{}) *mockConn_SetDeadline_Call { return &mockConn_SetDeadline_Call{Call: _e.mock.On("SetDeadline", t)} } func (_c *mockConn_SetDeadline_Call) Run(run func(t time.Time)) *mockConn_SetDeadline_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 time.Time if args[0] != nil { arg0 = 
args[0].(time.Time) } run( arg0, ) }) return _c } func (_c *mockConn_SetDeadline_Call) Return(err error) *mockConn_SetDeadline_Call { _c.Call.Return(err) return _c } func (_c *mockConn_SetDeadline_Call) RunAndReturn(run func(t time.Time) error) *mockConn_SetDeadline_Call { _c.Call.Return(run) return _c } // SetReadDeadline provides a mock function for the type mockConn func (_mock *mockConn) SetReadDeadline(t time.Time) error { ret := _mock.Called(t) if len(ret) == 0 { panic("no return value specified for SetReadDeadline") } var r0 error if returnFunc, ok := ret.Get(0).(func(time.Time) error); ok { r0 = returnFunc(t) } else { r0 = ret.Error(0) } return r0 } // mockConn_SetReadDeadline_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetReadDeadline' type mockConn_SetReadDeadline_Call struct { *mock.Call } // SetReadDeadline is a helper method to define mock.On call // - t time.Time func (_e *mockConn_Expecter) SetReadDeadline(t interface{}) *mockConn_SetReadDeadline_Call { return &mockConn_SetReadDeadline_Call{Call: _e.mock.On("SetReadDeadline", t)} } func (_c *mockConn_SetReadDeadline_Call) Run(run func(t time.Time)) *mockConn_SetReadDeadline_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 time.Time if args[0] != nil { arg0 = args[0].(time.Time) } run( arg0, ) }) return _c } func (_c *mockConn_SetReadDeadline_Call) Return(err error) *mockConn_SetReadDeadline_Call { _c.Call.Return(err) return _c } func (_c *mockConn_SetReadDeadline_Call) RunAndReturn(run func(t time.Time) error) *mockConn_SetReadDeadline_Call { _c.Call.Return(run) return _c } // SetWriteDeadline provides a mock function for the type mockConn func (_mock *mockConn) SetWriteDeadline(t time.Time) error { ret := _mock.Called(t) if len(ret) == 0 { panic("no return value specified for SetWriteDeadline") } var r0 error if returnFunc, ok := ret.Get(0).(func(time.Time) error); ok { r0 = returnFunc(t) } else { r0 = ret.Error(0) } return r0 } // 
mockConn_SetWriteDeadline_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SetWriteDeadline' type mockConn_SetWriteDeadline_Call struct { *mock.Call } // SetWriteDeadline is a helper method to define mock.On call // - t time.Time func (_e *mockConn_Expecter) SetWriteDeadline(t interface{}) *mockConn_SetWriteDeadline_Call { return &mockConn_SetWriteDeadline_Call{Call: _e.mock.On("SetWriteDeadline", t)} } func (_c *mockConn_SetWriteDeadline_Call) Run(run func(t time.Time)) *mockConn_SetWriteDeadline_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 time.Time if args[0] != nil { arg0 = args[0].(time.Time) } run( arg0, ) }) return _c } func (_c *mockConn_SetWriteDeadline_Call) Return(err error) *mockConn_SetWriteDeadline_Call { _c.Call.Return(err) return _c } func (_c *mockConn_SetWriteDeadline_Call) RunAndReturn(run func(t time.Time) error) *mockConn_SetWriteDeadline_Call { _c.Call.Return(run) return _c } // Write provides a mock function for the type mockConn func (_mock *mockConn) Write(b []byte) (int, error) { ret := _mock.Called(b) if len(ret) == 0 { panic("no return value specified for Write") } var r0 int var r1 error if returnFunc, ok := ret.Get(0).(func([]byte) (int, error)); ok { return returnFunc(b) } if returnFunc, ok := ret.Get(0).(func([]byte) int); ok { r0 = returnFunc(b) } else { r0 = ret.Get(0).(int) } if returnFunc, ok := ret.Get(1).(func([]byte) error); ok { r1 = returnFunc(b) } else { r1 = ret.Error(1) } return r0, r1 } // mockConn_Write_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Write' type mockConn_Write_Call struct { *mock.Call } // Write is a helper method to define mock.On call // - b []byte func (_e *mockConn_Expecter) Write(b interface{}) *mockConn_Write_Call { return &mockConn_Write_Call{Call: _e.mock.On("Write", b)} } func (_c *mockConn_Write_Call) Run(run func(b []byte)) *mockConn_Write_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 
[]byte if args[0] != nil { arg0 = args[0].([]byte) } run( arg0, ) }) return _c } func (_c *mockConn_Write_Call) Return(n int, err error) *mockConn_Write_Call { _c.Call.Return(n, err) return _c } func (_c *mockConn_Write_Call) RunAndReturn(run func(b []byte) (int, error)) *mockConn_Write_Call { _c.Call.Return(run) return _c } // newMockReader creates a new instance of mockReader. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func newMockReader(t interface { mock.TestingT Cleanup(func()) }) *mockReader { mock := &mockReader{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // mockReader is an autogenerated mock type for the reader type type mockReader struct { mock.Mock } type mockReader_Expecter struct { mock *mock.Mock } func (_m *mockReader) EXPECT() *mockReader_Expecter { return &mockReader_Expecter{mock: &_m.Mock} } // Read provides a mock function for the type mockReader func (_mock *mockReader) Read(p []byte) (int, error) { ret := _mock.Called(p) if len(ret) == 0 { panic("no return value specified for Read") } var r0 int var r1 error if returnFunc, ok := ret.Get(0).(func([]byte) (int, error)); ok { return returnFunc(p) } if returnFunc, ok := ret.Get(0).(func([]byte) int); ok { r0 = returnFunc(p) } else { r0 = ret.Get(0).(int) } if returnFunc, ok := ret.Get(1).(func([]byte) error); ok { r1 = returnFunc(p) } else { r1 = ret.Error(1) } return r0, r1 } // mockReader_Read_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Read' type mockReader_Read_Call struct { *mock.Call } // Read is a helper method to define mock.On call // - p []byte func (_e *mockReader_Expecter) Read(p interface{}) *mockReader_Read_Call { return &mockReader_Read_Call{Call: _e.mock.On("Read", p)} } func (_c *mockReader_Read_Call) Run(run func(p []byte)) *mockReader_Read_Call { _c.Call.Run(func(args 
mock.Arguments) { var arg0 []byte if args[0] != nil { arg0 = args[0].([]byte) } run( arg0, ) }) return _c } func (_c *mockReader_Read_Call) Return(n int, err error) *mockReader_Read_Call { _c.Call.Return(n, err) return _c } func (_c *mockReader_Read_Call) RunAndReturn(run func(p []byte) (int, error)) *mockReader_Read_Call { _c.Call.Return(run) return _c } // NewMockDocker creates a new instance of MockDocker. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockDocker(t interface { mock.TestingT Cleanup(func()) }) *MockDocker { mock := &MockDocker{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockDocker is an autogenerated mock type for the Docker type type MockDocker struct { mock.Mock } type MockDocker_Expecter struct { mock *mock.Mock } func (_m *MockDocker) EXPECT() *MockDocker_Expecter { return &MockDocker_Expecter{mock: &_m.Mock} } // Exec provides a mock function for the type MockDocker func (_mock *MockDocker) Exec(ctx context.Context, containerID string, streams IOStreams, gracefulExitFunc wait.GracefulExitFunc) error { ret := _mock.Called(ctx, containerID, streams, gracefulExitFunc) if len(ret) == 0 { panic("no return value specified for Exec") } var r0 error if returnFunc, ok := ret.Get(0).(func(context.Context, string, IOStreams, wait.GracefulExitFunc) error); ok { r0 = returnFunc(ctx, containerID, streams, gracefulExitFunc) } else { r0 = ret.Error(0) } return r0 } // MockDocker_Exec_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Exec' type MockDocker_Exec_Call struct { *mock.Call } // Exec is a helper method to define mock.On call // - ctx context.Context // - containerID string // - streams IOStreams // - gracefulExitFunc wait.GracefulExitFunc func (_e *MockDocker_Expecter) Exec(ctx interface{}, containerID interface{}, streams interface{}, 
gracefulExitFunc interface{}) *MockDocker_Exec_Call { return &MockDocker_Exec_Call{Call: _e.mock.On("Exec", ctx, containerID, streams, gracefulExitFunc)} } func (_c *MockDocker_Exec_Call) Run(run func(ctx context.Context, containerID string, streams IOStreams, gracefulExitFunc wait.GracefulExitFunc)) *MockDocker_Exec_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } var arg1 string if args[1] != nil { arg1 = args[1].(string) } var arg2 IOStreams if args[2] != nil { arg2 = args[2].(IOStreams) } var arg3 wait.GracefulExitFunc if args[3] != nil { arg3 = args[3].(wait.GracefulExitFunc) } run( arg0, arg1, arg2, arg3, ) }) return _c } func (_c *MockDocker_Exec_Call) Return(err error) *MockDocker_Exec_Call { _c.Call.Return(err) return _c } func (_c *MockDocker_Exec_Call) RunAndReturn(run func(ctx context.Context, containerID string, streams IOStreams, gracefulExitFunc wait.GracefulExitFunc) error) *MockDocker_Exec_Call { _c.Call.Return(run) return _c } ================================================ FILE: executors/docker/internal/labels/labels.go ================================================ package labels import ( "fmt" "strconv" "gitlab.com/gitlab-org/gitlab-runner/common" ) const dockerLabelPrefix = "com.gitlab.gitlab-runner" // Labeler is responsible for handling labelling logic for docker entities - networks, containers. type Labeler interface { Labels(otherLabels map[string]string) map[string]string } // NewLabeler returns a new instance of a Labeler bound to this build. func NewLabeler(b *common.Build) Labeler { return &labeler{ build: b, } } type labeler struct { build *common.Build } // Labels returns a map of label to value to be applied to docker entities. // Includes a set of defaults. Add additional ones or overwrites in the provided map. 
func (l *labeler) Labels(otherLabels map[string]string) map[string]string { pipelineID := l.build.GetAllVariables().Value("CI_PIPELINE_ID") if l.build.JobInfo.PipelineID > 0 { pipelineID = strconv.FormatInt(l.build.JobInfo.PipelineID, 10) } labels := map[string]string{ dockerLabelPrefix + ".job.id": strconv.FormatInt(l.build.ID, 10), dockerLabelPrefix + ".job.url": l.build.JobURL(), dockerLabelPrefix + ".job.sha": l.build.GitInfo.Sha, dockerLabelPrefix + ".job.before_sha": l.build.GitInfo.BeforeSha, dockerLabelPrefix + ".job.ref": l.build.GitInfo.Ref, dockerLabelPrefix + ".job.timeout": l.build.GetBuildTimeout().String(), dockerLabelPrefix + ".project.id": strconv.FormatInt(l.build.JobInfo.ProjectID, 10), dockerLabelPrefix + ".project.runner_id": strconv.Itoa(l.build.ProjectRunnerID), dockerLabelPrefix + ".pipeline.id": pipelineID, dockerLabelPrefix + ".runner.id": l.build.Runner.ShortDescription(), dockerLabelPrefix + ".runner.local_id": strconv.Itoa(l.build.RunnerID), dockerLabelPrefix + ".runner.system_id": l.build.Runner.SystemID, dockerLabelPrefix + ".managed": "true", } for k, v := range otherLabels { labels[fmt.Sprintf("%s.%s", dockerLabelPrefix, k)] = v } return labels } ================================================ FILE: executors/docker/internal/labels/labels_test.go ================================================ //go:build !integration package labels import ( "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" ) func TestNewLabeler(t *testing.T) { l := NewLabeler(&common.Build{}) assert.IsType(t, new(labeler), l) } func TestLabels(t *testing.T) { b := &common.Build{ Job: spec.Job{ ID: 12345, GitInfo: spec.GitInfo{ Sha: "sha", BeforeSha: "before-sha", Ref: "ref", RepoURL: "https://ci-job-token:ToKeN123@gitlab.example.com/namespace/project.git", }, JobInfo: spec.JobInfo{ ProjectID: 123456, }, }, Runner: &common.RunnerConfig{ 
RunnerCredentials: common.RunnerCredentials{ Token: "test-token", }, SystemID: "some-system-ID", }, RunnerID: 123, ProjectRunnerID: 456, } l := NewLabeler(b) expected := map[string]string{ "com.gitlab.gitlab-runner.job.id": "12345", "com.gitlab.gitlab-runner.job.url": "https://gitlab.example.com/namespace/project/-/jobs/12345", "com.gitlab.gitlab-runner.job.sha": "sha", "com.gitlab.gitlab-runner.job.before_sha": "before-sha", "com.gitlab.gitlab-runner.job.ref": "ref", "com.gitlab.gitlab-runner.job.timeout": "2h0m0s", "com.gitlab.gitlab-runner.project.id": "123456", "com.gitlab.gitlab-runner.project.runner_id": "456", "com.gitlab.gitlab-runner.pipeline.id": "", "com.gitlab.gitlab-runner.runner.id": "test-toke", "com.gitlab.gitlab-runner.runner.local_id": "123", "com.gitlab.gitlab-runner.runner.system_id": "some-system-ID", "com.gitlab.gitlab-runner.managed": "true", "com.gitlab.gitlab-runner.other.label1": "1", "com.gitlab.gitlab-runner.other.label2": "2", } actual := l.Labels(map[string]string{"other.label1": "1", "other.label2": "2"}) assert.Equal(t, expected, actual) } func TestLabels_pipelineIDSupport(t *testing.T) { tests := map[string]struct { pipelineIDInJobPayload *int64 pipelineIDInJobVariable *spec.Variable expectedPipelineIDLabel string }{ "pipelineID in job payload only": { pipelineIDInJobPayload: func(i int64) *int64 { return &i }(987654321), expectedPipelineIDLabel: "987654321", }, "pipelineID in job variable only": { pipelineIDInJobVariable: &spec.Variable{ Key: "CI_PIPELINE_ID", Value: "123456789", }, expectedPipelineIDLabel: "123456789", }, "pipelineID in job variable and job payload": { pipelineIDInJobPayload: func(i int64) *int64 { return &i }(987654321), pipelineIDInJobVariable: &spec.Variable{ Key: "CI_PIPELINE_ID", Value: "123456789", }, expectedPipelineIDLabel: "987654321", }, "pipelineID not present at all": { expectedPipelineIDLabel: "", }, } for tn, tt := range tests { t.Run(tn, func(t *testing.T) { b := &common.Build{ Job: spec.Job{ ID: 
12345, GitInfo: spec.GitInfo{ Sha: "sha", BeforeSha: "before-sha", Ref: "ref", RepoURL: "https://ci-job-token:ToKeN123@gitlab.example.com/namespace/project.git", }, JobInfo: spec.JobInfo{ ProjectID: 123456, }, Variables: make([]spec.Variable, 0, 1), }, Runner: &common.RunnerConfig{ RunnerCredentials: common.RunnerCredentials{ Token: "test-token", }, SystemID: "some-system-ID", }, RunnerID: 123, ProjectRunnerID: 456, } if tt.pipelineIDInJobPayload != nil { b.Job.JobInfo.PipelineID = *tt.pipelineIDInJobPayload } if tt.pipelineIDInJobVariable != nil { b.Job.Variables = append(b.Job.Variables, *tt.pipelineIDInJobVariable) } l := NewLabeler(b) labels := l.Labels(map[string]string{"other": "label"}) t.Log(labels) pipelineIDLabelKey := dockerLabelPrefix + ".pipeline.id" require.Contains(t, labels, pipelineIDLabelKey) assert.Equal(t, tt.expectedPipelineIDLabel, labels[pipelineIDLabelKey]) }) } } ================================================ FILE: executors/docker/internal/labels/mocks.go ================================================ // Code generated by mockery; DO NOT EDIT. // github.com/vektra/mockery // template: testify package labels import ( mock "github.com/stretchr/testify/mock" ) // NewMockLabeler creates a new instance of MockLabeler. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func NewMockLabeler(t interface { mock.TestingT Cleanup(func()) }) *MockLabeler { mock := &MockLabeler{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockLabeler is an autogenerated mock type for the Labeler type type MockLabeler struct { mock.Mock } type MockLabeler_Expecter struct { mock *mock.Mock } func (_m *MockLabeler) EXPECT() *MockLabeler_Expecter { return &MockLabeler_Expecter{mock: &_m.Mock} } // Labels provides a mock function for the type MockLabeler func (_mock *MockLabeler) Labels(otherLabels map[string]string) map[string]string { ret := _mock.Called(otherLabels) if len(ret) == 0 { panic("no return value specified for Labels") } var r0 map[string]string if returnFunc, ok := ret.Get(0).(func(map[string]string) map[string]string); ok { r0 = returnFunc(otherLabels) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(map[string]string) } } return r0 } // MockLabeler_Labels_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Labels' type MockLabeler_Labels_Call struct { *mock.Call } // Labels is a helper method to define mock.On call // - otherLabels map[string]string func (_e *MockLabeler_Expecter) Labels(otherLabels interface{}) *MockLabeler_Labels_Call { return &MockLabeler_Labels_Call{Call: _e.mock.On("Labels", otherLabels)} } func (_c *MockLabeler_Labels_Call) Run(run func(otherLabels map[string]string)) *MockLabeler_Labels_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 map[string]string if args[0] != nil { arg0 = args[0].(map[string]string) } run( arg0, ) }) return _c } func (_c *MockLabeler_Labels_Call) Return(stringToString map[string]string) *MockLabeler_Labels_Call { _c.Call.Return(stringToString) return _c } func (_c *MockLabeler_Labels_Call) RunAndReturn(run func(otherLabels map[string]string) map[string]string) *MockLabeler_Labels_Call { _c.Call.Return(run) return _c } ================================================ FILE: 
executors/docker/internal/networks/manager.go
================================================
package networks

import (
	"context"
	"errors"
	"fmt"
	"strconv"

	"github.com/docker/docker/api/types/container"
	network "github.com/docker/docker/api/types/network"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/labels"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/docker"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags"
)

// errBuildNetworkExists is returned by Create when a per-build network has
// already been recorded on this manager (buildNetwork.ID is non-empty).
var errBuildNetworkExists = errors.New("build network is not empty")

// Manager handles the lifecycle of the docker network used by a build:
// creating it (optionally one network per build), inspecting it, and
// removing it on cleanup.
type Manager interface {
	Create(ctx context.Context, networkMode string, enableIPv6 bool) (container.NetworkMode, error)
	Inspect(ctx context.Context) (network.Inspect, error)
	Cleanup(ctx context.Context) error
}

type manager struct {
	logger  debugLogger
	client  docker.Client
	build   *common.Build
	labeler labels.Labeler

	// networkMode is the mode resolved by Create and handed to containers.
	networkMode container.NetworkMode
	// buildNetwork holds the inspect result of the per-build network, if one
	// was created. Its ID doubles as the "already created" guard in Create.
	buildNetwork network.Inspect
	// perBuild records whether Create actually created a per-build network,
	// which gates Inspect and Cleanup.
	perBuild bool
}

// NewManager returns a Manager bound to the given build, docker client and
// labeler.
func NewManager(logger debugLogger, dockerClient docker.Client, build *common.Build, labeler labels.Labeler) Manager {
	return &manager{
		logger:  logger,
		client:  dockerClient,
		build:   build,
		labeler: labeler,
	}
}

// Create resolves the network mode for the build. If an explicit networkMode
// is given, or the FF_NETWORK_PER_BUILD feature flag is off, it is returned
// unchanged and no network is created. Otherwise a dedicated, labeled network
// is created for this build and its name is returned as the mode.
func (m *manager) Create(ctx context.Context, networkMode string, enableIPv6 bool) (container.NetworkMode, error) {
	m.networkMode = container.NetworkMode(networkMode)
	m.perBuild = false

	// An explicitly configured network mode always wins.
	if networkMode != "" {
		return m.networkMode, nil
	}

	if !m.build.IsFeatureFlagOn(featureflags.NetworkPerBuild) {
		return m.networkMode, nil
	}

	// Guard against creating a second network for the same build.
	if m.buildNetwork.ID != "" {
		return "", errBuildNetworkExists
	}

	networkName := m.build.GetNetworkName()

	m.logger.Debugln("Creating build network ", networkName)

	networkResponse, err := m.client.NetworkCreate(
		ctx,
		networkName,
		network.CreateOptions{
			Labels:     m.labeler.Labels(map[string]string{}),
			EnableIPv6: &enableIPv6,
			Options:    networkOptionsFromConfig(m.build.Runner.Docker),
		},
	)
	if err != nil {
		return "", err
	}

	// Inspect the created network to save its details
	m.buildNetwork, err = m.client.NetworkInspect(ctx, networkResponse.ID)
	if err != nil {
		return "", err
	}

	m.networkMode = container.NetworkMode(networkName)
	m.perBuild = true

	return m.networkMode, nil
}

// networkOptionsFromConfig translates runner docker configuration into docker
// network driver options. Currently only the MTU override is supported.
func networkOptionsFromConfig(config *common.DockerConfig) map[string]string {
	networkOptions := make(map[string]string)

	if config != nil && config.NetworkMTU != 0 {
		networkOptions["com.docker.network.driver.mtu"] = strconv.Itoa(config.NetworkMTU)
	}

	return networkOptions
}

// Inspect returns the current state of the per-build network. It is a no-op
// (zero value, nil error) when no per-build network was created.
func (m *manager) Inspect(ctx context.Context) (network.Inspect, error) {
	if !m.perBuild {
		return network.Inspect{}, nil
	}

	m.logger.Debugln("Inspect docker network: ", m.buildNetwork.ID)

	return m.client.NetworkInspect(ctx, m.buildNetwork.ID)
}

// Cleanup removes the per-build network, if one was created by this manager.
// It is a no-op when the feature flag is off or no network was created.
func (m *manager) Cleanup(ctx context.Context) error {
	if !m.build.IsFeatureFlagOn(featureflags.NetworkPerBuild) {
		return nil
	}

	if !m.perBuild {
		return nil
	}

	m.logger.Debugln("Removing network: ", m.buildNetwork.ID)

	err := m.client.NetworkRemove(ctx, m.buildNetwork.ID)
	if err != nil {
		return fmt.Errorf("docker remove network %s: %w", m.buildNetwork.ID, err)
	}

	return nil
}

================================================
FILE: executors/docker/internal/networks/manager_integration_test.go
================================================
//go:build integration

package networks_test

import (
	"context"
	"testing"

	"github.com/docker/docker/api/types/container"
	logrustest "github.com/sirupsen/logrus/hooks/test"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/labels"
	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/networks"
	"gitlab.com/gitlab-org/gitlab-runner/helpers"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/docker"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags"
)

// TestCreateNetworkLabels creates a real per-build network against a local
// docker daemon and verifies every label the labeler applies to it.
func TestCreateNetworkLabels(t *testing.T) {
	helpers.SkipIntegrationTests(t, "docker", "info")

	successfulJobResponse, err := common.GetRemoteSuccessfulBuild()
	require.NoError(t, err)

	client, err := docker.New(docker.Credentials{})
	require.NoError(t, err, "should be able to connect to docker")
	defer client.Close()

	successfulJobResponse.GitInfo.RepoURL = "https://user:pass@gitlab.example.com/namespace/project.git"

	build := &common.Build{
		ProjectRunnerID: 0,
		Runner: &common.RunnerConfig{
			RunnerCredentials: common.RunnerCredentials{Token: "test-token"},
		},
		Job: successfulJobResponse,
	}
	// Enable per-build networks so Create actually provisions one.
	build.Variables = spec.Variables{
		{Key: featureflags.NetworkPerBuild, Value: "true"},
		{Key: "CI_PIPELINE_ID", Value: "1"},
	}

	logger, _ := logrustest.NewNullLogger()
	manager := networks.NewManager(logger, client, build, labels.NewLabeler(build))

	ctx := context.Background()

	networkMode, err := manager.Create(ctx, "", false)
	assert.NoError(t, err)
	assert.Equal(t, container.NetworkMode("runner-test-toke-0-0-0"), networkMode)

	network, err := manager.Inspect(ctx)
	assert.NoError(t, err)
	assert.Equal(t, map[string]string{
		"com.gitlab.gitlab-runner.job.before_sha":    "1ea27a9695f80d7816d9e8ce025d9b2df83d0dd7",
		"com.gitlab.gitlab-runner.job.id":            "0",
		"com.gitlab.gitlab-runner.job.url":           "https://gitlab.example.com/namespace/project/-/jobs/0",
		"com.gitlab.gitlab-runner.job.ref":           "main",
		"com.gitlab.gitlab-runner.job.sha":           "69b18e5ed3610cf646119c3e38f462c64ec462b7",
		"com.gitlab.gitlab-runner.job.timeout":       "2h0m0s",
		"com.gitlab.gitlab-runner.managed":           "true",
		"com.gitlab.gitlab-runner.pipeline.id":       "1",
		"com.gitlab.gitlab-runner.project.id":        "0",
		"com.gitlab.gitlab-runner.project.runner_id": "0",
		"com.gitlab.gitlab-runner.runner.id":         "test-toke",
		"com.gitlab.gitlab-runner.runner.local_id":   "0",
		"com.gitlab.gitlab-runner.runner.system_id":  "",
	}, network.Labels)

	err = manager.Cleanup(ctx)
	assert.NoError(t, err)
}

================================================
FILE: executors/docker/internal/networks/manager_test.go
================================================
//go:build !integration

package
networks import ( "errors" "strconv" "testing" "github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/network" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/labels" "gitlab.com/gitlab-org/gitlab-runner/helpers/docker" "gitlab.com/gitlab-org/gitlab-runner/helpers/featureflags" ) func TestNewDefaultManager(t *testing.T) { logger := newMockDebugLogger(t) m := NewManager(logger, nil, nil, nil) assert.IsType(t, &manager{}, m) } func newDefaultManager(t *testing.T) *manager { b := &common.Build{ ProjectRunnerID: 0, Runner: &common.RunnerConfig{ RunnerCredentials: common.RunnerCredentials{Token: "test-token"}, }, Job: spec.Job{ JobInfo: spec.JobInfo{ ProjectID: 0, }, }, } loggerMock := newMockDebugLogger(t) loggerMock.On("Debugln", mock.Anything, mock.Anything).Maybe() m := &manager{ logger: loggerMock, build: b, labeler: labels.NewLabeler(b), } return m } func addClient(t *testing.T, manager *manager) *docker.MockClient { client := docker.NewMockClient(t) manager.client = client return client } func TestCreateNetwork(t *testing.T) { testCases := map[string]struct { networkMode string networkPerBuild string buildNetwork network.Inspect enableIPv6 bool expectedNetworkMode container.NetworkMode expectedErr error clientAssertions func(*docker.MockClient) }{ "network specified": { networkMode: "default", expectedNetworkMode: container.NetworkMode("default"), }, "network create per build with network mode": { networkMode: "default", networkPerBuild: "true", expectedNetworkMode: container.NetworkMode("default"), }, "network per-build flag off": { networkMode: "", networkPerBuild: "false", expectedNetworkMode: container.NetworkMode(""), }, "network create per-build network": { networkMode: "", networkPerBuild: "true", 
expectedNetworkMode: container.NetworkMode("runner-test-toke-0-0-0"), clientAssertions: func(mc *docker.MockClient) { mc.On( "NetworkCreate", mock.Anything, mock.AnythingOfType("string"), mock.AnythingOfType("network.CreateOptions"), ). Return(network.CreateResponse{ID: "test-network"}, nil). Once() mc.On("NetworkInspect", mock.Anything, mock.AnythingOfType("string")). Return(network.Inspect{ ID: "test-network", Name: "test-network", }, nil). Once() }, }, "network create per-build network failure": { networkMode: "", networkPerBuild: "true", expectedNetworkMode: "", expectedErr: errors.New("test-network failed"), clientAssertions: func(mc *docker.MockClient) { mc.On( "NetworkCreate", mock.Anything, mock.AnythingOfType("string"), mock.AnythingOfType("network.CreateOptions"), ). Return(network.CreateResponse{ID: "test-network"}, errors.New("test-network failed")). Once() }, }, "network create per-build inspect failure": { networkMode: "", networkPerBuild: "true", expectedNetworkMode: "", expectedErr: errors.New("network-inspect-failed"), clientAssertions: func(mc *docker.MockClient) { mc.On( "NetworkCreate", mock.Anything, mock.AnythingOfType("string"), mock.AnythingOfType("network.CreateOptions"), ). Return(network.CreateResponse{ID: "test-network"}, nil). Once() mc.On( "NetworkInspect", mock.Anything, mock.AnythingOfType("string"), ). Return(network.Inspect{}, errors.New("network-inspect-failed")). Once() }, }, "networkID already set": { networkMode: "", networkPerBuild: "true", buildNetwork: network.Inspect{ ID: "some-id", }, expectedNetworkMode: "", expectedErr: errBuildNetworkExists, }, "IPv6 network created": { networkMode: "", networkPerBuild: "true", expectedNetworkMode: container.NetworkMode("runner-test-toke-0-0-0"), enableIPv6: true, clientAssertions: func(mc *docker.MockClient) { mc.On( "NetworkCreate", mock.Anything, mock.AnythingOfType("string"), mock.AnythingOfType("network.CreateOptions"), ). Return(network.CreateResponse{ID: "test-network"}, nil). 
Once() mc.On("NetworkInspect", mock.Anything, mock.AnythingOfType("string")). Return(network.Inspect{ ID: "test-network", Name: "test-network", EnableIPv6: true, }, nil). Once() }, }, } for testName, testCase := range testCases { t.Run(testName, func(t *testing.T) { m := newDefaultManager(t) m.build.ID = 0 m.buildNetwork = testCase.buildNetwork client := addClient(t, m) m.build.Variables = append(m.build.Variables, spec.Variable{ Key: featureflags.NetworkPerBuild, Value: testCase.networkPerBuild, }) if testCase.clientAssertions != nil { testCase.clientAssertions(client) } networkMode, err := m.Create(t.Context(), testCase.networkMode, testCase.enableIPv6) assert.Equal(t, testCase.expectedNetworkMode, networkMode) assert.Equal(t, testCase.expectedErr, err) }) } } func TestCreateNetworkWithCustomMTU(t *testing.T) { testCases := map[string]struct { networkPerBuild bool mtu int expectedMTU int }{ "feature-flag is enabled, with mtu": { networkPerBuild: true, mtu: 1402, expectedMTU: 1402, }, "feature-flag is enabled, no mtu": { networkPerBuild: true, }, "feature-flag disabled": { mtu: 1234, }, } for testName, testCase := range testCases { t.Run(testName, func(t *testing.T) { m := newDefaultManager(t) m.build.ID = 0 client := addClient(t, m) m.build.Runner.Docker = &common.DockerConfig{NetworkMTU: testCase.mtu} var receivedMTU int if testCase.networkPerBuild { m.build.Variables = append(m.build.Variables, spec.Variable{ Key: featureflags.NetworkPerBuild, Value: "true", }) client.On("NetworkCreate", mock.Anything, mock.AnythingOfType("string"), mock.AnythingOfType("network.CreateOptions")). Run(func(args mock.Arguments) { arg, ok := args.Get(2).(network.CreateOptions) require.True(t, ok) if testCase.mtu != 0 { mtu, ok := arg.Options["com.docker.network.driver.mtu"] require.True(t, ok) var err error receivedMTU, err = strconv.Atoi(mtu) assert.NoError(t, err) } else { _, ok := arg.Options["com.docker.network.driver.mtu"] require.False(t, ok) } }). 
Return(network.CreateResponse{ID: "test-network"}, nil). Once() client.On("NetworkInspect", mock.Anything, mock.AnythingOfType("string")). Return(network.Inspect{ ID: "test-network", Name: "test-network", }, nil). Once() } _, err := m.Create(t.Context(), "", false) assert.Equal(t, testCase.expectedMTU, receivedMTU) assert.NoError(t, err) }) } } func TestInspectNetwork(t *testing.T) { networkName := "test-network" testError := errors.New("failure") testCases := map[string]struct { perBuild bool clientAssertions func(client *docker.MockClient) expectedResponse network.Inspect expectedErr error }{ "network per build": { perBuild: false, expectedResponse: network.Inspect{}, expectedErr: nil, }, "no network per build": { perBuild: true, clientAssertions: func(m *docker.MockClient) { m.On("NetworkInspect", mock.Anything, mock.AnythingOfType("string")). Return(network.Inspect{ ID: networkName, Name: networkName, }, nil). Once() }, expectedResponse: network.Inspect{ ID: networkName, Name: networkName, }, expectedErr: nil, }, "network inspect failed": { perBuild: true, clientAssertions: func(m *docker.MockClient) { m.On("NetworkInspect", mock.Anything, mock.AnythingOfType("string")). 
Return(network.Inspect{}, testError) }, expectedErr: testError, }, } for testName, testCase := range testCases { t.Run(testName, func(t *testing.T) { m := newDefaultManager(t) m.perBuild = testCase.perBuild client := addClient(t, m) if testCase.clientAssertions != nil { testCase.clientAssertions(client) } inspectResponse, err := m.Inspect(t.Context()) assert.Equal(t, testCase.expectedResponse, inspectResponse) assert.ErrorIs(t, err, testCase.expectedErr) }) } } func TestCleanupNetwork(t *testing.T) { testErr := errors.New("test-error") testCases := map[string]struct { networkMode string networkPerBuild string clientAssertions func(*docker.MockClient) expectErr error }{ "network per-build flag off": { networkPerBuild: "false", }, "network per-build flag on with defined network": { networkPerBuild: "true", networkMode: "default", }, "cleanup per-build network": { networkPerBuild: "true", clientAssertions: func(mc *docker.MockClient) { mc.On("NetworkRemove", mock.Anything, mock.AnythingOfType("string")). Return(nil). Once() }, }, "cleanup per-build error": { networkPerBuild: "true", clientAssertions: func(mc *docker.MockClient) { mc.On("NetworkRemove", mock.Anything, mock.AnythingOfType("string")). Return(testErr). Once() }, expectErr: testErr, }, } for testName, testCase := range testCases { t.Run(testName, func(t *testing.T) { m := newDefaultManager(t) m.build.ID = 0 client := addClient(t, m) m.build.Variables = append(m.build.Variables, spec.Variable{ Key: featureflags.NetworkPerBuild, Value: testCase.networkPerBuild, }) if testCase.networkPerBuild == "true" { if testCase.networkMode == "" { m.perBuild = true } } if testCase.clientAssertions != nil { testCase.clientAssertions(client) } err := m.Cleanup(t.Context()) assert.ErrorIs(t, err, testCase.expectErr) }) } } ================================================ FILE: executors/docker/internal/networks/mocks.go ================================================ // Code generated by mockery; DO NOT EDIT. 
// github.com/vektra/mockery // template: testify package networks import ( "context" "github.com/docker/docker/api/types/container" "github.com/docker/docker/api/types/network" mock "github.com/stretchr/testify/mock" ) // NewMockManager creates a new instance of MockManager. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockManager(t interface { mock.TestingT Cleanup(func()) }) *MockManager { mock := &MockManager{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockManager is an autogenerated mock type for the Manager type type MockManager struct { mock.Mock } type MockManager_Expecter struct { mock *mock.Mock } func (_m *MockManager) EXPECT() *MockManager_Expecter { return &MockManager_Expecter{mock: &_m.Mock} } // Cleanup provides a mock function for the type MockManager func (_mock *MockManager) Cleanup(ctx context.Context) error { ret := _mock.Called(ctx) if len(ret) == 0 { panic("no return value specified for Cleanup") } var r0 error if returnFunc, ok := ret.Get(0).(func(context.Context) error); ok { r0 = returnFunc(ctx) } else { r0 = ret.Error(0) } return r0 } // MockManager_Cleanup_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Cleanup' type MockManager_Cleanup_Call struct { *mock.Call } // Cleanup is a helper method to define mock.On call // - ctx context.Context func (_e *MockManager_Expecter) Cleanup(ctx interface{}) *MockManager_Cleanup_Call { return &MockManager_Cleanup_Call{Call: _e.mock.On("Cleanup", ctx)} } func (_c *MockManager_Cleanup_Call) Run(run func(ctx context.Context)) *MockManager_Cleanup_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } run( arg0, ) }) return _c } func (_c *MockManager_Cleanup_Call) Return(err error) *MockManager_Cleanup_Call { 
_c.Call.Return(err) return _c } func (_c *MockManager_Cleanup_Call) RunAndReturn(run func(ctx context.Context) error) *MockManager_Cleanup_Call { _c.Call.Return(run) return _c } // Create provides a mock function for the type MockManager func (_mock *MockManager) Create(ctx context.Context, networkMode string, enableIPv6 bool) (container.NetworkMode, error) { ret := _mock.Called(ctx, networkMode, enableIPv6) if len(ret) == 0 { panic("no return value specified for Create") } var r0 container.NetworkMode var r1 error if returnFunc, ok := ret.Get(0).(func(context.Context, string, bool) (container.NetworkMode, error)); ok { return returnFunc(ctx, networkMode, enableIPv6) } if returnFunc, ok := ret.Get(0).(func(context.Context, string, bool) container.NetworkMode); ok { r0 = returnFunc(ctx, networkMode, enableIPv6) } else { r0 = ret.Get(0).(container.NetworkMode) } if returnFunc, ok := ret.Get(1).(func(context.Context, string, bool) error); ok { r1 = returnFunc(ctx, networkMode, enableIPv6) } else { r1 = ret.Error(1) } return r0, r1 } // MockManager_Create_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Create' type MockManager_Create_Call struct { *mock.Call } // Create is a helper method to define mock.On call // - ctx context.Context // - networkMode string // - enableIPv6 bool func (_e *MockManager_Expecter) Create(ctx interface{}, networkMode interface{}, enableIPv6 interface{}) *MockManager_Create_Call { return &MockManager_Create_Call{Call: _e.mock.On("Create", ctx, networkMode, enableIPv6)} } func (_c *MockManager_Create_Call) Run(run func(ctx context.Context, networkMode string, enableIPv6 bool)) *MockManager_Create_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } var arg1 string if args[1] != nil { arg1 = args[1].(string) } var arg2 bool if args[2] != nil { arg2 = args[2].(bool) } run( arg0, arg1, arg2, ) }) return _c } func (_c 
*MockManager_Create_Call) Return(networkMode1 container.NetworkMode, err error) *MockManager_Create_Call { _c.Call.Return(networkMode1, err) return _c } func (_c *MockManager_Create_Call) RunAndReturn(run func(ctx context.Context, networkMode string, enableIPv6 bool) (container.NetworkMode, error)) *MockManager_Create_Call { _c.Call.Return(run) return _c } // Inspect provides a mock function for the type MockManager func (_mock *MockManager) Inspect(ctx context.Context) (network.Inspect, error) { ret := _mock.Called(ctx) if len(ret) == 0 { panic("no return value specified for Inspect") } var r0 network.Inspect var r1 error if returnFunc, ok := ret.Get(0).(func(context.Context) (network.Inspect, error)); ok { return returnFunc(ctx) } if returnFunc, ok := ret.Get(0).(func(context.Context) network.Inspect); ok { r0 = returnFunc(ctx) } else { r0 = ret.Get(0).(network.Inspect) } if returnFunc, ok := ret.Get(1).(func(context.Context) error); ok { r1 = returnFunc(ctx) } else { r1 = ret.Error(1) } return r0, r1 } // MockManager_Inspect_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Inspect' type MockManager_Inspect_Call struct { *mock.Call } // Inspect is a helper method to define mock.On call // - ctx context.Context func (_e *MockManager_Expecter) Inspect(ctx interface{}) *MockManager_Inspect_Call { return &MockManager_Inspect_Call{Call: _e.mock.On("Inspect", ctx)} } func (_c *MockManager_Inspect_Call) Run(run func(ctx context.Context)) *MockManager_Inspect_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } run( arg0, ) }) return _c } func (_c *MockManager_Inspect_Call) Return(inspect network.Inspect, err error) *MockManager_Inspect_Call { _c.Call.Return(inspect, err) return _c } func (_c *MockManager_Inspect_Call) RunAndReturn(run func(ctx context.Context) (network.Inspect, error)) *MockManager_Inspect_Call { _c.Call.Return(run) return _c } // 
newMockDebugLogger creates a new instance of mockDebugLogger. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func newMockDebugLogger(t interface { mock.TestingT Cleanup(func()) }) *mockDebugLogger { mock := &mockDebugLogger{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // mockDebugLogger is an autogenerated mock type for the debugLogger type type mockDebugLogger struct { mock.Mock } type mockDebugLogger_Expecter struct { mock *mock.Mock } func (_m *mockDebugLogger) EXPECT() *mockDebugLogger_Expecter { return &mockDebugLogger_Expecter{mock: &_m.Mock} } // Debugln provides a mock function for the type mockDebugLogger func (_mock *mockDebugLogger) Debugln(args ...interface{}) { var _ca []interface{} _ca = append(_ca, args...) _mock.Called(_ca...) return } // mockDebugLogger_Debugln_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Debugln' type mockDebugLogger_Debugln_Call struct { *mock.Call } // Debugln is a helper method to define mock.On call // - args ...interface{} func (_e *mockDebugLogger_Expecter) Debugln(args ...interface{}) *mockDebugLogger_Debugln_Call { return &mockDebugLogger_Debugln_Call{Call: _e.mock.On("Debugln", append([]interface{}{}, args...)...)} } func (_c *mockDebugLogger_Debugln_Call) Run(run func(args ...interface{})) *mockDebugLogger_Debugln_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 []interface{} variadicArgs := make([]interface{}, len(args)-0) for i, a := range args[0:] { if a != nil { variadicArgs[i] = a.(interface{}) } } arg0 = variadicArgs run( arg0..., ) }) return _c } func (_c *mockDebugLogger_Debugln_Call) Return() *mockDebugLogger_Debugln_Call { _c.Call.Return() return _c } func (_c *mockDebugLogger_Debugln_Call) RunAndReturn(run func(args ...interface{})) *mockDebugLogger_Debugln_Call { _c.Run(run) return _c } 
================================================ FILE: executors/docker/internal/networks/utils.go ================================================ package networks type debugLogger interface { Debugln(args ...interface{}) } ================================================ FILE: executors/docker/internal/omitwriter/omit_writer.go ================================================ package omitwriter import ( "fmt" ) type omitWriter struct { buf []byte start int end int n int64 } func New() *omitWriter { return &omitWriter{ buf: make([]byte, 32*1024), } } func (r *omitWriter) Write(p []byte) (n int, err error) { r.n += int64(len(p)) for _, b := range p { r.buf[r.end] = b r.end = (r.end + 1) % cap(r.buf) if r.end == r.start { r.start = (r.start + 1) % cap(r.buf) } } return n, nil } func (r *omitWriter) bytes() []byte { if r.start == r.end { return nil } if r.end < r.start { part1 := r.buf[r.start:] part2 := r.buf[:r.end] return append(part1, part2...) } return r.buf[r.start:r.end] } func (r *omitWriter) Error() error { length := int64(r.end - r.start) if r.end < r.start { length = int64(cap(r.buf) - (r.start - r.end)) } if r.n > length { return fmt.Errorf("omitted %d... 
%s", r.n-length, string(r.bytes())) } return fmt.Errorf("%s", string(r.bytes())) } ================================================ FILE: executors/docker/internal/omitwriter/omit_writer_test.go ================================================ //go:build !integration package omitwriter import ( "strings" "testing" "github.com/stretchr/testify/require" ) func Test_OmitWriter(t *testing.T) { tests := []struct { name string input []string expected string }{ { name: "simple", input: []string{"hello"}, expected: "hello", }, { name: "multiple writes", input: []string{"first", "second"}, expected: "firstsecond", }, { name: "full buffer", input: []string{strings.Repeat("abcdefgh", (32*1024/8)-1) + "1234567"}, expected: strings.Repeat("abcdefgh", (32*1024/8)-1) + "1234567", }, { name: "wrap around", input: []string{strings.Repeat("abcdefgh", (32*1024/8)-1), "1234567wrapped_"}, expected: "omitted 8... " + strings.Repeat("abcdefgh", (32*1024/8)-2) + "1234567wrapped_", }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { writer := New() for _, input := range tc.input { _, err := writer.Write([]byte(input)) require.NoError(t, err) } require.Equal(t, tc.expected, writer.Error().Error()) }) } } ================================================ FILE: executors/docker/internal/prebuilt/prebuilt.go ================================================ package prebuilt import ( "context" "encoding/json" "errors" "fmt" "io" "os" "path/filepath" "runtime" "strings" "github.com/docker/docker/api/types/image" "github.com/sirupsen/logrus" "gitlab.com/gitlab-org/gitlab-runner/helpers/container/helperimage" "gitlab.com/gitlab-org/gitlab-runner/helpers/docker" "gitlab.com/gitlab-org/gitlab-runner/helpers/homedir" ) const ( prebuiltExportImageExtension = ".tar.xz" prebuiltDockerArchiveImageExtension = ".docker.tar.zst" ) var PrebuiltImagesPaths []string func init() { runner, err := os.Executable() if err != nil { logrus.Errorln( "Docker executor: unable to detect gitlab-runner 
folder, "+ "prebuilt image helpers will be loaded from remote registry.", err, ) } runnerFolder := filepath.Dir(runner) PrebuiltImagesPaths = []string{ // When gitlab-runner is running from repository root filepath.Join(runnerFolder, "out/helper-images"), // When gitlab-runner is running from `out/binaries` filepath.Join(runnerFolder, "../helper-images"), // Add working directory path, used when running from temp directory, such as with `go run` filepath.Join(homedir.New().GetWDOrEmpty(), "out/helper-images"), } if runtime.GOOS == "linux" { // This section covers the Linux packaged app scenario, with the binary in /usr/bin. // The helper images are located in /usr/lib/gitlab-runner/helper-images, // as part of the packaging done in the create_package function in ci/package PrebuiltImagesPaths = append( PrebuiltImagesPaths, filepath.Join(runnerFolder, "../lib/gitlab-runner/helper-images"), ) } } func Get(ctx context.Context, client docker.Client, info helperimage.Info) (*image.InspectResponse, error) { if err := load(ctx, client, info); err != nil { return nil, err } image, _, err := client.ImageInspectWithRaw(ctx, info.String()) if err == nil { return &image, nil } return nil, err } func load(ctx context.Context, client docker.Client, info helperimage.Info) error { imagePaths := []string{ info.Prebuilt + prebuiltDockerArchiveImageExtension, info.Prebuilt + prebuiltExportImageExtension, } // future proof using amd64 in the future over x86_64 if strings.Contains(info.Prebuilt, "x86_64") { name := strings.ReplaceAll(info.Prebuilt, "x86_64", "amd64") imagePaths = append( imagePaths, name+prebuiltDockerArchiveImageExtension, name+prebuiltExportImageExtension, ) } var errs []error for _, imageDir := range PrebuiltImagesPaths { for _, imagePath := range imagePaths { importPath := filepath.Join(imageDir, imagePath) if strings.HasSuffix(imagePath, prebuiltDockerArchiveImageExtension) { if err := imageLoad(ctx, client, importPath, info.Name, info.Tag); err != nil { errs = 
append(errs, fmt.Errorf("loading %v: %w", imagePath, err)) continue } return nil } if err := imageImport(ctx, client, importPath, info.Name, info.Tag); err != nil { errs = append(errs, fmt.Errorf("importing %v: %w", imagePath, err)) continue } return nil } } return errors.Join(errs...) } func imageLoad(ctx context.Context, client docker.Client, path, ref, tag string) error { file, err := os.Open(path) if err != nil { return err } defer func() { _ = file.Close() }() resp, err := client.ImageLoad(ctx, file, true) if err != nil { return fmt.Errorf("failed to load image: %w", err) } defer resp.Body.Close() defer func() { _, _ = io.Copy(io.Discard, io.LimitReader(resp.Body, 1024)) }() // image load makes it unnecessarily difficult to get the image ref var event struct { Stream string `json:"stream"` } decoder := json.NewDecoder(resp.Body) var imageID string for decoder.More() { if err := decoder.Decode(&event); err != nil { return fmt.Errorf("decoding image id: %w", err) } switch { case strings.Contains(event.Stream, "Loaded image:"): imageID = strings.TrimSpace(strings.TrimPrefix(event.Stream, "Loaded image:")) case strings.Contains(event.Stream, "Loaded image ID:"): imageID = strings.TrimSpace(strings.TrimPrefix(event.Stream, "Loaded image ID:")) } if imageID != "" { break } } if imageID == "" { return fmt.Errorf("could not find image ID for loaded prebuilt image") } if err := client.ImageTag(ctx, imageID, ref+":"+tag); err != nil { return fmt.Errorf("tagging %v to %v:%v", imageID, ref, tag) } return nil } func imageImport(ctx context.Context, client docker.Client, path, ref, tag string) error { file, err := os.Open(path) if err != nil { return err } defer func() { _ = file.Close() }() source := image.ImportSource{ Source: file, SourceName: "-", } options := image.ImportOptions{ Tag: tag, } // non-concrete based helper images need import modifications if !strings.HasPrefix(tag, "concrete") { // NOTE: The ENTRYPOINT metadata is not preserved on export, so we need to 
reapply this metadata on import. // See https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/2058#note_388341301 options.Changes = []string{`ENTRYPOINT ["/usr/bin/dumb-init", "/entrypoint"]`} } if err = client.ImageImportBlocking(ctx, source, ref, options); err != nil { return fmt.Errorf("failed to import image: %w", err) } return nil } ================================================ FILE: executors/docker/internal/pull/manager.go ================================================ package pull import ( "context" "fmt" "strings" "sync" cli "github.com/docker/cli/cli/config/types" "github.com/docker/docker/api/types/image" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers/docker" "gitlab.com/gitlab-org/gitlab-runner/helpers/docker/auth" "gitlab.com/gitlab-org/gitlab-runner/helpers/pull_policies" ) type Manager interface { GetDockerImage(imageName string, options spec.ImageDockerOptions, imagePullPolicies []common.DockerPullPolicy, ) (*image.InspectResponse, error) } type ManagerConfig struct { DockerConfig *common.DockerConfig AuthConfig string ShellUser string Credentials []spec.Credentials } type pullLogger interface { Debugln(args ...interface{}) Infoln(args ...interface{}) Warningln(args ...interface{}) Println(args ...interface{}) } type manager struct { usedImages map[string]string usedImagesLock sync.Mutex context context.Context config ManagerConfig client docker.Client onPullImageHookFunc func() logger pullLogger } func NewManager( ctx context.Context, logger pullLogger, config ManagerConfig, client docker.Client, onPullImageHookFunc func(), ) Manager { return &manager{ context: ctx, client: client, config: config, logger: logger, onPullImageHookFunc: onPullImageHookFunc, } } func (m *manager) GetDockerImage( imageName string, options spec.ImageDockerOptions, imagePullPolicies []common.DockerPullPolicy, ) (*image.InspectResponse, error) { pullPolicies, err := 
m.getPullPolicies(imagePullPolicies) if err != nil { return nil, err } allowedPullPolicies, err := m.config.DockerConfig.GetAllowedPullPolicies() if err != nil { return nil, err } pullPolicies, err = pull_policies.ComputeEffectivePullPolicies( pullPolicies, allowedPullPolicies, imagePullPolicies, m.config.DockerConfig.PullPolicy) if err != nil { return nil, &common.BuildError{ Inner: fmt.Errorf("invalid pull policy for image %q: %w", imageName, err), FailureReason: common.ConfigurationError, } } m.logger.Println(fmt.Sprintf("Using effective pull policy of %s for container %s", pullPolicies, imageName)) var imageErr error for idx, pullPolicy := range pullPolicies { attempt := 1 + idx if attempt > 1 { m.logger.Infoln(fmt.Sprintf("Attempt #%d: Trying %q pull policy", attempt, pullPolicy)) } var img *image.InspectResponse img, imageErr = m.getImageUsingPullPolicy(imageName, options, pullPolicy) if imageErr != nil { m.logger.Warningln(fmt.Sprintf("Failed to pull image with policy %q: %v", pullPolicy, imageErr)) continue } m.markImageAsUsed(imageName, img) return img, nil } return nil, fmt.Errorf( "failed to pull image %q with specified policies %v: %w", imageName, pullPolicies, imageErr, ) } func (m *manager) wasImageUsed(imageName, imageID string) bool { m.usedImagesLock.Lock() defer m.usedImagesLock.Unlock() return m.usedImages[imageName] == imageID } func (m *manager) markImageAsUsed(imageName string, image *image.InspectResponse) { m.usedImagesLock.Lock() defer m.usedImagesLock.Unlock() if m.usedImages == nil { m.usedImages = make(map[string]string) } m.usedImages[imageName] = image.ID if imageName == image.ID { return } if len(image.RepoDigests) > 0 { m.logger.Println("Using docker image", image.ID, "for", imageName, "with digest", image.RepoDigests[0], "...") } else { m.logger.Println("Using docker image", image.ID, "for", imageName, "...") } } func (m *manager) getImageUsingPullPolicy( imageName string, options spec.ImageDockerOptions, pullPolicy 
common.DockerPullPolicy, ) (*image.InspectResponse, error) { m.logger.Debugln("Looking for image", imageName, "...") existingImage, _, err := m.client.ImageInspectWithRaw(m.context, imageName) // Return early if we already used that image if err == nil && m.wasImageUsed(imageName, existingImage.ID) { return &existingImage, nil } // If never is specified then we return what inspect did return if pullPolicy == common.PullPolicyNever { return &existingImage, err } if err == nil { // Don't pull image that is passed by ID if existingImage.ID == imageName { return &existingImage, nil } // If not-present is specified if pullPolicy == common.PullPolicyIfNotPresent { m.logger.Println(fmt.Sprintf("Using locally found image version due to %q pull policy", pullPolicy)) return &existingImage, err } } authConfig, err := m.resolveAuthConfigForImage(imageName) if err != nil { return nil, err } return m.pullDockerImage(imageName, options, authConfig) } func (m *manager) resolveAuthConfigForImage(imageName string) (*cli.AuthConfig, error) { registryInfo, err := auth.Resolver{}.ConfigForImage( imageName, m.config.AuthConfig, m.config.ShellUser, m.config.Credentials, m.logger, ) if err != nil { return nil, err } if registryInfo == nil { m.logger.Debugln(fmt.Sprintf("No credentials found for %v", imageName)) return nil, nil } authConfig := ®istryInfo.AuthConfig m.logger.Println(fmt.Sprintf("Authenticating with credentials from %v", registryInfo.Source)) m.logger.Debugln(fmt.Sprintf( "Using %v to connect to %v in order to resolve %v...", authConfig.Username, authConfig.ServerAddress, imageName, )) return authConfig, nil } func (m *manager) pullDockerImage(imageName string, options spec.ImageDockerOptions, ac *cli.AuthConfig) (*image.InspectResponse, error) { if m.onPullImageHookFunc != nil { m.onPullImageHookFunc() } msg := "Pulling docker image %s ..." 
if options.Platform == "" { msg = fmt.Sprintf(msg, imageName) } else { msg = fmt.Sprintf(msg, imageName+" for platform "+options.Platform) } m.logger.Println(msg) ref := imageName // Add :latest to limit the download results if !strings.ContainsAny(ref, ":@") { ref += ":latest" } opts := image.PullOptions{ Platform: options.Platform, } var err error if opts.RegistryAuth, err = auth.EncodeConfig(ac); err != nil { return nil, &common.BuildError{Inner: err, FailureReason: common.ImagePullFailure} } if err := m.client.ImagePullBlocking(m.context, ref, opts); err != nil { return nil, &common.BuildError{Inner: err, FailureReason: common.ImagePullFailure} } image, _, err := m.client.ImageInspectWithRaw(m.context, imageName) return &image, err } // getPullPolicies selects the pull_policy configurations originating from // either gitlab-ci.yaml or config.toml. If present, the pull_policies in // gitlab-ci.yaml take precedence over those in config.toml. func (m *manager) getPullPolicies(imagePullPolicies []common.DockerPullPolicy) ([]common.DockerPullPolicy, error) { if len(imagePullPolicies) != 0 { return imagePullPolicies, nil } return m.config.DockerConfig.GetPullPolicies() } ================================================ FILE: executors/docker/internal/pull/manager_test.go ================================================ //go:build !integration package pull import ( "bytes" "context" "errors" "fmt" "os" "regexp" "strings" "testing" "github.com/docker/docker/api/types/image" "github.com/docker/docker/errdefs" logrustest "github.com/sirupsen/logrus/hooks/test" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/buildlogger" "gitlab.com/gitlab-org/gitlab-runner/common/spec" "gitlab.com/gitlab-org/gitlab-runner/helpers/docker" ) func TestNewDefaultManager(t *testing.T) { m := NewManager(t.Context(), newLoggerMock(t), 
ManagerConfig{}, docker.NewMockClient(t), nil) assert.IsType(t, &manager{}, m) } func TestDockerForNamedImage(t *testing.T) { c := docker.NewMockClient(t) validSHA := "real@sha256:b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c" dockerConfig := &common.DockerConfig{} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) c.On("ImagePullBlocking", m.context, "test:latest", mock.AnythingOfType("image.PullOptions")). Return(os.ErrNotExist). Once() c.On("ImagePullBlocking", m.context, "tagged:tag", mock.AnythingOfType("image.PullOptions")). Return(os.ErrNotExist). Once() c.On("ImagePullBlocking", m.context, validSHA, mock.AnythingOfType("image.PullOptions")). Return(os.ErrNotExist). Once() image, err := m.pullDockerImage("test", dockerOptions, nil) assert.Error(t, err) assert.Nil(t, image) image, err = m.pullDockerImage("tagged:tag", dockerOptions, nil) assert.Error(t, err) assert.Nil(t, image) image, err = m.pullDockerImage(validSHA, dockerOptions, nil) assert.Error(t, err) assert.Nil(t, image) } func TestDockerForImagePullFailures(t *testing.T) { c := docker.NewMockClient(t) errTest := errors.New("this is a test") dockerConfig := &common.DockerConfig{} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) tests := map[string]struct { imageName string initMock func(c *docker.MockClient, imageName string) assert func(t *testing.T, m *manager, imageName string) }{ "ImagePullBlocking unwrapped system failure": { imageName: "unwrapped-system:failure", initMock: func(c *docker.MockClient, imageName string) { c.On("ImagePullBlocking", m.context, imageName, mock.AnythingOfType("image.PullOptions")). Return(errdefs.System(errTest)). 
Once() }, assert: func(t *testing.T, m *manager, imageName string) { var buildError *common.BuildError image, err := m.pullDockerImage(imageName, dockerOptions, nil) assert.Nil(t, image) assert.Error(t, err) require.ErrorAs(t, err, &buildError) assert.Equal(t, buildError.FailureReason, common.ImagePullFailure) }, }, "ImagePullBlocking wrapped system failure": { imageName: "wrapped-system:failure", initMock: func(c *docker.MockClient, imageName string) { c.On("ImagePullBlocking", m.context, imageName, mock.AnythingOfType("image.PullOptions")). Return(fmt.Errorf("wrapped error: %w", errdefs.System(errTest))). Once() }, assert: func(t *testing.T, m *manager, imageName string) { var buildError *common.BuildError image, err := m.pullDockerImage(imageName, dockerOptions, nil) assert.Nil(t, image) assert.Error(t, err) require.ErrorAs(t, err, &buildError) assert.Equal(t, buildError.FailureReason, common.ImagePullFailure) }, }, "ImagePullBlocking two level wrapped system failure": { imageName: "two-level-wrapped-system:failure", initMock: func(c *docker.MockClient, imageName string) { c.On("ImagePullBlocking", m.context, imageName, mock.AnythingOfType("image.PullOptions")). Return(fmt.Errorf("wrapped error: %w", fmt.Errorf("wrapped error: %w", errdefs.System(errTest)))). Once() }, assert: func(t *testing.T, m *manager, imageName string) { var buildError *common.BuildError image, err := m.pullDockerImage(imageName, dockerOptions, nil) assert.Nil(t, image) assert.Error(t, err) require.ErrorAs(t, err, &buildError) assert.Equal(t, buildError.FailureReason, common.ImagePullFailure) }, }, "ImagePullBlocking wrapped request timeout failure": { imageName: "wrapped-request-timeout:failure", initMock: func(c *docker.MockClient, imageName string) { c.On("ImagePullBlocking", m.context, imageName, mock.AnythingOfType("image.PullOptions")). Return(fmt.Errorf( "wrapped error: %w", errdefs.System(errors.New( "request canceled while waiting for connection", )))). 
Once() }, assert: func(t *testing.T, m *manager, imageName string) { var buildError *common.BuildError image, err := m.pullDockerImage(imageName, dockerOptions, nil) assert.Nil(t, image) assert.Error(t, err) require.ErrorAs(t, err, &buildError) assert.Equal(t, buildError.FailureReason, common.ImagePullFailure) }, }, "ImagePullBlocking two level wrapped request timeout failure": { imageName: "lwo-level-wrapped-request-timeout:failure", initMock: func(c *docker.MockClient, imageName string) { c.On("ImagePullBlocking", m.context, imageName, mock.AnythingOfType("image.PullOptions")). Return(fmt.Errorf( "wrapped error: %w", fmt.Errorf( "wrapped error: %w", errdefs.System(errors.New( "request canceled while waiting for connection", ))))). Once() }, assert: func(t *testing.T, m *manager, imageName string) { var buildError *common.BuildError image, err := m.pullDockerImage(imageName, dockerOptions, nil) assert.Nil(t, image) assert.Error(t, err) require.ErrorAs(t, err, &buildError) assert.Equal(t, buildError.FailureReason, common.ImagePullFailure) }, }, "ImagePullBlocking unwrapped script failure": { imageName: "unwrapped-script:failure", initMock: func(c *docker.MockClient, imageName string) { c.On("ImagePullBlocking", m.context, imageName, mock.AnythingOfType("image.PullOptions")). Return(errdefs.NotFound(errTest)). Once() }, assert: func(t *testing.T, m *manager, imageName string) { var buildError *common.BuildError image, err := m.pullDockerImage(imageName, dockerOptions, nil) assert.Nil(t, image) assert.Error(t, err) require.ErrorAs(t, err, &buildError) assert.Equal(t, buildError.FailureReason, common.ImagePullFailure) }, }, "ImagePullBlocking wrapped script failure": { imageName: "wrapped-script:failure", initMock: func(c *docker.MockClient, imageName string) { c.On("ImagePullBlocking", m.context, imageName, mock.AnythingOfType("image.PullOptions")). Return(fmt.Errorf("wrapped error: %w", errdefs.NotFound(errTest))). 
Once() }, assert: func(t *testing.T, m *manager, imageName string) { var buildError *common.BuildError image, err := m.pullDockerImage(imageName, dockerOptions, nil) assert.Nil(t, image) assert.Error(t, err) require.ErrorAs(t, err, &buildError) assert.Equal(t, buildError.FailureReason, common.ImagePullFailure) }, }, } for tn, tc := range tests { t.Run(tn, func(t *testing.T) { tc.initMock(c, tc.imageName) tc.assert(t, m, tc.imageName) }) } } func TestDockerForExistingImage(t *testing.T) { c := docker.NewMockClient(t) dockerConfig := &common.DockerConfig{} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) c.On("ImagePullBlocking", m.context, "existing:latest", mock.AnythingOfType("image.PullOptions")). Return(nil). Once() c.On("ImageInspectWithRaw", m.context, "existing"). Return(image.InspectResponse{ID: "image-id"}, nil, nil). Once() image, err := m.pullDockerImage("existing", dockerOptions, nil) assert.NoError(t, err) assert.NotNil(t, image) } func TestDockerGetImageById(t *testing.T) { c := docker.NewMockClient(t) dockerConfig := &common.DockerConfig{} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) m.onPullImageHookFunc = func() { assert.Fail(t, "image should not be pulled") } c.On("ImageInspectWithRaw", m.context, "ID"). Return(image.InspectResponse{ID: "ID"}, nil, nil). 
Once() image, err := m.GetDockerImage("ID", dockerOptions, nil) assert.NoError(t, err) assert.NotNil(t, image) assert.Equal(t, "ID", image.ID) } func TestDockerUnknownPolicyMode(t *testing.T) { c := docker.NewMockClient(t) dockerConfig := &common.DockerConfig{PullPolicy: []string{"unknown"}} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) _, err := m.GetDockerImage("not-existing", dockerOptions, nil) assert.Error(t, err) } func TestDockerPolicyModeNever(t *testing.T) { c := docker.NewMockClient(t) dockerConfig := &common.DockerConfig{PullPolicy: []string{common.PullPolicyNever}} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) m.onPullImageHookFunc = func() { assert.Fail(t, "image should not be pulled") } c.On("ImageInspectWithRaw", m.context, "existing"). Return(image.InspectResponse{ID: "existing"}, nil, nil). Once() c.On("ImageInspectWithRaw", m.context, "not-existing"). Return(image.InspectResponse{}, nil, os.ErrNotExist). Once() image, err := m.GetDockerImage("existing", dockerOptions, nil) assert.NoError(t, err) assert.Equal(t, "existing", image.ID) _, err = m.GetDockerImage("not-existing", dockerOptions, nil) assert.Error(t, err) } func TestDockerPolicyModeIfNotPresentForExistingImage(t *testing.T) { c := docker.NewMockClient(t) dockerConfig := &common.DockerConfig{PullPolicy: []string{common.PullPolicyIfNotPresent}} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) m.onPullImageHookFunc = func() { assert.Fail(t, "image should not be pulled") } c.On("ImageInspectWithRaw", m.context, "existing"). Return(image.InspectResponse{ID: "image-id"}, nil, nil). 
Once() image, err := m.GetDockerImage("existing", dockerOptions, nil) assert.NoError(t, err) assert.NotNil(t, image) } func TestDockerPolicyModeIfNotPresentForNotExistingImage(t *testing.T) { c := docker.NewMockClient(t) dockerConfig := &common.DockerConfig{PullPolicy: []string{common.PullPolicyIfNotPresent}} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) pullImageHookCalled := false m.onPullImageHookFunc = func() { pullImageHookCalled = true } c.On("ImageInspectWithRaw", m.context, "not-existing"). Return(image.InspectResponse{}, nil, os.ErrNotExist). Once() c.On("ImagePullBlocking", m.context, "not-existing:latest", mock.AnythingOfType("image.PullOptions")). Return(nil). Once() c.On("ImageInspectWithRaw", m.context, "not-existing"). Return(image.InspectResponse{ID: "image-id"}, nil, nil). Once() img, err := m.GetDockerImage("not-existing", dockerOptions, nil) assert.NoError(t, err) assert.NotNil(t, img) assert.True(t, pullImageHookCalled) c.On("ImageInspectWithRaw", m.context, "not-existing"). Return(image.InspectResponse{ID: "image-id"}, nil, nil). Once() // It shouldn't execute the pull for second time img, err = m.GetDockerImage("not-existing", dockerOptions, nil) assert.NoError(t, err) assert.NotNil(t, img) } func TestDockerPolicyModeAlwaysForExistingImage(t *testing.T) { c := docker.NewMockClient(t) dockerConfig := &common.DockerConfig{PullPolicy: []string{common.PullPolicyAlways}} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) pullImageHookCalled := false m.onPullImageHookFunc = func() { pullImageHookCalled = true } c.On("ImageInspectWithRaw", m.context, "existing"). Return(image.InspectResponse{ID: "image-id"}, nil, nil). Once() c.On("ImagePullBlocking", m.context, "existing:latest", mock.AnythingOfType("image.PullOptions")). Return(nil). Once() c.On("ImageInspectWithRaw", m.context, "existing"). Return(image.InspectResponse{ID: "image-id"}, nil, nil). 
Once() image, err := m.GetDockerImage("existing", dockerOptions, nil) assert.NoError(t, err) assert.NotNil(t, image) assert.True(t, pullImageHookCalled) } func TestDockerPolicyModeAlwaysForLocalOnlyImage(t *testing.T) { c := docker.NewMockClient(t) dockerConfig := &common.DockerConfig{PullPolicy: []string{common.PullPolicyAlways}} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) pullImageHookCalled := false m.onPullImageHookFunc = func() { pullImageHookCalled = true } c.On("ImageInspectWithRaw", m.context, "existing"). Return(image.InspectResponse{ID: "image-id"}, nil, nil). Once() c.On("ImagePullBlocking", m.context, "existing:latest", mock.AnythingOfType("image.PullOptions")). Return(fmt.Errorf("not found")). Once() image, err := m.GetDockerImage("existing", dockerOptions, nil) assert.Error(t, err) assert.Nil(t, image) assert.True(t, pullImageHookCalled) } func TestDockerGetExistingDockerImageIfPullFails(t *testing.T) { c := docker.NewMockClient(t) dockerConfig := &common.DockerConfig{PullPolicy: []string{common.PullPolicyAlways}} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) c.On("ImageInspectWithRaw", m.context, "to-pull"). Return(image.InspectResponse{ID: "image-id"}, nil, nil). Once() c.On("ImagePullBlocking", m.context, "to-pull:latest", mock.AnythingOfType("image.PullOptions")). Return(os.ErrNotExist). Once() img, err := m.GetDockerImage("to-pull", dockerOptions, nil) assert.Error(t, err) assert.Nil(t, img, "Forces to authorize pulling") c.On("ImageInspectWithRaw", m.context, "not-existing"). Return(image.InspectResponse{}, nil, os.ErrNotExist). Once() c.On("ImagePullBlocking", m.context, "not-existing:latest", mock.AnythingOfType("image.PullOptions")). Return(os.ErrNotExist). 
Once() img, err = m.GetDockerImage("not-existing", dockerOptions, nil) assert.Error(t, err) assert.Nil(t, img, "No existing image") } func TestCombinedDockerPolicyModesAlwaysAndIfNotPresentForExistingImage(t *testing.T) { c := docker.NewMockClient(t) logger, _ := logrustest.NewNullLogger() output := bytes.NewBufferString("") buildLogger := buildlogger.New(&common.Trace{Writer: output}, logger.WithField("test", t.Name()), buildlogger.Options{}) dockerConfig := &common.DockerConfig{PullPolicy: []string{common.PullPolicyAlways, common.PullPolicyIfNotPresent}} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) m.logger = &buildLogger c.On("ImageInspectWithRaw", m.context, "existing"). Return(image.InspectResponse{ID: "image-id"}, nil, nil). Once() c.On("ImagePullBlocking", m.context, "existing:latest", mock.AnythingOfType("image.PullOptions")). Return(errors.New("received unexpected HTTP status: 502 Bad Gateway")). Once() c.On("ImageInspectWithRaw", m.context, "existing"). Return(image.InspectResponse{ID: "local-image-id"}, nil, nil). Once() image, err := m.GetDockerImage("existing", dockerOptions, nil) assert.NoError(t, err) assert.Contains(t, output.String(), `WARNING: Failed to pull image with policy "always": `+ `received unexpected HTTP status: 502 Bad Gateway`) assert.Contains(t, output.String(), `Attempt #2: Trying "if-not-present" pull policy`) assert.Contains(t, output.String(), `Using locally found image version due to "if-not-present" pull policy`) require.NotNil(t, image) assert.Equal(t, "local-image-id", image.ID) } func TestCombinedDockerPolicyModeAlwaysAndIfNotPresentForNonExistingImage(t *testing.T) { c := docker.NewMockClient(t) dockerConfig := &common.DockerConfig{PullPolicy: []string{common.PullPolicyAlways, common.PullPolicyIfNotPresent}} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) c.On("ImageInspectWithRaw", m.context, "not-existing"). 
Return(image.InspectResponse{}, nil, os.ErrNotExist). Once() c.On("ImagePullBlocking", m.context, "not-existing:latest", mock.AnythingOfType("image.PullOptions")). Return(os.ErrNotExist). Twice() c.On("ImageInspectWithRaw", m.context, "not-existing"). Return(image.InspectResponse{}, nil, os.ErrNotExist). Once() image, err := m.GetDockerImage("not-existing", dockerOptions, nil) assert.Error(t, err) assert.Nil(t, image, "No existing image") } func TestPullPolicyWhenAlwaysIsSet(t *testing.T) { remoteImage := "registry.domain.tld:5005/image/name:version" gitlabImage := "registry.gitlab.tld:1234/image/name:version" dockerConfig := &common.DockerConfig{PullPolicy: []string{common.PullPolicyAlways}} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, nil, dockerConfig) testGetDockerImage(t, m, remoteImage, dockerOptions, addPullsRemoteImageExpectations) testDeniesDockerImage(t, m, remoteImage, dockerOptions, addDeniesPullExpectations) testGetDockerImage(t, m, gitlabImage, dockerOptions, addPullsRemoteImageExpectations) testDeniesDockerImage(t, m, gitlabImage, dockerOptions, addDeniesPullExpectations) } func TestPullPolicyWhenIfNotPresentIsSet(t *testing.T) { remoteImage := "registry.domain.tld:5005/image/name:version" gitlabImage := "registry.gitlab.tld:1234/image/name:version" dockerConfig := &common.DockerConfig{PullPolicy: []string{common.PullPolicyIfNotPresent}} dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, nil, dockerConfig) testGetDockerImage(t, m, remoteImage, dockerOptions, addFindsLocalImageExpectations) testGetDockerImage(t, m, gitlabImage, dockerOptions, addFindsLocalImageExpectations) } func TestPullPolicyPassedAsIfNotPresentForExistingAndConfigAlways(t *testing.T) { c := docker.NewMockClient(t) dockerConfig := &common.DockerConfig{ PullPolicy: []string{common.PullPolicyAlways}, AllowedPullPolicies: []common.DockerPullPolicy{common.PullPolicyAlways, common.PullPolicyIfNotPresent}, } dockerOptions := 
spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) m.onPullImageHookFunc = func() { assert.Fail(t, "image should not be pulled") } c.On("ImageInspectWithRaw", m.context, "existing"). Return(image.InspectResponse{ID: "image-id"}, nil, nil). Once() imagePullPolicies := []common.DockerPullPolicy{common.PullPolicyIfNotPresent} image, err := m.GetDockerImage("existing", dockerOptions, imagePullPolicies) assert.NoError(t, err) assert.NotNil(t, image) } func TestPullPolicyPassedAsIfNotPresentForNonExistingAndConfigAlways(t *testing.T) { c := docker.NewMockClient(t) dockerConfig := &common.DockerConfig{ PullPolicy: []string{common.PullPolicyAlways}, AllowedPullPolicies: []common.DockerPullPolicy{common.PullPolicyAlways, common.PullPolicyIfNotPresent}, } dockerOptions := spec.ImageDockerOptions{} m := newDefaultTestManager(t, c, dockerConfig) pullImageHookCalled := false m.onPullImageHookFunc = func() { pullImageHookCalled = true } c.On("ImageInspectWithRaw", m.context, "not-existing"). Return(image.InspectResponse{}, nil, os.ErrNotExist). Once() c.On("ImagePullBlocking", m.context, "not-existing:latest", mock.AnythingOfType("image.PullOptions")). Return(nil). Once() c.On("ImageInspectWithRaw", m.context, "not-existing"). Return(image.InspectResponse{ID: "image-id"}, nil, nil). 
		Once()

	imagePullPolicies := []common.DockerPullPolicy{common.PullPolicyIfNotPresent}

	image, err := m.GetDockerImage("not-existing", dockerOptions, imagePullPolicies)
	assert.NoError(t, err)
	assert.NotNil(t, image)
	assert.True(t, pullImageHookCalled, "image should have been pulled")
}

// TestPullPolicyPassedAsIfNotPresentButNotAllowedDefault verifies that a
// job-level if-not-present pull policy is rejected when the runner config
// defines no allowed_pull_policies (per the asserted error, the default
// allowed policy is "always").
func TestPullPolicyPassedAsIfNotPresentButNotAllowedDefault(t *testing.T) {
	c := docker.NewMockClient(t)
	dockerConfig := &common.DockerConfig{}
	dockerOptions := spec.ImageDockerOptions{}

	m := newDefaultTestManager(t, c, dockerConfig)
	// The policy check must fail before any pull is attempted.
	m.onPullImageHookFunc = func() { assert.Fail(t, "image should not be pulled") }

	imagePullPolicies := []common.DockerPullPolicy{common.PullPolicyIfNotPresent}

	_, err := m.GetDockerImage("existing", dockerOptions, imagePullPolicies)
	assert.Contains(
		t,
		err.Error(),
		`invalid pull policy for image "existing"`,
	)
	assert.Regexp(t, regexp.MustCompile(`if-not-present.* GitLab pipeline config .*always`), err.Error())
}

// TestPullPolicyPassedAsIfNotPresentButNotAllowed verifies that a job-level
// if-not-present pull policy is rejected when the runner config only allows
// the "never" policy.
func TestPullPolicyPassedAsIfNotPresentButNotAllowed(t *testing.T) {
	c := docker.NewMockClient(t)
	dockerConfig := &common.DockerConfig{
		AllowedPullPolicies: []common.DockerPullPolicy{common.PullPolicyNever},
	}
	dockerOptions := spec.ImageDockerOptions{}

	m := newDefaultTestManager(t, c, dockerConfig)
	m.onPullImageHookFunc = func() { assert.Fail(t, "image should not be pulled") }

	imagePullPolicies := []common.DockerPullPolicy{common.PullPolicyIfNotPresent}

	_, err := m.GetDockerImage("existing", dockerOptions, imagePullPolicies)
	assert.Contains(
		t,
		err.Error(),
		`invalid pull policy for image "existing"`,
	)
	assert.Regexp(t, regexp.MustCompile(`if-not-present.* GitLab pipeline config .*never`), err.Error())
}

// TestPullPolicyWhenConfigIsNotAllowed verifies that pull policies coming from
// the runner config (config.toml) are themselves validated against
// allowed_pull_policies — the asserted error mentions "Runner config".
func TestPullPolicyWhenConfigIsNotAllowed(t *testing.T) {
	c := docker.NewMockClient(t)
	dockerConfig := &common.DockerConfig{
		PullPolicy:          []string{common.PullPolicyNever, common.PullPolicyIfNotPresent},
		AllowedPullPolicies: []common.DockerPullPolicy{common.PullPolicyAlways},
	}
	dockerOptions := spec.ImageDockerOptions{}

	m := newDefaultTestManager(t, c, dockerConfig)
	m.onPullImageHookFunc = func() { assert.Fail(t, "image should not be pulled") }

	_, err := m.GetDockerImage("existing", dockerOptions, nil)
	assert.Contains(
		t,
		err.Error(),
		`invalid pull policy for image "existing"`,
	)
	assert.Regexp(t, regexp.MustCompile(`never if-not-present.* Runner config .*always`), err.Error())
}

// TestPullPolicyWhenConfigIsAllowed covers the happy path: the configured
// "never" policy is allowed, so the locally present image is used without a
// pull (only ImageInspectWithRaw is expected on the client).
func TestPullPolicyWhenConfigIsAllowed(t *testing.T) {
	c := docker.NewMockClient(t)
	dockerConfig := &common.DockerConfig{
		PullPolicy:          []string{common.PullPolicyNever},
		AllowedPullPolicies: []common.DockerPullPolicy{common.PullPolicyIfNotPresent, common.PullPolicyNever},
	}
	dockerOptions := spec.ImageDockerOptions{}

	m := newDefaultTestManager(t, c, dockerConfig)
	m.onPullImageHookFunc = func() { assert.Fail(t, "image should not be pulled") }

	c.On("ImageInspectWithRaw", m.context, "existing").
		Return(image.InspectResponse{ID: "existing"}, nil, nil).
		Once()

	image, err := m.GetDockerImage("existing", dockerOptions, nil)
	assert.NoError(t, err)
	assert.Equal(t, "existing", image.ID)
}

// TestPullPolicyWhenConfigPullPolicyIsInvalid verifies that an unknown value
// in the pull_policy config is reported as an error.
func TestPullPolicyWhenConfigPullPolicyIsInvalid(t *testing.T) {
	c := docker.NewMockClient(t)
	dockerConfig := &common.DockerConfig{
		PullPolicy:          []string{"invalid"},
		AllowedPullPolicies: []common.DockerPullPolicy{common.PullPolicyAlways},
	}
	dockerOptions := spec.ImageDockerOptions{}

	m := newDefaultTestManager(t, c, dockerConfig)
	m.onPullImageHookFunc = func() { assert.Fail(t, "image should not be pulled") }

	_, err := m.GetDockerImage("existing", dockerOptions, nil)
	assert.EqualError(
		t,
		err,
		"unsupported pull_policy config: \"invalid\"",
	)
}

// TestPullPolicyWhenConfigAllowedPullPoliciesIsInvalid verifies that an
// unknown value in the allowed_pull_policies config is reported as an error.
func TestPullPolicyWhenConfigAllowedPullPoliciesIsInvalid(t *testing.T) {
	c := docker.NewMockClient(t)
	dockerConfig := &common.DockerConfig{
		PullPolicy:          []string{common.PullPolicyAlways},
		AllowedPullPolicies: []common.DockerPullPolicy{"invalid"},
	}
	dockerOptions := spec.ImageDockerOptions{}

	m := newDefaultTestManager(t, c, dockerConfig)
	m.onPullImageHookFunc = func() { assert.Fail(t, "image should not be pulled") }

	_, err := m.GetDockerImage("existing", dockerOptions, nil)
	assert.EqualError(
		t,
		err,
		"unsupported allowed_pull_policies config: \"invalid\"",
	)
}

// newLoggerMock builds a pullLogger mock that tolerates — but does not
// require — the log calls the manager may emit while pulling (Maybe()).
func newLoggerMock(t *testing.T) *mockPullLogger {
	loggerMock := newMockPullLogger(t)
	loggerMock.On(
		"Debugln",
		mock.AnythingOfType("string"),
		mock.AnythingOfType("string"),
		mock.AnythingOfType("string"),
	).Maybe()
	loggerMock.On("Infoln", mock.AnythingOfType("string")).Maybe()
	loggerMock.On("Warningln", mock.AnythingOfType("string")).Maybe()
	loggerMock.On("Println", mock.AnythingOfType("string"), mock.Anything).Maybe()
	loggerMock.On(
		"Println",
		mock.AnythingOfType("string"),
		mock.AnythingOfType("string"),
		mock.AnythingOfType("string"),
	).Maybe()
	loggerMock.On(
		"Println",
		mock.AnythingOfType("string"),
		mock.AnythingOfType("string"),
		mock.AnythingOfType("string"),
		mock.AnythingOfType("string"),
		mock.AnythingOfType("string"),
	).Maybe()

	return loggerMock
}

// newDefaultTestManager wires a pull manager around the mocked docker client
// and the given config.
func newDefaultTestManager(t *testing.T, client *docker.MockClient, dockerConfig *common.DockerConfig) *manager {
	// Create a unique context value that can be later compared with to ensure
	// that the production code is passing it to the mocks
	ctx := context.WithValue(t.Context(), new(struct{}), "unique context")
	return &manager{
		context: ctx,
		logger:  newLoggerMock(t),
		config: ManagerConfig{
			DockerConfig: dockerConfig,
		},
		client: client,
	}
}

// testGetDockerImage runs a subtest asserting GetDockerImage succeeds and
// resolves to the sentinel ID "this-image" set up by setClientExpectations.
func testGetDockerImage(
	t *testing.T,
	m *manager,
	imageName string,
	dockerOptions spec.ImageDockerOptions,
	setClientExpectations func(c *docker.MockClient, imageName string),
) {
	t.Run("get:"+imageName, func(t *testing.T) {
		c := docker.NewMockClient(t)
		m.client = c

		setClientExpectations(c, imageName)

		image, err := m.GetDockerImage(imageName, dockerOptions, nil)
		assert.NoError(t, err, "Should not generate error")
		assert.Equal(t, "this-image", image.ID, "Image ID")
	})
}

// testDeniesDockerImage runs a subtest asserting GetDockerImage fails with
// the expectations set up by setClientExpectations.
func testDeniesDockerImage(
	t *testing.T,
	m *manager,
	imageName string,
	dockerOptions spec.ImageDockerOptions,
	setClientExpectations func(c *docker.MockClient, imageName string),
) {
	t.Run("deny:"+imageName, func(t *testing.T) {
		c := docker.NewMockClient(t)
		m.client = c

		setClientExpectations(c, imageName)

		_, err := m.GetDockerImage(imageName, dockerOptions, nil)
		assert.Error(t, err, "Should generate error")
	})
}

// addFindsLocalImageExpectations: the image already exists locally, so a
// single inspect resolves it.
func addFindsLocalImageExpectations(c *docker.MockClient, imageName string) {
	c.On("ImageInspectWithRaw", mock.Anything, imageName).
		Return(image.InspectResponse{ID: "this-image"}, nil, nil).
		Once()
}

// addPullsRemoteImageExpectations: local inspect returns a different ID, a
// pull happens, and a second inspect resolves the freshly pulled image.
func addPullsRemoteImageExpectations(c *docker.MockClient, imageName string) {
	c.On("ImageInspectWithRaw", mock.Anything, imageName).
		Return(image.InspectResponse{ID: "not-this-image"}, nil, nil).
		Once()
	c.On("ImagePullBlocking", mock.Anything, imageName, mock.AnythingOfType("image.PullOptions")).
		Return(nil).
		Once()
	c.On("ImageInspectWithRaw", mock.Anything, imageName).
		Return(image.InspectResponse{ID: "this-image"}, nil, nil).
		Once()
}

// addDeniesPullExpectations: the pull itself fails.
func addDeniesPullExpectations(c *docker.MockClient, imageName string) {
	c.On("ImageInspectWithRaw", mock.Anything, imageName).
		Return(image.InspectResponse{ID: "image"}, nil, nil).
		Once()
	c.On("ImagePullBlocking", mock.Anything, imageName, mock.AnythingOfType("image.PullOptions")).
		Return(fmt.Errorf("deny pulling")).
		Once()
}

// Test_manager_getPullPolicies verifies the precedence of pull-policy
// sources: .gitlab-ci.yml policies win over config.toml, and "always" is the
// fallback when neither is set.
func Test_manager_getPullPolicies(t *testing.T) {
	m := manager{
		config: ManagerConfig{
			DockerConfig: &common.DockerConfig{},
		},
	}

	tests := map[string]struct {
		imagePullPolicies []common.DockerPullPolicy
		pullPolicy        common.StringOrArray
		want              []common.DockerPullPolicy
	}{
		"gitlab-ci.yaml only": {
			imagePullPolicies: []common.DockerPullPolicy{common.PullPolicyIfNotPresent},
			pullPolicy:        common.StringOrArray{},
			want:              []common.DockerPullPolicy{common.PullPolicyIfNotPresent},
		},
		"config.toml only": {
			imagePullPolicies: []common.DockerPullPolicy{},
			pullPolicy:        common.StringOrArray{common.PullPolicyIfNotPresent},
			want:              []common.DockerPullPolicy{common.PullPolicyIfNotPresent},
		},
		"both": {
			imagePullPolicies: []common.DockerPullPolicy{common.PullPolicyIfNotPresent},
			pullPolicy:        common.StringOrArray{common.PullPolicyNever},
			want:              []common.DockerPullPolicy{common.PullPolicyIfNotPresent},
		},
		"not configured": {
			imagePullPolicies: []common.DockerPullPolicy{},
			pullPolicy:        common.StringOrArray{},
			want:              []common.DockerPullPolicy{common.PullPolicyAlways},
		},
	}

	for name, tt := range tests {
		t.Run(name, func(t *testing.T) {
			m.config.DockerConfig.PullPolicy = tt.pullPolicy

			got, err := m.getPullPolicies(tt.imagePullPolicies)
			assert.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}

// TestDockerGetImagePlatformSuccess verifies that pullDockerImage pulls the
// :latest-qualified reference and inspects the bare name, returning the
// image with the requested architecture.
func TestDockerGetImagePlatformSuccess(t *testing.T) {
	c := docker.NewMockClient(t)
	dockerConfig := &common.DockerConfig{}
	dockerOptions := spec.ImageDockerOptions{}
	dockerOptions.Platform = "arm64/v8"

	m := newDefaultTestManager(t, c, dockerConfig)

	c.On("ImagePullBlocking", m.context, "test:latest", mock.AnythingOfType("image.PullOptions")).
		Return(nil).
		Once()
	c.On("ImageInspectWithRaw", m.context, "test").
		Return(image.InspectResponse{Architecture: "arm64/v8"}, nil, nil).
		Once()

	image, err := m.pullDockerImage("test", dockerOptions, nil)
	assert.NoError(t, err)
	assert.NotNil(t, image)
	assert.Equal(t, "arm64/v8", image.Architecture)
}

// TestGetDockerImageWithPlatform verifies that a platform option still goes
// through the regular pull flow for a remote image.
func TestGetDockerImageWithPlatform(t *testing.T) {
	remoteImage := "registry.domain.tld:5005/image/name:version"
	dockerConfig := &common.DockerConfig{}
	dockerOptions := spec.ImageDockerOptions{Platform: "foo/bar"}
	m := newDefaultTestManager(t, nil, dockerConfig)

	testGetDockerImage(t, m, remoteImage, dockerOptions, addPullsRemoteImageExpectations)
}

// TestResolveAuthConfigForImageErrorsOnPathTraversal verifies that a
// credsStore containing path traversal ("../") is rejected outright.
func TestResolveAuthConfigForImageErrorsOnPathTraversal(t *testing.T) {
	loggerMock := newMockPullLogger(t)
	loggerMock.On("Debugln", mock.Anything, mock.Anything, mock.Anything).Maybe()

	m := &manager{
		context: t.Context(),
		logger:  loggerMock,
		config: ManagerConfig{
			DockerConfig: &common.DockerConfig{},
			AuthConfig:   `{"credsStore": "../../usr/bin/sudo"}`,
		},
	}

	authConfig, err := m.resolveAuthConfigForImage("registry.domain.tld:5005/image/name:version")
	assert.ErrorContains(t, err, "path traversal")
	assert.Nil(t, authConfig)
}

// TestResolveAuthConfigForImageWarnsMissingCredentialHelper verifies that a
// missing credential helper produces a warning (not an error) and yields no
// credentials.
func TestResolveAuthConfigForImageWarnsMissingCredentialHelper(t *testing.T) {
	loggerMock := newMockPullLogger(t)
	loggerMock.On("Debugln", mock.Anything, mock.Anything, mock.Anything).Maybe()
	loggerMock.On("Warningln", mock.MatchedBy(func(msg string) bool {
		return strings.Contains(msg, "$DOCKER_AUTH_CONFIG") &&
			strings.Contains(msg, "Credentials from this source will not be used")
	})).Once()

	m := &manager{
		context: t.Context(),
		logger:  loggerMock,
		config: ManagerConfig{
			DockerConfig: &common.DockerConfig{},
			AuthConfig:   `{"credsStore": "nonexistent-helper"}`,
		},
	}

	authConfig, err := m.resolveAuthConfigForImage("registry.domain.tld:5005/image/name:version")
	assert.NoError(t, err)
	assert.Nil(t, authConfig)
}

================================================
FILE: executors/docker/internal/pull/mocks.go
================================================
// Code generated by mockery; DO NOT EDIT.
// github.com/vektra/mockery // template: testify package pull import ( "github.com/docker/docker/api/types/image" mock "github.com/stretchr/testify/mock" "gitlab.com/gitlab-org/gitlab-runner/common" "gitlab.com/gitlab-org/gitlab-runner/common/spec" ) // NewMockManager creates a new instance of MockManager. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. func NewMockManager(t interface { mock.TestingT Cleanup(func()) }) *MockManager { mock := &MockManager{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockManager is an autogenerated mock type for the Manager type type MockManager struct { mock.Mock } type MockManager_Expecter struct { mock *mock.Mock } func (_m *MockManager) EXPECT() *MockManager_Expecter { return &MockManager_Expecter{mock: &_m.Mock} } // GetDockerImage provides a mock function for the type MockManager func (_mock *MockManager) GetDockerImage(imageName string, options spec.ImageDockerOptions, imagePullPolicies []common.DockerPullPolicy) (*image.InspectResponse, error) { ret := _mock.Called(imageName, options, imagePullPolicies) if len(ret) == 0 { panic("no return value specified for GetDockerImage") } var r0 *image.InspectResponse var r1 error if returnFunc, ok := ret.Get(0).(func(string, spec.ImageDockerOptions, []common.DockerPullPolicy) (*image.InspectResponse, error)); ok { return returnFunc(imageName, options, imagePullPolicies) } if returnFunc, ok := ret.Get(0).(func(string, spec.ImageDockerOptions, []common.DockerPullPolicy) *image.InspectResponse); ok { r0 = returnFunc(imageName, options, imagePullPolicies) } else { if ret.Get(0) != nil { r0 = ret.Get(0).(*image.InspectResponse) } } if returnFunc, ok := ret.Get(1).(func(string, spec.ImageDockerOptions, []common.DockerPullPolicy) error); ok { r1 = returnFunc(imageName, options, imagePullPolicies) } else { r1 = ret.Error(1) } return r0, r1 } 
// MockManager_GetDockerImage_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GetDockerImage' type MockManager_GetDockerImage_Call struct { *mock.Call } // GetDockerImage is a helper method to define mock.On call // - imageName string // - options spec.ImageDockerOptions // - imagePullPolicies []common.DockerPullPolicy func (_e *MockManager_Expecter) GetDockerImage(imageName interface{}, options interface{}, imagePullPolicies interface{}) *MockManager_GetDockerImage_Call { return &MockManager_GetDockerImage_Call{Call: _e.mock.On("GetDockerImage", imageName, options, imagePullPolicies)} } func (_c *MockManager_GetDockerImage_Call) Run(run func(imageName string, options spec.ImageDockerOptions, imagePullPolicies []common.DockerPullPolicy)) *MockManager_GetDockerImage_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 string if args[0] != nil { arg0 = args[0].(string) } var arg1 spec.ImageDockerOptions if args[1] != nil { arg1 = args[1].(spec.ImageDockerOptions) } var arg2 []common.DockerPullPolicy if args[2] != nil { arg2 = args[2].([]common.DockerPullPolicy) } run( arg0, arg1, arg2, ) }) return _c } func (_c *MockManager_GetDockerImage_Call) Return(inspectResponse *image.InspectResponse, err error) *MockManager_GetDockerImage_Call { _c.Call.Return(inspectResponse, err) return _c } func (_c *MockManager_GetDockerImage_Call) RunAndReturn(run func(imageName string, options spec.ImageDockerOptions, imagePullPolicies []common.DockerPullPolicy) (*image.InspectResponse, error)) *MockManager_GetDockerImage_Call { _c.Call.Return(run) return _c } // newMockPullLogger creates a new instance of mockPullLogger. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func newMockPullLogger(t interface { mock.TestingT Cleanup(func()) }) *mockPullLogger { mock := &mockPullLogger{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // mockPullLogger is an autogenerated mock type for the pullLogger type type mockPullLogger struct { mock.Mock } type mockPullLogger_Expecter struct { mock *mock.Mock } func (_m *mockPullLogger) EXPECT() *mockPullLogger_Expecter { return &mockPullLogger_Expecter{mock: &_m.Mock} } // Debugln provides a mock function for the type mockPullLogger func (_mock *mockPullLogger) Debugln(args ...interface{}) { var _ca []interface{} _ca = append(_ca, args...) _mock.Called(_ca...) return } // mockPullLogger_Debugln_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Debugln' type mockPullLogger_Debugln_Call struct { *mock.Call } // Debugln is a helper method to define mock.On call // - args ...interface{} func (_e *mockPullLogger_Expecter) Debugln(args ...interface{}) *mockPullLogger_Debugln_Call { return &mockPullLogger_Debugln_Call{Call: _e.mock.On("Debugln", append([]interface{}{}, args...)...)} } func (_c *mockPullLogger_Debugln_Call) Run(run func(args ...interface{})) *mockPullLogger_Debugln_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 []interface{} variadicArgs := make([]interface{}, len(args)-0) for i, a := range args[0:] { if a != nil { variadicArgs[i] = a.(interface{}) } } arg0 = variadicArgs run( arg0..., ) }) return _c } func (_c *mockPullLogger_Debugln_Call) Return() *mockPullLogger_Debugln_Call { _c.Call.Return() return _c } func (_c *mockPullLogger_Debugln_Call) RunAndReturn(run func(args ...interface{})) *mockPullLogger_Debugln_Call { _c.Run(run) return _c } // Infoln provides a mock function for the type mockPullLogger func (_mock *mockPullLogger) Infoln(args ...interface{}) { var _ca []interface{} _ca = append(_ca, args...) _mock.Called(_ca...) 
return } // mockPullLogger_Infoln_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Infoln' type mockPullLogger_Infoln_Call struct { *mock.Call } // Infoln is a helper method to define mock.On call // - args ...interface{} func (_e *mockPullLogger_Expecter) Infoln(args ...interface{}) *mockPullLogger_Infoln_Call { return &mockPullLogger_Infoln_Call{Call: _e.mock.On("Infoln", append([]interface{}{}, args...)...)} } func (_c *mockPullLogger_Infoln_Call) Run(run func(args ...interface{})) *mockPullLogger_Infoln_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 []interface{} variadicArgs := make([]interface{}, len(args)-0) for i, a := range args[0:] { if a != nil { variadicArgs[i] = a.(interface{}) } } arg0 = variadicArgs run( arg0..., ) }) return _c } func (_c *mockPullLogger_Infoln_Call) Return() *mockPullLogger_Infoln_Call { _c.Call.Return() return _c } func (_c *mockPullLogger_Infoln_Call) RunAndReturn(run func(args ...interface{})) *mockPullLogger_Infoln_Call { _c.Run(run) return _c } // Println provides a mock function for the type mockPullLogger func (_mock *mockPullLogger) Println(args ...interface{}) { var _ca []interface{} _ca = append(_ca, args...) _mock.Called(_ca...) 
return } // mockPullLogger_Println_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Println' type mockPullLogger_Println_Call struct { *mock.Call } // Println is a helper method to define mock.On call // - args ...interface{} func (_e *mockPullLogger_Expecter) Println(args ...interface{}) *mockPullLogger_Println_Call { return &mockPullLogger_Println_Call{Call: _e.mock.On("Println", append([]interface{}{}, args...)...)} } func (_c *mockPullLogger_Println_Call) Run(run func(args ...interface{})) *mockPullLogger_Println_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 []interface{} variadicArgs := make([]interface{}, len(args)-0) for i, a := range args[0:] { if a != nil { variadicArgs[i] = a.(interface{}) } } arg0 = variadicArgs run( arg0..., ) }) return _c } func (_c *mockPullLogger_Println_Call) Return() *mockPullLogger_Println_Call { _c.Call.Return() return _c } func (_c *mockPullLogger_Println_Call) RunAndReturn(run func(args ...interface{})) *mockPullLogger_Println_Call { _c.Run(run) return _c } // Warningln provides a mock function for the type mockPullLogger func (_mock *mockPullLogger) Warningln(args ...interface{}) { var _ca []interface{} _ca = append(_ca, args...) _mock.Called(_ca...) 
return } // mockPullLogger_Warningln_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Warningln' type mockPullLogger_Warningln_Call struct { *mock.Call } // Warningln is a helper method to define mock.On call // - args ...interface{} func (_e *mockPullLogger_Expecter) Warningln(args ...interface{}) *mockPullLogger_Warningln_Call { return &mockPullLogger_Warningln_Call{Call: _e.mock.On("Warningln", append([]interface{}{}, args...)...)} } func (_c *mockPullLogger_Warningln_Call) Run(run func(args ...interface{})) *mockPullLogger_Warningln_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 []interface{} variadicArgs := make([]interface{}, len(args)-0) for i, a := range args[0:] { if a != nil { variadicArgs[i] = a.(interface{}) } } arg0 = variadicArgs run( arg0..., ) }) return _c } func (_c *mockPullLogger_Warningln_Call) Return() *mockPullLogger_Warningln_Call { _c.Call.Return() return _c } func (_c *mockPullLogger_Warningln_Call) RunAndReturn(run func(args ...interface{})) *mockPullLogger_Warningln_Call { _c.Run(run) return _c } ================================================ FILE: executors/docker/internal/user/mocks.go ================================================ // Code generated by mockery; DO NOT EDIT. // github.com/vektra/mockery // template: testify package user import ( "context" mock "github.com/stretchr/testify/mock" ) // NewMockInspect creates a new instance of MockInspect. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. // The first argument is typically a *testing.T value. 
func NewMockInspect(t interface { mock.TestingT Cleanup(func()) }) *MockInspect { mock := &MockInspect{} mock.Mock.Test(t) t.Cleanup(func() { mock.AssertExpectations(t) }) return mock } // MockInspect is an autogenerated mock type for the Inspect type type MockInspect struct { mock.Mock } type MockInspect_Expecter struct { mock *mock.Mock } func (_m *MockInspect) EXPECT() *MockInspect_Expecter { return &MockInspect_Expecter{mock: &_m.Mock} } // GID provides a mock function for the type MockInspect func (_mock *MockInspect) GID(ctx context.Context, containerID string) (int, error) { ret := _mock.Called(ctx, containerID) if len(ret) == 0 { panic("no return value specified for GID") } var r0 int var r1 error if returnFunc, ok := ret.Get(0).(func(context.Context, string) (int, error)); ok { return returnFunc(ctx, containerID) } if returnFunc, ok := ret.Get(0).(func(context.Context, string) int); ok { r0 = returnFunc(ctx, containerID) } else { r0 = ret.Get(0).(int) } if returnFunc, ok := ret.Get(1).(func(context.Context, string) error); ok { r1 = returnFunc(ctx, containerID) } else { r1 = ret.Error(1) } return r0, r1 } // MockInspect_GID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'GID' type MockInspect_GID_Call struct { *mock.Call } // GID is a helper method to define mock.On call // - ctx context.Context // - containerID string func (_e *MockInspect_Expecter) GID(ctx interface{}, containerID interface{}) *MockInspect_GID_Call { return &MockInspect_GID_Call{Call: _e.mock.On("GID", ctx, containerID)} } func (_c *MockInspect_GID_Call) Run(run func(ctx context.Context, containerID string)) *MockInspect_GID_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } var arg1 string if args[1] != nil { arg1 = args[1].(string) } run( arg0, arg1, ) }) return _c } func (_c *MockInspect_GID_Call) Return(n int, err error) *MockInspect_GID_Call { _c.Call.Return(n, 
err) return _c } func (_c *MockInspect_GID_Call) RunAndReturn(run func(ctx context.Context, containerID string) (int, error)) *MockInspect_GID_Call { _c.Call.Return(run) return _c } // IsRoot provides a mock function for the type MockInspect func (_mock *MockInspect) IsRoot(ctx context.Context, imageID string) (bool, error) { ret := _mock.Called(ctx, imageID) if len(ret) == 0 { panic("no return value specified for IsRoot") } var r0 bool var r1 error if returnFunc, ok := ret.Get(0).(func(context.Context, string) (bool, error)); ok { return returnFunc(ctx, imageID) } if returnFunc, ok := ret.Get(0).(func(context.Context, string) bool); ok { r0 = returnFunc(ctx, imageID) } else { r0 = ret.Get(0).(bool) } if returnFunc, ok := ret.Get(1).(func(context.Context, string) error); ok { r1 = returnFunc(ctx, imageID) } else { r1 = ret.Error(1) } return r0, r1 } // MockInspect_IsRoot_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'IsRoot' type MockInspect_IsRoot_Call struct { *mock.Call } // IsRoot is a helper method to define mock.On call // - ctx context.Context // - imageID string func (_e *MockInspect_Expecter) IsRoot(ctx interface{}, imageID interface{}) *MockInspect_IsRoot_Call { return &MockInspect_IsRoot_Call{Call: _e.mock.On("IsRoot", ctx, imageID)} } func (_c *MockInspect_IsRoot_Call) Run(run func(ctx context.Context, imageID string)) *MockInspect_IsRoot_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } var arg1 string if args[1] != nil { arg1 = args[1].(string) } run( arg0, arg1, ) }) return _c } func (_c *MockInspect_IsRoot_Call) Return(b bool, err error) *MockInspect_IsRoot_Call { _c.Call.Return(b, err) return _c } func (_c *MockInspect_IsRoot_Call) RunAndReturn(run func(ctx context.Context, imageID string) (bool, error)) *MockInspect_IsRoot_Call { _c.Call.Return(run) return _c } // UID provides a mock function for the type MockInspect func 
(_mock *MockInspect) UID(ctx context.Context, containerID string) (int, error) { ret := _mock.Called(ctx, containerID) if len(ret) == 0 { panic("no return value specified for UID") } var r0 int var r1 error if returnFunc, ok := ret.Get(0).(func(context.Context, string) (int, error)); ok { return returnFunc(ctx, containerID) } if returnFunc, ok := ret.Get(0).(func(context.Context, string) int); ok { r0 = returnFunc(ctx, containerID) } else { r0 = ret.Get(0).(int) } if returnFunc, ok := ret.Get(1).(func(context.Context, string) error); ok { r1 = returnFunc(ctx, containerID) } else { r1 = ret.Error(1) } return r0, r1 } // MockInspect_UID_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UID' type MockInspect_UID_Call struct { *mock.Call } // UID is a helper method to define mock.On call // - ctx context.Context // - containerID string func (_e *MockInspect_Expecter) UID(ctx interface{}, containerID interface{}) *MockInspect_UID_Call { return &MockInspect_UID_Call{Call: _e.mock.On("UID", ctx, containerID)} } func (_c *MockInspect_UID_Call) Run(run func(ctx context.Context, containerID string)) *MockInspect_UID_Call { _c.Call.Run(func(args mock.Arguments) { var arg0 context.Context if args[0] != nil { arg0 = args[0].(context.Context) } var arg1 string if args[1] != nil { arg1 = args[1].(string) } run( arg0, arg1, ) }) return _c } func (_c *MockInspect_UID_Call) Return(n int, err error) *MockInspect_UID_Call { _c.Call.Return(n, err) return _c } func (_c *MockInspect_UID_Call) RunAndReturn(run func(ctx context.Context, containerID string) (int, error)) *MockInspect_UID_Call { _c.Call.Return(run) return _c } ================================================ FILE: executors/docker/internal/user/user.go ================================================ package user import ( "bytes" "context" "errors" "fmt" "strconv" "strings" "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/exec" 
	"gitlab.com/gitlab-org/gitlab-runner/helpers/docker"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/limitwriter"
)

// Shell commands executed inside a container to discover the effective user
// and group IDs.
const (
	commandIDU = "id -u"
	commandIDG = "id -g"
)

// errIDNoOutput is returned when the `id` command produced nothing on stdout.
var errIDNoOutput = errors.New("id command returned no output on stdout")

// Inspect discovers user-related properties of images and running containers.
type Inspect interface {
	// IsRoot reports whether the image's configured user is root (an absent
	// Config, an empty user, and the literal "root" all count as root).
	IsRoot(ctx context.Context, imageID string) (bool, error)
	// UID returns the effective user ID inside the given container.
	UID(ctx context.Context, containerID string) (int, error)
	// GID returns the effective group ID inside the given container.
	GID(ctx context.Context, containerID string) (int, error)
}

// NewInspect returns an Inspect backed by the docker API client (image
// inspection) and the exec helper (running `id` inside containers).
func NewInspect(c docker.Client, exec exec.Docker) Inspect {
	return &defaultInspect{
		c:    c,
		exec: exec,
	}
}

type defaultInspect struct {
	c    docker.Client
	exec exec.Docker
}

// IsRoot inspects the image config. Note: on inspection failure it reports
// true (root) alongside the error — callers treat "unknown" as root.
func (i *defaultInspect) IsRoot(ctx context.Context, imageID string) (bool, error) {
	img, _, err := i.c.ImageInspectWithRaw(ctx, imageID)
	if err != nil {
		return true, fmt.Errorf("inspecting container %q image: %w", imageID, err)
	}

	if img.Config == nil || img.Config.User == "" || img.Config.User == "root" {
		return true, nil
	}

	return false, nil
}

func (i *defaultInspect) UID(ctx context.Context, containerID string) (int, error) {
	return i.executeCommand(ctx, containerID, commandIDU)
}

func (i *defaultInspect) GID(ctx context.Context, containerID string) (int, error) {
	return i.executeCommand(ctx, containerID, commandIDG)
}

// retrieveLastLine returns the last line of s after trimming surrounding
// whitespace; only the final line of output is used (earlier lines, e.g.
// shell noise before the `id` result, are ignored).
func retrieveLastLine(s string) string {
	lines := strings.Split(strings.TrimSpace(s), "\n")
	return lines[len(lines)-1]
}

// executeCommand feeds command to the container via stdin, captures stdout
// and stderr (each capped at 1024 bytes via limitwriter), and parses the
// last stdout line as an integer ID. Stderr content is included in error
// messages to aid debugging.
func (i *defaultInspect) executeCommand(ctx context.Context, containerID string, command string) (int, error) {
	stdout := new(bytes.Buffer)
	stderr := new(bytes.Buffer)

	streams := exec.IOStreams{
		Stdin:  strings.NewReader(command),
		Stdout: limitwriter.New(stdout, 1024),
		Stderr: limitwriter.New(stderr, 1024),
	}

	err := i.exec.Exec(ctx, containerID, streams, nil)
	if err != nil {
		return 0, fmt.Errorf("executing %q on container %q: %w", command, containerID, err)
	}

	stdoutContent := retrieveLastLine(stdout.String())
	stderrContent := strings.TrimSpace(stderr.String())

	if len(stdoutContent) < 1 {
		return 0, fmt.Errorf("%w (stderr: %s)", errIDNoOutput, stderrContent)
	}

	id, err := strconv.Atoi(stdoutContent)
	if err != nil {
		return 0, fmt.Errorf("parsing %q output: %w (stderr: %s)", command, err, stderrContent)
	}

	return id, nil
}

================================================
FILE: executors/docker/internal/user/user_test.go
================================================
//go:build !integration

package user

import (
	"context"
	"fmt"
	"io"
	"strconv"
	"testing"

	"github.com/docker/docker/api/types/image"
	dockerspec "github.com/moby/docker-image-spec/specs-go/v1"
	ocispec "github.com/opencontainers/image-spec/specs-go/v1"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/exec"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/docker"
)

// TestDefaultInspect_IsRoot exercises every IsRoot branch: inspect failure,
// missing Config, empty user, explicit "root", and a non-root user.
func TestDefaultInspect_IsRoot(t *testing.T) {
	containerID := "container-id"

	tests := map[string]struct {
		setupDockerClientMock func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context)
		expectedIsRoot        bool
		expectedError         error
	}{
		"ImageInspectWithRaw error": {
			setupDockerClientMock: func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context) {
				clientMock.On("ImageInspectWithRaw", expectedCtx, containerID).
					Return(image.InspectResponse{}, nil, assert.AnError).
					Once()
			},
			expectedIsRoot: true,
			expectedError:  assert.AnError,
		},
		"empty Config": {
			setupDockerClientMock: func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context) {
				clientMock.On("ImageInspectWithRaw", expectedCtx, containerID).
					Return(image.InspectResponse{}, nil, nil).
					Once()
			},
			expectedIsRoot: true,
			expectedError:  nil,
		},
		"empty user entry in Config": {
			setupDockerClientMock: func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context) {
				clientMock.On("ImageInspectWithRaw", expectedCtx, containerID).
					Return(image.InspectResponse{Config: &dockerspec.DockerOCIImageConfig{ImageConfig: ocispec.ImageConfig{User: ""}}}, nil, nil).
					Once()
			},
			expectedIsRoot: true,
			expectedError:  nil,
		},
		"user entry in Config set to root": {
			setupDockerClientMock: func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context) {
				clientMock.On("ImageInspectWithRaw", expectedCtx, containerID).
					Return(image.InspectResponse{Config: &dockerspec.DockerOCIImageConfig{ImageConfig: ocispec.ImageConfig{User: "root"}}}, nil, nil).
					Once()
			},
			expectedIsRoot: true,
			expectedError:  nil,
		},
		"user entry in Config set to non-root": {
			setupDockerClientMock: func(t *testing.T, clientMock *docker.MockClient, expectedCtx context.Context) {
				clientMock.On("ImageInspectWithRaw", expectedCtx, containerID).
					Return(image.InspectResponse{Config: &dockerspec.DockerOCIImageConfig{ImageConfig: ocispec.ImageConfig{User: "non-root"}}}, nil, nil).
					Once()
			},
			expectedIsRoot: false,
			expectedError:  nil,
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			clientMock := docker.NewMockClient(t)
			execMock := exec.NewMockDocker(t)

			ctx := t.Context()

			tt.setupDockerClientMock(t, clientMock, ctx)

			inspect := NewInspect(clientMock, execMock)

			isRoot, err := inspect.IsRoot(ctx, containerID)
			if tt.expectedError != nil {
				assert.ErrorIs(t, err, tt.expectedError)
				return
			}

			assert.NoError(t, err)
			assert.Equal(t, tt.expectedIsRoot, isRoot, "user root-status mismatch")
		})
	}
}

// uidAndGidTestCase describes one scenario for the shared UID/GID test driver.
type uidAndGidTestCase struct {
	assertExecMock func(t *testing.T, clientMock *exec.MockDocker, expectedCtx context.Context)
	expectedID     int
	assertError    func(t *testing.T, err error)
}

func TestDefaultInspect_UID(t *testing.T) {
	testDefaultInspectUIDandGID(
		t,
		commandIDU,
		func(inspect Inspect, ctx context.Context, containerID string) (int, error) {
			return inspect.UID(ctx, containerID)
		},
	)
}

func TestDefaultInspect_GID(t *testing.T) {
	testDefaultInspectUIDandGID(
		t,
		commandIDG,
		func(inspect Inspect, ctx context.Context, containerID string) (int, error) {
			return inspect.GID(ctx, containerID)
		},
	)
}

// testDefaultInspectUIDandGID is the shared driver for the UID and GID tests:
// it asserts the expected `id` command is written to the container's stdin
// and simulates various stdout/stderr shapes to exercise the output parsing.
func testDefaultInspectUIDandGID(
	t *testing.T,
	expectedCommand string,
	testCall func(inspect Inspect, ctx context.Context, containerID string) (int, error),
) {
	containerID := "container-id"

	// assertCommand checks that the exact id command was sent on stdin.
	assertCommand := func(t *testing.T, args mock.Arguments) {
		streams, ok := args.Get(2).(exec.IOStreams)
		require.True(t, ok)

		data, err := io.ReadAll(streams.Stdin)
		require.NoError(t, err)
		assert.Equal(t, expectedCommand, string(data))
	}

	// mockOutput writes simulated command output to the captured streams.
	mockOutput := func(t *testing.T, args mock.Arguments, stdout string, stderr string) {
		streams, ok := args.Get(2).(exec.IOStreams)
		require.True(t, ok)

		_, err := fmt.Fprintln(streams.Stdout, stdout)
		require.NoError(t, err)

		_, err = fmt.Fprintln(streams.Stderr, stderr)
		require.NoError(t, err)
	}

	tests := map[string]uidAndGidTestCase{
		"Exec error": {
			assertExecMock: func(t *testing.T, clientMock *exec.MockDocker, expectedCtx context.Context) {
				clientMock.On("Exec", expectedCtx, containerID, mock.Anything, mock.AnythingOfType("wait.GracefulExitFunc")).
					Run(func(args mock.Arguments) {
						assertCommand(t, args)
					}).
					Return(assert.AnError).
					Once()
			},
			expectedID: 0,
			assertError: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, assert.AnError)
			},
		},
		"ID parsing error": {
			assertExecMock: func(t *testing.T, clientMock *exec.MockDocker, expectedCtx context.Context) {
				clientMock.On("Exec", expectedCtx, containerID, mock.Anything, mock.AnythingOfType("wait.GracefulExitFunc")).
					Run(func(args mock.Arguments) {
						assertCommand(t, args)
						mockOutput(t, args, "\n\ntest\n\n", "")
					}).
					Return(nil).
					Once()
			},
			expectedID: 0,
			assertError: func(t *testing.T, err error) {
				var e *strconv.NumError
				assert.ErrorAs(t, err, &e)
			},
		},
		"err output mixed with expected stdout output": {
			assertExecMock: func(t *testing.T, clientMock *exec.MockDocker, expectedCtx context.Context) {
				clientMock.On("Exec", expectedCtx, containerID, mock.Anything, mock.AnythingOfType("wait.GracefulExitFunc")).
					Run(func(args mock.Arguments) {
						assertCommand(t, args)
						mockOutput(t, args, "\n\n123\n\n", "Some mixed error output")
					}).
					Return(nil).
					Once()
			},
			expectedID:  123,
			assertError: nil,
		},
		"empty output of the id command": {
			assertExecMock: func(t *testing.T, clientMock *exec.MockDocker, expectedCtx context.Context) {
				clientMock.On("Exec", expectedCtx, containerID, mock.Anything, mock.AnythingOfType("wait.GracefulExitFunc")).
					Run(func(args mock.Arguments) {
						assertCommand(t, args)
						mockOutput(t, args, "\n\n\n\n", "")
					}).
					Return(nil).
					Once()
			},
			expectedID: 0,
			assertError: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, errIDNoOutput)
			},
		},
		"proper ID received from output": {
			assertExecMock: func(t *testing.T, clientMock *exec.MockDocker, expectedCtx context.Context) {
				clientMock.On("Exec", expectedCtx, containerID, mock.Anything, mock.AnythingOfType("wait.GracefulExitFunc")).
					Run(func(args mock.Arguments) {
						assertCommand(t, args)
						mockOutput(t, args, "\n\n123\n\n", "")
					}).
					Return(nil).
					Once()
			},
			expectedID:  123,
			assertError: nil,
		},
		"blank lines": {
			assertExecMock: func(t *testing.T, clientMock *exec.MockDocker, expectedCtx context.Context) {
				clientMock.On("Exec", expectedCtx, containerID, mock.Anything, mock.AnythingOfType("wait.GracefulExitFunc")).
					Run(func(args mock.Arguments) {
						assertCommand(t, args)
						mockOutput(t, args, " \n \n ", "")
					}).
					Return(nil).
					Once()
			},
			expectedID: 0,
			assertError: func(t *testing.T, err error) {
				assert.ErrorIs(t, err, errIDNoOutput)
			},
		},
		"empty lines received at the end of the output": {
			assertExecMock: func(t *testing.T, clientMock *exec.MockDocker, expectedCtx context.Context) {
				clientMock.On("Exec", expectedCtx, containerID, mock.Anything, mock.AnythingOfType("wait.GracefulExitFunc")).
					Run(func(args mock.Arguments) {
						assertCommand(t, args)
						mockOutput(t, args, "1000 \n \n", "")
					}).
					Return(nil).
					Once()
			},
			expectedID:  1000,
			assertError: nil,
		},
		"ID received at the end of the output": {
			assertExecMock: func(t *testing.T, clientMock *exec.MockDocker, expectedCtx context.Context) {
				clientMock.On("Exec", expectedCtx, containerID, mock.Anything, mock.AnythingOfType("wait.GracefulExitFunc")).
					Run(func(args mock.Arguments) {
						assertCommand(t, args)
						mockOutput(t, args, `
Hello world
1000`, "")
					}).
					Return(nil).
					Once()
			},
			expectedID:  1000,
			assertError: nil,
		},
	}

	for tn, tt := range tests {
		t.Run(tn, func(t *testing.T) {
			clientMock := docker.NewMockClient(t)
			execMock := exec.NewMockDocker(t)

			ctx := t.Context()

			tt.assertExecMock(t, execMock, ctx)

			inspect := NewInspect(clientMock, execMock)

			id, err := testCall(inspect, ctx, containerID)
			assert.Equal(t, tt.expectedID, id)
			if tt.assertError != nil {
				tt.assertError(t, err)
				return
			}
			assert.NoError(t, err)
		})
	}
}

================================================
FILE: executors/docker/internal/volumes/manager.go
================================================
package volumes

import (
	"context"
	"fmt"
	"strconv"

	"github.com/docker/docker/api/types/volume"

	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/labels"
	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/volumes/parser"
	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/volumes/permission"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/docker"
)

const protectedSuffix = "-protected"

// Manager creates and tracks Docker volume bindings for a build.
type Manager interface {
	Create(ctx context.Context, volume string) error
	CreateTemporary(ctx context.Context, destination string) error
	RemoveTemporary(ctx context.Context) error
	Binds() []string
}

// ManagerConfig carries the settings used when creating volumes.
type ManagerConfig struct {
	CacheDir         string
	BasePath         string
	UniqueName       string
	TemporaryName    string
	DisableCache     bool
	PermissionSetter permission.Setter
	Driver           string
	DriverOpts       map[string]string
	Protected        bool
}

type manager struct {
	config ManagerConfig
	logger debugLogger
	parser parser.Parser
	client docker.Client

	permissionSetter permission.Setter

	labeler
labels.Labeler volumeBindings []string temporaryVolumes []string managedVolumes pathList } func NewManager( logger debugLogger, volumeParser parser.Parser, c docker.Client, config ManagerConfig, labeler labels.Labeler, ) Manager { return &manager{ config: config, logger: logger, parser: volumeParser, client: c, volumeBindings: make([]string, 0), managedVolumes: pathList{}, permissionSetter: config.PermissionSetter, labeler: labeler, } } // Create will create a new Docker volume bind for the specified volume. The // volume can either be a host volume `/src:/dst`, meaning it will mount // something from the host to the container or `/dst` which will create a Docker // volume and mount it to the specified path. func (m *manager) Create(ctx context.Context, volume string) error { if len(volume) < 1 { return nil } parsedVolume, err := m.parser.ParseVolume(volume) if err != nil { return fmt.Errorf("parse volume: %w", err) } switch parsedVolume.Len() { case 2: err = m.addHostVolume(parsedVolume) if err != nil { err = fmt.Errorf("adding host volume: %w", err) } case 1: err = m.addCacheVolume(ctx, parsedVolume) if err != nil { err = fmt.Errorf("adding cache volume: %w", err) } default: err = fmt.Errorf("unsupported volume definition %s", volume) } return err } func (m *manager) addHostVolume(volume *parser.Volume) error { var err error volume.Destination, err = m.absolutePath(volume.Destination) if err != nil { return fmt.Errorf("defining absolute path: %w", err) } err = m.managedVolumes.Add(volume.Destination) if err != nil { return fmt.Errorf("updating managed volume list: %w", err) } m.appendVolumeBind(volume) return nil } func (m *manager) absolutePath(dir string) (string, error) { if m.parser.Path().IsRoot(dir) { return "", errDirectoryIsRootPath } if m.parser.Path().IsAbs(dir) { return dir, nil } return m.parser.Path().Join(m.config.BasePath, dir), nil } func (m *manager) appendVolumeBind(volume *parser.Volume) { m.logger.Debugln(fmt.Sprintf("Using host-based %q for 
%q...", volume.Source, volume.Destination)) m.volumeBindings = append(m.volumeBindings, volume.Definition()) } func (m *manager) addCacheVolume(ctx context.Context, volume *parser.Volume) error { // disable cache for automatic container cache, // but leave it for host volumes (they are shared on purpose) if m.config.DisableCache { m.logger.Debugln(fmt.Sprintf("Cache containers feature is disabled, creating non-reusable volume for %q", volume.Destination)) volumeName, err := m.createCacheVolume(ctx, volume.Destination, false) if err != nil { return err } m.temporaryVolumes = append(m.temporaryVolumes, volumeName) return nil } if m.config.CacheDir != "" { return m.createHostBasedCacheVolume(volume.Destination) } _, err := m.createCacheVolume(ctx, volume.Destination, true) return err } func (m *manager) createHostBasedCacheVolume(destination string) error { destination, err := m.absolutePath(destination) if err != nil { return fmt.Errorf("defining absolute path: %w", err) } err = m.managedVolumes.Add(destination) if err != nil { return fmt.Errorf("updating managed volumes list: %w", err) } // The leaf directory dir has a name with a length of: // - 42 chars when protected // - 32 chars when not protected (the length of the md5sum only) dir := m.withProtected(hashPath(destination)) hostPath := m.parser.Path().Join(m.config.CacheDir, m.config.UniqueName, dir) m.appendVolumeBind(&parser.Volume{ Source: hostPath, Destination: destination, }) return nil } func (m *manager) createCacheVolume( ctx context.Context, destination string, reusable bool, ) (string, error) { destination, err := m.absolutePath(destination) if err != nil { return "", fmt.Errorf("defining absolute path: %w", err) } err = m.managedVolumes.Add(destination) if err != nil { return "", fmt.Errorf("updating managed volumes list: %w", err) } hashedDestination := hashPath(destination) name := m.config.TemporaryName if reusable { name = m.config.UniqueName } // volumeName might get quite long. 
Docker is however happy to create volumes with long names. There is the "myth" // that volume names are treated like DNS labels, and thus only allow a length of 63 chars, however that does not hold // true. In fact, we already create way longer names, and would catch those issues in various integration tests. volumeName := m.withProtected(fmt.Sprintf("%s-cache-%s", name, hashedDestination)) vBody := volume.CreateOptions{ Name: volumeName, Driver: m.config.Driver, DriverOpts: m.config.DriverOpts, Labels: m.labeler.Labels(map[string]string{ "destination": destination, "protected": strconv.FormatBool(m.config.Protected), "type": "cache", }), } v, err := m.client.VolumeCreate(ctx, vBody) if err != nil { return "", fmt.Errorf("creating docker volume: %w", err) } if m.permissionSetter != nil { err = m.permissionSetter.Set(ctx, v.Name, m.labeler.Labels(map[string]string{"type": "cache-init"})) if err != nil { return "", fmt.Errorf("set volume permissions: %w", err) } } m.appendVolumeBind(&parser.Volume{ Source: v.Name, Destination: destination, }) m.logger.Debugln(fmt.Sprintf("Using volume %q as cache %q...", v.Name, destination)) return volumeName, nil } // CreateTemporary will create a volume, and mark it as temporary. When a volume // is marked as temporary it means that it should be cleaned up at some point. // It's up to the caller to clean up the temporary volumes by calling // `RemoveTemporary`. func (m *manager) CreateTemporary(ctx context.Context, destination string) error { volumeName, err := m.createCacheVolume(ctx, destination, false) if err != nil { return fmt.Errorf("creating cache volume: %w", err) } m.temporaryVolumes = append(m.temporaryVolumes, volumeName) return nil } // RemoveTemporary will remove all the volumes that are marked as temporary. If // the volume is not found the error is ignored, any other error is returned to // the caller. 
func (m *manager) RemoveTemporary(ctx context.Context) error {
	for _, v := range m.temporaryVolumes {
		err := m.client.VolumeRemove(ctx, v, true)
		// an already-gone volume is fine: removal is best-effort cleanup
		if docker.IsErrNotFound(err) {
			m.logger.Debugln(fmt.Sprintf("volume not found: %q", v))
			continue
		}
		if err != nil {
			return err
		}
	}

	return nil
}

// Binds returns all the bindings that the volume manager is aware of.
func (m *manager) Binds() []string {
	return m.volumeBindings
}

// withProtected returns a string with a specific suffix when the config states, we are running against a protected
// ref, or when any of the cache keys includes the `-protected` suffix.
// See https://gitlab.com/gitlab-org/gitlab/-/work_items/494478.
func (m *manager) withProtected(s string) string {
	if !m.config.Protected {
		return s
	}

	return s + protectedSuffix
}

================================================
FILE: executors/docker/internal/volumes/manager_integration_test.go
================================================
//go:build integration

package volumes_test

import (
	"context"
	"crypto/md5"
	"fmt"
	"testing"

	logrustest "github.com/sirupsen/logrus/hooks/test"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/common/spec"
	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/labels"
	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/volumes"
	"gitlab.com/gitlab-org/gitlab-runner/helpers"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/docker"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/test"
)

// TestCreateVolumesLabels verifies that a cache volume created against a real
// Docker daemon carries the expected gitlab-runner labels.
func TestCreateVolumesLabels(t *testing.T) {
	helpers.SkipIntegrationTests(t, "docker", "info")

	successfulJobResponse, err := common.GetRemoteSuccessfulBuild()
	require.NoError(t, err)

	client, err := docker.New(docker.Credentials{})
	require.NoError(t, err, "should be able to connect to docker")
	defer client.Close()

	successfulJobResponse.GitInfo.RepoURL = "https://user:pass@gitlab.example.com/namespace/project.git"

	build :=
&common.Build{ ProjectRunnerID: 0, Runner: &common.RunnerConfig{ RunnerCredentials: common.RunnerCredentials{Token: "test-token"}, }, Job: successfulJobResponse, } build.Variables = spec.Variables{ {Key: "CI_PIPELINE_ID", Value: "1"}, } logger, _ := logrustest.NewNullLogger() cfg := volumes.ManagerConfig{ CacheDir: "", BasePath: "", UniqueName: t.Name(), DisableCache: false, } volumeParser := parserCreator(build.GetAllVariables().ExpandValue) manager := volumes.NewManager(logger, volumeParser, client, cfg, labels.NewLabeler(build)) ctx := context.Background() err = manager.Create(ctx, testCreateVolumesLabelsDestinationPath) assert.NoError(t, err) name := fmt.Sprintf("%s-cache-%x", t.Name(), md5.Sum([]byte(testCreateVolumesLabelsDestinationPath))) defer func() { err = client.VolumeRemove(ctx, name, true) assert.NoError(t, err) }() volume, err := client.VolumeInspect(ctx, name) require.NoError(t, err) assert.Equal(t, map[string]string{ "com.gitlab.gitlab-runner.job.before_sha": "1ea27a9695f80d7816d9e8ce025d9b2df83d0dd7", "com.gitlab.gitlab-runner.job.id": "0", "com.gitlab.gitlab-runner.job.url": "https://gitlab.example.com/namespace/project/-/jobs/0", "com.gitlab.gitlab-runner.job.ref": "main", "com.gitlab.gitlab-runner.job.sha": "69b18e5ed3610cf646119c3e38f462c64ec462b7", "com.gitlab.gitlab-runner.job.timeout": "2h0m0s", "com.gitlab.gitlab-runner.managed": "true", "com.gitlab.gitlab-runner.pipeline.id": "1", "com.gitlab.gitlab-runner.project.id": "0", "com.gitlab.gitlab-runner.project.runner_id": "0", "com.gitlab.gitlab-runner.runner.id": "test-toke", "com.gitlab.gitlab-runner.runner.local_id": "0", "com.gitlab.gitlab-runner.runner.system_id": "", "com.gitlab.gitlab-runner.type": "cache", "com.gitlab.gitlab-runner.destination": testCreateVolumesDriverOptsDestinationPath, "com.gitlab.gitlab-runner.protected": "false", }, volume.Labels) } func TestCreateVolumesDriverOpts(t *testing.T) { helpers.SkipIntegrationTests(t, "docker", "info") // Windows local driver does not 
accept volume driver options. test.SkipIfGitLabCIOn(t, test.OSWindows) successfulJobResponse, err := common.GetRemoteSuccessfulBuild() require.NoError(t, err) client, err := docker.New(docker.Credentials{}) require.NoError(t, err, "should be able to connect to docker") defer client.Close() successfulJobResponse.GitInfo.RepoURL = "https://user:pass@gitlab.example.com/namespace/project.git" build := &common.Build{ ProjectRunnerID: 0, Runner: &common.RunnerConfig{ RunnerCredentials: common.RunnerCredentials{Token: "test-token"}, }, Job: successfulJobResponse, } build.Variables = spec.Variables{ {Key: "CI_PIPELINE_ID", Value: "1"}, } logger, _ := logrustest.NewNullLogger() cfg := volumes.ManagerConfig{ CacheDir: "", BasePath: "", UniqueName: t.Name(), DisableCache: false, DriverOpts: map[string]string{ "type": "tmpfs", "device": "tmpfs", "o": "size=100m,uid=1000", }, } volumeParser := parserCreator(build.GetAllVariables().ExpandValue) manager := volumes.NewManager(logger, volumeParser, client, cfg, labels.NewLabeler(build)) ctx := context.Background() err = manager.Create(ctx, testCreateVolumesDriverOptsDestinationPath) assert.NoError(t, err) name := fmt.Sprintf("%s-cache-%x", t.Name(), md5.Sum([]byte(testCreateVolumesDriverOptsDestinationPath))) defer func() { err = client.VolumeRemove(ctx, name, true) assert.NoError(t, err) }() volume, err := client.VolumeInspect(ctx, name) require.NoError(t, err) assert.Equal(t, map[string]string{"device": "tmpfs", "o": "size=100m,uid=1000", "type": "tmpfs"}, volume.Options) } ================================================ FILE: executors/docker/internal/volumes/manager_integration_unix_test.go ================================================ //go:build integration && (aix || android || darwin || dragonfly || freebsd || hurd || illumos || linux || netbsd || openbsd || solaris) package volumes_test import ( "gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/volumes/parser" ) var testCreateVolumesLabelsDestinationPath = 
"/test"

var testCreateVolumesDriverOptsDestinationPath = "/test"

// parserCreator returns the Linux volume parser used by the integration tests
// on unix-like platforms.
func parserCreator(varExpander func(string) string) parser.Parser {
	return parser.NewLinuxParser(varExpander)
}

================================================
FILE: executors/docker/internal/volumes/manager_integration_windows_test.go
================================================
//go:build integration

package volumes_test

import (
	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/volumes/parser"
)

var (
	testCreateVolumesLabelsDestinationPath     = `C:\test`
	testCreateVolumesDriverOptsDestinationPath = `C:\test`
)

// parserCreator returns the Windows volume parser used by the integration
// tests on Windows (GOOS constraint comes from the _windows file suffix).
func parserCreator(varExpander func(string) string) parser.Parser {
	return parser.NewWindowsParser(varExpander)
}

================================================
FILE: executors/docker/internal/volumes/manager_test.go
================================================
//go:build !integration

package volumes

import (
	"errors"
	"testing"

	"github.com/docker/docker/api/types/volume"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/common"
	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/labels"
	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/volumes/parser"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/docker"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/docker/test"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/path"
)

func TestErrVolumeAlreadyDefined(t *testing.T) {
	err := NewErrVolumeAlreadyDefined("test-path")
	assert.EqualError(t, err, `volume for container path "test-path" is already defined`)
}

func TestNewDefaultManager(t *testing.T) {
	logger := newMockDebugLogger(t)

	m := NewManager(logger, nil, nil, ManagerConfig{}, nil)
	assert.IsType(t, &manager{}, m)
}

// newDefaultManager builds a bare manager with a permissive debug logger for
// use in unit tests; parser and client are wired in by the callers.
func newDefaultManager(t *testing.T, config ManagerConfig) *manager {
	b := &common.Build{
		Runner: &common.RunnerConfig{},
	}
	loggerMock := newMockDebugLogger(t)
	loggerMock.On("Debugln",
mock.Anything).Maybe() m := &manager{ logger: loggerMock, config: config, managedVolumes: make(map[string]bool), labeler: labels.NewLabeler(b), } return m } func addUnixParser(t *testing.T, manager *manager) *parser.MockParser { return addParser(t, manager, path.NewUnixPath()) } func addParser(t *testing.T, manager *manager, p parser.Path) *parser.MockParser { parserMock := parser.NewMockParser(t) parserMock.On("Path").Return(p) manager.parser = parserMock return parserMock } func TestDefaultManager_CreateUserVolumes_HostVolume(t *testing.T) { existingBinding := "/host:/duplicated" testCases := map[string]struct { volume string parsedVolume *parser.Volume basePath string expectedBinding []string expectedError error }{ "no volumes specified": { volume: "", expectedBinding: []string{existingBinding}, }, "volume with absolute path": { volume: "/host:/volume", parsedVolume: &parser.Volume{Source: "/host", Destination: "/volume"}, expectedBinding: []string{existingBinding, "/host:/volume"}, }, "volume with absolute path and with basePath specified": { volume: "/host:/volume", parsedVolume: &parser.Volume{Source: "/host", Destination: "/volume"}, basePath: "/builds", expectedBinding: []string{existingBinding, "/host:/volume"}, }, "volume without absolute path and without basePath specified": { volume: "/host:volume", parsedVolume: &parser.Volume{Source: "/host", Destination: "volume"}, expectedBinding: []string{existingBinding, "/host:volume"}, }, "volume without absolute path and with basePath specified": { volume: "/host:volume", parsedVolume: &parser.Volume{Source: "/host", Destination: "volume"}, basePath: "/builds/project", expectedBinding: []string{existingBinding, "/host:/builds/project/volume"}, }, "duplicated volume specification": { volume: "/host/new:/duplicated", parsedVolume: &parser.Volume{Source: "/host/new", Destination: "/duplicated"}, expectedBinding: []string{existingBinding}, expectedError: NewErrVolumeAlreadyDefined("/duplicated"), }, "volume with 
mode specified": { volume: "/host/new:/my/path:ro", parsedVolume: &parser.Volume{Source: "/host/new", Destination: "/my/path", Mode: "ro"}, expectedBinding: []string{existingBinding, "/host/new:/my/path:ro"}, }, "root volume specified": { volume: "/host/new:/:ro", parsedVolume: &parser.Volume{Source: "/host/new", Destination: "/", Mode: "ro"}, expectedBinding: []string{existingBinding}, expectedError: errDirectoryIsRootPath, }, } for testName, testCase := range testCases { t.Run(testName, func(t *testing.T) { config := ManagerConfig{ BasePath: testCase.basePath, } m := newDefaultManager(t, config) volumeParser := addUnixParser(t, m) volumeParser.On("ParseVolume", existingBinding). Return(&parser.Volume{Source: "/host", Destination: "/duplicated"}, nil). Once() err := m.Create(t.Context(), existingBinding) require.NoError(t, err) if testCase.volume != "" { volumeParser.On("ParseVolume", testCase.volume). Return(testCase.parsedVolume, nil). Once() } err = m.Create(t.Context(), testCase.volume) assert.ErrorIs(t, err, testCase.expectedError) assert.Equal(t, testCase.expectedBinding, m.volumeBindings) }) } } func TestDefaultManager_CreateUserVolumes_CacheVolume_Disabled(t *testing.T) { existingBinding := "/host:/duplicated" testCases := map[string]struct { volume string parsedVolume *parser.Volume basePath string temporaryName string protected bool expectedVolumeCreateOpts *volume.CreateOptions expectedBindings []string expectedTemporary []string expectedError error }{ "no volumes specified": { volume: "", expectedBindings: []string{existingBinding}, }, "volume with absolute path, without basePath and with disableCache": { volume: "/volume", parsedVolume: &parser.Volume{Destination: "/volume"}, basePath: "", temporaryName: "temporary", expectedVolumeCreateOpts: testVolumeCreatOpts("temporary-cache-14331bf18c8e434c4b3f48a8c5cc79aa", map[string]string{ "destination": "/volume", }), expectedBindings: []string{ existingBinding, 
"temporary-cache-14331bf18c8e434c4b3f48a8c5cc79aa:/volume", }, expectedTemporary: []string{"temporary-cache-14331bf18c8e434c4b3f48a8c5cc79aa"}, }, "volume with absolute path, with basePath and with disableCache": { volume: "/volume", parsedVolume: &parser.Volume{Destination: "/volume"}, basePath: "/builds/project", temporaryName: "temporary", expectedVolumeCreateOpts: testVolumeCreatOpts("temporary-cache-14331bf18c8e434c4b3f48a8c5cc79aa", map[string]string{ "destination": "/volume", }), expectedBindings: []string{ existingBinding, "temporary-cache-14331bf18c8e434c4b3f48a8c5cc79aa:/volume", }, expectedTemporary: []string{"temporary-cache-14331bf18c8e434c4b3f48a8c5cc79aa"}, }, "volume without absolute path, without basePath and with disableCache": { volume: "volume", parsedVolume: &parser.Volume{Destination: "volume"}, temporaryName: "temporary", expectedVolumeCreateOpts: testVolumeCreatOpts("temporary-cache-210ab9e731c9c36c2c38db15c28a8d1c", map[string]string{ "destination": "volume", }), expectedBindings: []string{ existingBinding, "temporary-cache-210ab9e731c9c36c2c38db15c28a8d1c:volume", }, expectedTemporary: []string{"temporary-cache-210ab9e731c9c36c2c38db15c28a8d1c"}, }, "volume without absolute path, with basePath and with disableCache": { volume: "volume", parsedVolume: &parser.Volume{Destination: "volume"}, basePath: "/builds/project", temporaryName: "temporary", expectedVolumeCreateOpts: testVolumeCreatOpts("temporary-cache-f69aef9fb01e88e6213362a04877452d", map[string]string{ "destination": "/builds/project/volume", }), expectedBindings: []string{ existingBinding, "temporary-cache-f69aef9fb01e88e6213362a04877452d:/builds/project/volume", }, expectedTemporary: []string{"temporary-cache-f69aef9fb01e88e6213362a04877452d"}, }, "duplicated volume definition": { volume: "/duplicated", parsedVolume: &parser.Volume{Destination: "/duplicated"}, basePath: "", temporaryName: "temporary", expectedBindings: []string{existingBinding}, expectedError: 
NewErrVolumeAlreadyDefined("/duplicated"), }, "volume is root": { volume: "/", parsedVolume: &parser.Volume{Destination: "/"}, temporaryName: "temporary", expectedBindings: []string{existingBinding}, expectedError: errDirectoryIsRootPath, }, "protected": { volume: "some-volume", parsedVolume: &parser.Volume{Destination: "some-volume"}, basePath: "/some/base/path", temporaryName: "some-temporary", protected: true, expectedVolumeCreateOpts: testVolumeCreatOpts("some-temporary-cache-804b0f6b0d757899a37145f9d7f3848e-protected", map[string]string{ "destination": "/some/base/path/some-volume", "protected": "true", }), expectedBindings: []string{ existingBinding, "some-temporary-cache-804b0f6b0d757899a37145f9d7f3848e-protected:/some/base/path/some-volume", }, expectedTemporary: []string{"some-temporary-cache-804b0f6b0d757899a37145f9d7f3848e-protected"}, }, } for testName, testCase := range testCases { t.Run(testName, func(t *testing.T) { config := ManagerConfig{ BasePath: testCase.basePath, DisableCache: true, TemporaryName: testCase.temporaryName, Protected: testCase.protected, } m := newDefaultManager(t, config) volumeParser := addUnixParser(t, m) mClient := docker.NewMockClient(t) m.client = mClient volumeParser.On("ParseVolume", "/host:/duplicated"). Return(&parser.Volume{Source: "/host", Destination: "/duplicated"}, nil). Once() if createOpts := testCase.expectedVolumeCreateOpts; createOpts != nil { mClient. On("VolumeCreate", mock.Anything, *createOpts). Return(volume.Volume{Name: createOpts.Name}, nil). Once() } err := m.Create(t.Context(), "/host:/duplicated") require.NoError(t, err) if testCase.volume != "" { volumeParser.On("ParseVolume", testCase.volume). Return(testCase.parsedVolume, nil). 
Once() } err = m.Create(t.Context(), testCase.volume) if testCase.expectedError != nil { assert.ErrorIs(t, err, testCase.expectedError) } else { assert.NoError(t, err) } assert.Equal(t, testCase.expectedBindings, m.Binds()) assert.Equal(t, testCase.expectedTemporary, m.temporaryVolumes) }) } } func TestDefaultManager_CreateUserVolumes_CacheVolume_HostBased(t *testing.T) { existingBinding := "/host:/duplicated" testCases := map[string]struct { volume string basePath string uniqueName string protected bool expectedBinding []string expectedError error }{ "volume with absolute path, without basePath": { volume: "/volume", uniqueName: "uniq", expectedBinding: []string{ existingBinding, "/cache/uniq/14331bf18c8e434c4b3f48a8c5cc79aa:/volume", }, }, "volume with absolute path, with basePath": { volume: "/volume", basePath: "/builds/project", uniqueName: "uniq", expectedBinding: []string{ existingBinding, "/cache/uniq/14331bf18c8e434c4b3f48a8c5cc79aa:/volume", }, }, "volume without absolute path, without basePath": { volume: "volume", uniqueName: "uniq", expectedBinding: []string{ existingBinding, "/cache/uniq/210ab9e731c9c36c2c38db15c28a8d1c:volume", }, }, "volume without absolute path, with basePath": { volume: "volume", basePath: "/builds/project", uniqueName: "uniq", expectedBinding: []string{ existingBinding, "/cache/uniq/f69aef9fb01e88e6213362a04877452d:/builds/project/volume", }, }, "duplicated volume definition": { volume: "/duplicated", uniqueName: "uniq", expectedBinding: []string{existingBinding}, expectedError: NewErrVolumeAlreadyDefined("/duplicated"), }, "volume is root": { volume: "/", expectedBinding: []string{existingBinding}, expectedError: errDirectoryIsRootPath, }, "protected": { volume: "some-volume", basePath: "/some/base/path", uniqueName: "some-unique-name", protected: true, expectedBinding: []string{ existingBinding, "/cache/some-unique-name/804b0f6b0d757899a37145f9d7f3848e-protected:/some/base/path/some-volume", }, }, } for testName, testCase := 
range testCases { t.Run(testName, func(t *testing.T) { config := ManagerConfig{ BasePath: testCase.basePath, DisableCache: false, CacheDir: "/cache", UniqueName: testCase.uniqueName, Protected: testCase.protected, } m := newDefaultManager(t, config) volumeParser := addUnixParser(t, m) volumeParser.On("ParseVolume", existingBinding). Return(&parser.Volume{Source: "/host", Destination: "/duplicated"}, nil). Once() err := m.Create(t.Context(), existingBinding) require.NoError(t, err) volumeParser.On("ParseVolume", testCase.volume). Return(&parser.Volume{Destination: testCase.volume}, nil). Once() err = m.Create(t.Context(), testCase.volume) assert.ErrorIs(t, err, testCase.expectedError) assert.Equal(t, testCase.expectedBinding, m.volumeBindings) }) } } func TestDefaultManager_CreateUserVolumes_CacheVolume_VolumeBased(t *testing.T) { existingBinding := "/host:/duplicated" testCases := map[string]struct { volume string basePath string uniqueName string protected bool expectedVolumeCreateOpts *volume.CreateOptions expectedBindings []string expectedError error }{ "volume with absolute path, without basePath and with existing volume": { volume: "/volume", basePath: "", uniqueName: "uniq", expectedVolumeCreateOpts: testVolumeCreatOpts("uniq-cache-14331bf18c8e434c4b3f48a8c5cc79aa", map[string]string{ "destination": "/volume", }), expectedBindings: []string{ existingBinding, "uniq-cache-14331bf18c8e434c4b3f48a8c5cc79aa:/volume", }, }, "volume without absolute path, with basePath": { volume: "volume", basePath: "/builds/project", uniqueName: "uniq", expectedVolumeCreateOpts: testVolumeCreatOpts("uniq-cache-f69aef9fb01e88e6213362a04877452d", map[string]string{ "destination": "/builds/project/volume", }), expectedBindings: []string{ existingBinding, "uniq-cache-f69aef9fb01e88e6213362a04877452d:/builds/project/volume", }, }, "volume is root": { volume: "/", basePath: "", uniqueName: "uniq", expectedError: errDirectoryIsRootPath, }, "duplicated volume definition": { volume: 
"/duplicated", uniqueName: "uniq", expectedError: NewErrVolumeAlreadyDefined("/duplicated"), }, "protected": { volume: "some/volume", basePath: "/some/base/path", uniqueName: "some-unique-name", protected: true, expectedVolumeCreateOpts: testVolumeCreatOpts("some-unique-name-cache-7ee4ee58453a23f50e3e88641d9e4690-protected", map[string]string{ "destination": "/some/base/path/some/volume", "protected": "true", }), expectedBindings: []string{ existingBinding, "some-unique-name-cache-7ee4ee58453a23f50e3e88641d9e4690-protected:/some/base/path/some/volume", }, }, } for testName, testCase := range testCases { t.Run(testName, func(t *testing.T) { config := ManagerConfig{ BasePath: testCase.basePath, UniqueName: testCase.uniqueName, DisableCache: false, Protected: testCase.protected, } m := newDefaultManager(t, config) volumeParser := addUnixParser(t, m) mClient := docker.NewMockClient(t) m.client = mClient volumeParser.On("ParseVolume", existingBinding). Return(&parser.Volume{Source: "/host", Destination: "/duplicated"}, nil). Once() volumeParser.On("ParseVolume", testCase.volume). Return(&parser.Volume{Destination: testCase.volume}, nil). Once() if createOpts := testCase.expectedVolumeCreateOpts; createOpts != nil { mClient. On("VolumeCreate", mock.Anything, *createOpts). Return(volume.Volume{Name: createOpts.Name}, nil). 
Once() } err := m.Create(t.Context(), existingBinding) require.NoError(t, err) err = m.Create(t.Context(), testCase.volume) if testCase.expectedError != nil { assert.ErrorIs(t, err, testCase.expectedError) return } assert.NoError(t, err) assert.Equal(t, testCase.expectedBindings, m.Binds()) }) } } func TestDefaultManager_CreateUserVolumes_CacheVolume_VolumeBased_WithError(t *testing.T) { testErr := errors.New("test-error") config := ManagerConfig{ BasePath: "/builds/project", UniqueName: "unique", } m := newDefaultManager(t, config) volumeParser := addUnixParser(t, m) mClient := docker.NewMockClient(t) m.client = mClient expectedCreateOpts := testVolumeCreatOpts("unique-cache-f69aef9fb01e88e6213362a04877452d", map[string]string{ "destination": "/builds/project/volume", }) mClient. On("VolumeCreate", mock.Anything, *expectedCreateOpts). Return(volume.Volume{}, testErr). Once() volumeParser.On("ParseVolume", "volume"). Return(&parser.Volume{Destination: "volume"}, nil). Once() err := m.Create(t.Context(), "volume") assert.ErrorIs(t, err, testErr) } func TestDefaultManager_CreateUserVolumes_CacheVolume_Disabled_WithError(t *testing.T) { testErr := errors.New("test-error") config := ManagerConfig{ BasePath: "/builds/project", TemporaryName: "temporary", DisableCache: true, } m := newDefaultManager(t, config) volumeParser := addUnixParser(t, m) mClient := docker.NewMockClient(t) m.client = mClient expectedCreateOpts := testVolumeCreatOpts("temporary-cache-f69aef9fb01e88e6213362a04877452d", map[string]string{ "destination": "/builds/project/volume", }) mClient. On("VolumeCreate", mock.Anything, *expectedCreateOpts). Return(volume.Volume{}, testErr). Once() volumeParser.On("ParseVolume", "volume"). Return(&parser.Volume{Destination: "volume"}, nil). 
Once() err := m.Create(t.Context(), "volume") assert.ErrorIs(t, err, testErr) assert.Empty(t, m.Binds()) assert.Empty(t, m.temporaryVolumes) } func TestDefaultManager_CreateUserVolumes_CacheVolume_Disabled_TracksTemporaryVolumesForCleanup(t *testing.T) { config := ManagerConfig{ BasePath: "/builds/project", TemporaryName: "temporary", DisableCache: true, } m := newDefaultManager(t, config) volumeParser := addUnixParser(t, m) mClient := docker.NewMockClient(t) m.client = mClient createOpts := testVolumeCreatOpts("temporary-cache-f69aef9fb01e88e6213362a04877452d", map[string]string{ "destination": "/builds/project/volume", }) volumeParser.On("ParseVolume", "volume"). Return(&parser.Volume{Destination: "volume"}, nil).Once() mClient.On("VolumeCreate", mock.Anything, *createOpts). Return(volume.Volume{Name: createOpts.Name}, nil).Once() mClient.On("VolumeRemove", mock.Anything, createOpts.Name, true). Return(nil).Once() err := m.Create(t.Context(), "volume") require.NoError(t, err) assert.Equal(t, []string{createOpts.Name}, m.temporaryVolumes) assert.Equal(t, []string{ createOpts.Name + ":/builds/project/volume", }, m.Binds()) err = m.RemoveTemporary(t.Context()) assert.NoError(t, err) } func TestDefaultManager_CreateUserVolumes_ParserError(t *testing.T) { testErr := errors.New("parser-test-error") m := newDefaultManager(t, ManagerConfig{}) volumeParser := parser.NewMockParser(t) m.parser = volumeParser volumeParser.On("ParseVolume", "volume"). Return(nil, testErr). 
		Once()

	err := m.Create(t.Context(), "volume")
	assert.ErrorIs(t, err, testErr)
}

// CreateTemporary table test: covers successful creation, the root-path guard,
// client errors, duplicate destinations, and the protected-volume name suffix.
func TestDefaultManager_CreateTemporary(t *testing.T) {
	volumeCreateErr := errors.New("volume-create")
	existingBinding := "/host:/duplicated"

	testCases := map[string]struct {
		volume                   string
		volumeCreateErr          error
		protected                bool
		expectedVolumeCreateOpts *volume.CreateOptions
		expectedBindings         []string
		// NOTE(review): expectedTemporary is never read by the test body below — confirm
		// whether it should be asserted against m.temporaryVolumes or removed.
		expectedTemporary []string
		expectedError     error
	}{
		"volume created": {
			volume: "volume",
			expectedVolumeCreateOpts: testVolumeCreatOpts("temporary-cache-f69aef9fb01e88e6213362a04877452d", map[string]string{
				"destination": "/builds/project/volume",
			}),
			expectedBindings: []string{
				existingBinding,
				"temporary-cache-f69aef9fb01e88e6213362a04877452d:/builds/project/volume",
			},
		},
		"volume root": {
			// Mounting over the build root is rejected outright.
			volume:        "/",
			expectedError: errDirectoryIsRootPath,
		},
		"volume creation error": {
			volume: "volume",
			expectedVolumeCreateOpts: testVolumeCreatOpts("temporary-cache-f69aef9fb01e88e6213362a04877452d", map[string]string{
				"destination": "/builds/project/volume",
			}),
			volumeCreateErr: volumeCreateErr,
			expectedError:   volumeCreateErr,
		},
		"duplicated volume definition": {
			// /duplicated already registered via existingBinding below.
			volume:        "/duplicated",
			expectedError: &ErrVolumeAlreadyDefined{},
		},
		"protected": {
			volume:    "some/volume",
			protected: true,
			// Protected jobs get a distinct hash and a "-protected" suffix plus label.
			expectedVolumeCreateOpts: testVolumeCreatOpts("temporary-cache-12b6275e06323d2d4872c0c352d0c7dd-protected", map[string]string{
				"destination": "/builds/project/some/volume",
				"protected":   "true",
			}),
			expectedBindings: []string{
				existingBinding,
				"temporary-cache-12b6275e06323d2d4872c0c352d0c7dd-protected:/builds/project/some/volume",
			},
		},
	}

	for testName, testCase := range testCases {
		t.Run(testName, func(t *testing.T) {
			config := ManagerConfig{
				BasePath:      "/builds/project",
				TemporaryName: "temporary",
				Protected:     testCase.protected,
			}

			m := newDefaultManager(t, config)
			volumeParser := addUnixParser(t, m)
			mClient := docker.NewMockClient(t)
			m.client = mClient

			volumeParser.On("ParseVolume", existingBinding).
Return(&parser.Volume{Source: "/host", Destination: "/duplicated"}, nil). Once() var expectedVolumeName string if createOpts := testCase.expectedVolumeCreateOpts; createOpts != nil { expectedVolumeName = createOpts.Name mClient. On("VolumeCreate", mock.Anything, *createOpts). Return(volume.Volume{Name: createOpts.Name}, testCase.volumeCreateErr). Once() } err := m.Create(t.Context(), existingBinding) require.NoError(t, err) err = m.CreateTemporary(t.Context(), testCase.volume) if testCase.expectedError != nil { assert.ErrorIs(t, err, testCase.expectedError) return } require.Len(t, m.temporaryVolumes, 1) assert.Equal(t, m.temporaryVolumes[0], expectedVolumeName) assert.ErrorIs(t, err, testCase.expectedError) assert.Equal(t, testCase.expectedBindings, m.Binds()) }) } } func TestDefaultManager_RemoveTemporary(t *testing.T) { testErr := errors.New("test-err") testCases := map[string]struct { temporaryVolumes []string clientAssertions func(*docker.MockClient) expectedError error }{ "no volumes to remove": { temporaryVolumes: []string{}, clientAssertions: func(c *docker.MockClient) {}, expectedError: nil, }, "all volumes removed": { temporaryVolumes: []string{"volume1", "volume2", "volume3"}, clientAssertions: func(c *docker.MockClient) { c.On("VolumeRemove", mock.Anything, "volume1", true).Return(nil).Once() c.On("VolumeRemove", mock.Anything, "volume2", true).Return(nil).Once() c.On("VolumeRemove", mock.Anything, "volume3", true).Return(nil).Once() }, expectedError: nil, }, "volume not found": { temporaryVolumes: []string{"nonexistent-volume"}, clientAssertions: func(c *docker.MockClient) { c.On("VolumeRemove", mock.Anything, "nonexistent-volume", true).Return(&test.NotFoundError{}).Once() }, expectedError: &test.NotFoundError{}, }, "failed to remove volume": { temporaryVolumes: []string{"volume-name-1"}, clientAssertions: func(c *docker.MockClient) { c.On("VolumeRemove", mock.Anything, "volume-name-1", true).Return(testErr).Once() }, expectedError: testErr, }, } for 
	testName, testCase := range testCases {
		t.Run(testName, func(t *testing.T) {
			mClient := docker.NewMockClient(t)
			testCase.clientAssertions(mClient)

			m := newDefaultManager(t, ManagerConfig{})
			m.client = mClient
			m.temporaryVolumes = testCase.temporaryVolumes

			err := m.RemoveTemporary(t.Context())
			assert.ErrorIs(t, err, testCase.expectedError)
		})
	}
}

// Binds must return exactly the bindings the manager accumulated.
func TestDefaultManager_Binds(t *testing.T) {
	expectedElements := []string{"element1", "element2"}
	m := &manager{
		volumeBindings: expectedElements,
	}

	assert.Equal(t, expectedElements, m.Binds())
}

// testVolumeCreatOpts ("Creat" sic — name kept for existing callers) builds the
// volume.CreateOptions the manager is expected to pass to VolumeCreate: the
// standard com.gitlab.gitlab-runner.* label set (zero-valued job/runner fields,
// as produced by the test manager) merged with additionalLabels.
func testVolumeCreatOpts(name string, additionalLabels map[string]string) *volume.CreateOptions {
	const pre = "com.gitlab.gitlab-runner"
	labels := map[string]string{
		pre + ".type":              "cache",
		pre + ".job.before_sha":    "",
		pre + ".job.id":            "0",
		pre + ".job.ref":           "",
		pre + ".job.sha":           "",
		pre + ".job.url":           "/-/jobs/0",
		pre + ".job.timeout":       "2h0m0s",
		pre + ".managed":           "true",
		pre + ".pipeline.id":       "",
		pre + ".project.id":        "0",
		pre + ".project.runner_id": "0",
		pre + ".protected":         "false",
		pre + ".runner.id":         "",
		pre + ".runner.local_id":   "0",
		pre + ".runner.system_id":  "",
		pre + ".destination":       "",
	}

	// Case-specific labels (e.g. destination, protected) override the defaults.
	for k, v := range additionalLabels {
		labels[pre+"."+k] = v
	}

	return &volume.CreateOptions{
		Name:   name,
		Labels: labels,
	}
}

================================================
FILE: executors/docker/internal/volumes/manager_windows_test.go
================================================
//go:build !integration

package volumes

import (
	"context"
	"errors"
	"strings"
	"testing"

	"github.com/docker/docker/api/types/volume"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"

	"gitlab.com/gitlab-org/gitlab-runner/executors/docker/internal/volumes/parser"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/docker"
	"gitlab.com/gitlab-org/gitlab-runner/helpers/path"
)

// Windows variant of the cache-volume creation test, exercising named-pipe
// destinations through the Windows path/volume parser.
func TestDefaultManager_CreateUserVolumes_CacheVolume_VolumeBased_Windows(t *testing.T) {
	const existingBinding =
		`\\.\pipe\host:\\.\pipe\duplicated`

	testCases := map[string]struct {
		volume                   string
		basePath                 string
		uniqueName               string
		protected                bool
		expectedVolumeCreateOpts *volume.CreateOptions
		expectedBindings         []string
		expectedError            error
	}{
		"pipe name volume specified": {
			volume:     `\\.\pipe\docker_engine`,
			uniqueName: "uniq",
			expectedVolumeCreateOpts: testVolumeCreatOpts("uniq-cache-8abd376d059fcf32b6258f48c760885d", map[string]string{
				"destination": `\\.\pipe\docker_engine`,
			}),
			expectedBindings: []string{`\\.\pipe\host:\\.\pipe\duplicated`, `uniq-cache-8abd376d059fcf32b6258f48c760885d:\\.\pipe\docker_engine`},
		},
		"duplicate pipe name volume specified": {
			// Destination already taken by existingBinding.
			volume:        `\\.\pipe\duplicated`,
			uniqueName:    "uniq",
			expectedError: NewErrVolumeAlreadyDefined(`\\.\pipe\duplicated`),
		},
		"protected": {
			volume:     `\\.\pipe\docker_engine`,
			uniqueName: "uniq",
			protected:  true,
			// Protected jobs get a "-protected" volume-name suffix and label.
			expectedVolumeCreateOpts: testVolumeCreatOpts("uniq-cache-8abd376d059fcf32b6258f48c760885d-protected", map[string]string{
				"destination": `\\.\pipe\docker_engine`,
				"protected":   "true",
			}),
			expectedBindings: []string{`\\.\pipe\host:\\.\pipe\duplicated`, `uniq-cache-8abd376d059fcf32b6258f48c760885d-protected:\\.\pipe\docker_engine`},
		},
	}

	for testName, testCase := range testCases {
		t.Run(testName, func(t *testing.T) {
			config := ManagerConfig{
				BasePath:     testCase.basePath,
				UniqueName:   testCase.uniqueName,
				DisableCache: false,
				Protected:    testCase.protected,
			}

			m := newDefaultManager(t, config)
			volumeParser := addParser(t, m, path.NewWindowsPath())
			mClient := docker.NewMockClient(t)
			m.client = mClient

			// Split "source:destination" on the single separating colon.
			existingBindingParts := strings.Split(existingBinding, ":")
			volumeParser.On("ParseVolume", existingBinding).
				Return(&parser.Volume{Source: existingBindingParts[0], Destination: existingBindingParts[1]}, nil).
				Once()
			volumeParser.On("ParseVolume", testCase.volume).
				Return(&parser.Volume{Destination: testCase.volume}, nil).
				Once()

			if createOpts := testCase.expectedVolumeCreateOpts; createOpts != nil {
				mClient.
				On("VolumeCreate", mock.Anything, *createOpts).
					Return(volume.Volume{Name: createOpts.Name}, nil).
					Once()
			}

			// Register the pre-existing binding first; it must always succeed.
			err := m.Create(context.Background(), existingBinding)
			require.NoError(t, err)

			err = m.Create(context.Background(), testCase.volume)
			if testCase.expectedError != nil {
				assert.True(
					t,
					errors.Is(err, testCase.expectedError),
					"expected err %T, but got %T",
					testCase.expectedError,
					err,
				)
				return
			}

			assert.NoError(t, err)
			assert.Equal(t, testCase.expectedBindings, m.Binds())
		})
	}
}

================================================
FILE: executors/docker/internal/volumes/mocks.go
================================================
// Code generated by mockery; DO NOT EDIT.
// github.com/vektra/mockery
// template: testify

package volumes

import (
	"context"

	mock "github.com/stretchr/testify/mock"
)

// NewMockManager creates a new instance of MockManager. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewMockManager(t interface {
	mock.TestingT
	Cleanup(func())
}) *MockManager {
	mock := &MockManager{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

// MockManager is an autogenerated mock type for the Manager type
type MockManager struct {
	mock.Mock
}

type MockManager_Expecter struct {
	mock *mock.Mock
}

func (_m *MockManager) EXPECT() *MockManager_Expecter {
	return &MockManager_Expecter{mock: &_m.Mock}
}

// Binds provides a mock function for the type MockManager
func (_mock *MockManager) Binds() []string {
	ret := _mock.Called()

	if len(ret) == 0 {
		panic("no return value specified for Binds")
	}

	var r0 []string
	if returnFunc, ok := ret.Get(0).(func() []string); ok {
		r0 = returnFunc()
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).([]string)
		}
	}
	return r0
}

// MockManager_Binds_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Binds'
type MockManager_Binds_Call struct {
	*mock.Call
}

// Binds is a helper method to define mock.On call
func (_e *MockManager_Expecter) Binds() *MockManager_Binds_Call {
	return &MockManager_Binds_Call{Call: _e.mock.On("Binds")}
}

func (_c *MockManager_Binds_Call) Run(run func()) *MockManager_Binds_Call {
	_c.Call.Run(func(args mock.Arguments) {
		run()
	})
	return _c
}

func (_c *MockManager_Binds_Call) Return(strings []string) *MockManager_Binds_Call {
	_c.Call.Return(strings)
	return _c
}

func (_c *MockManager_Binds_Call) RunAndReturn(run func() []string) *MockManager_Binds_Call {
	_c.Call.Return(run)
	return _c
}

// Create provides a mock function for the type MockManager
func (_mock *MockManager) Create(ctx context.Context, volume string) error {
	ret := _mock.Called(ctx, volume)

	if len(ret) == 0 {
		panic("no return value specified for Create")
	}

	var r0 error
	if returnFunc, ok := ret.Get(0).(func(context.Context, string) error); ok {
		r0 = returnFunc(ctx, volume)
	} else {
		r0 = ret.Error(0)
	}
	return r0
}

// MockManager_Create_Call is a *mock.Call that shadows Run/Return
// methods with type explicit version for method 'Create'
type MockManager_Create_Call struct {
	*mock.Call
}

// Create is a helper method to define mock.On call
//   - ctx context.Context
//   - volume string
func (_e *MockManager_Expecter) Create(ctx interface{}, volume interface{}) *MockManager_Create_Call {
	return &MockManager_Create_Call{Call: _e.mock.On("Create", ctx, volume)}
}

func (_c *MockManager_Create_Call) Run(run func(ctx context.Context, volume string)) *MockManager_Create_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 context.Context
		if args[0] != nil {
			arg0 = args[0].(context.Context)
		}
		var arg1 string
		if args[1] != nil {
			arg1 = args[1].(string)
		}
		run(
			arg0,
			arg1,
		)
	})
	return _c
}

func (_c *MockManager_Create_Call) Return(err error) *MockManager_Create_Call {
	_c.Call.Return(err)
	return _c
}

func (_c *MockManager_Create_Call) RunAndReturn(run func(ctx context.Context, volume string) error) *MockManager_Create_Call {
	_c.Call.Return(run)
	return _c
}

// CreateTemporary provides a mock function for the type MockManager
func (_mock *MockManager) CreateTemporary(ctx context.Context, destination string) error {
	ret := _mock.Called(ctx, destination)

	if len(ret) == 0 {
		panic("no return value specified for CreateTemporary")
	}

	var r0 error
	if returnFunc, ok := ret.Get(0).(func(context.Context, string) error); ok {
		r0 = returnFunc(ctx, destination)
	} else {
		r0 = ret.Error(0)
	}
	return r0
}

// MockManager_CreateTemporary_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CreateTemporary'
type MockManager_CreateTemporary_Call struct {
	*mock.Call
}

// CreateTemporary is a helper method to define mock.On call
//   - ctx context.Context
//   - destination string
func (_e *MockManager_Expecter) CreateTemporary(ctx interface{}, destination interface{}) *MockManager_CreateTemporary_Call {
	return &MockManager_CreateTemporary_Call{Call: _e.mock.On("CreateTemporary", ctx, destination)}
}

func (_c *MockManager_CreateTemporary_Call)
Run(run func(ctx context.Context, destination string)) *MockManager_CreateTemporary_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 context.Context
		if args[0] != nil {
			arg0 = args[0].(context.Context)
		}
		var arg1 string
		if args[1] != nil {
			arg1 = args[1].(string)
		}
		run(
			arg0,
			arg1,
		)
	})
	return _c
}

func (_c *MockManager_CreateTemporary_Call) Return(err error) *MockManager_CreateTemporary_Call {
	_c.Call.Return(err)
	return _c
}

func (_c *MockManager_CreateTemporary_Call) RunAndReturn(run func(ctx context.Context, destination string) error) *MockManager_CreateTemporary_Call {
	_c.Call.Return(run)
	return _c
}

// RemoveTemporary provides a mock function for the type MockManager
func (_mock *MockManager) RemoveTemporary(ctx context.Context) error {
	ret := _mock.Called(ctx)

	if len(ret) == 0 {
		panic("no return value specified for RemoveTemporary")
	}

	var r0 error
	if returnFunc, ok := ret.Get(0).(func(context.Context) error); ok {
		r0 = returnFunc(ctx)
	} else {
		r0 = ret.Error(0)
	}
	return r0
}

// MockManager_RemoveTemporary_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'RemoveTemporary'
type MockManager_RemoveTemporary_Call struct {
	*mock.Call
}

// RemoveTemporary is a helper method to define mock.On call
//   - ctx context.Context
func (_e *MockManager_Expecter) RemoveTemporary(ctx interface{}) *MockManager_RemoveTemporary_Call {
	return &MockManager_RemoveTemporary_Call{Call: _e.mock.On("RemoveTemporary", ctx)}
}

func (_c *MockManager_RemoveTemporary_Call) Run(run func(ctx context.Context)) *MockManager_RemoveTemporary_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 context.Context
		if args[0] != nil {
			arg0 = args[0].(context.Context)
		}
		run(
			arg0,
		)
	})
	return _c
}

func (_c *MockManager_RemoveTemporary_Call) Return(err error) *MockManager_RemoveTemporary_Call {
	_c.Call.Return(err)
	return _c
}

func (_c *MockManager_RemoveTemporary_Call) RunAndReturn(run func(ctx context.Context) error) *MockManager_RemoveTemporary_Call {
	_c.Call.Return(run)
	return _c
}

// newMockDebugLogger creates a new instance of mockDebugLogger. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func newMockDebugLogger(t interface {
	mock.TestingT
	Cleanup(func())
}) *mockDebugLogger {
	mock := &mockDebugLogger{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

// mockDebugLogger is an autogenerated mock type for the debugLogger type
type mockDebugLogger struct {
	mock.Mock
}

type mockDebugLogger_Expecter struct {
	mock *mock.Mock
}

func (_m *mockDebugLogger) EXPECT() *mockDebugLogger_Expecter {
	return &mockDebugLogger_Expecter{mock: &_m.Mock}
}

// Debugln provides a mock function for the type mockDebugLogger
func (_mock *mockDebugLogger) Debugln(args ...interface{}) {
	// Flatten the variadic arguments into a single slice for Called.
	var _ca []interface{}
	_ca = append(_ca, args...)
	_mock.Called(_ca...)
	return
}

// mockDebugLogger_Debugln_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Debugln'
type mockDebugLogger_Debugln_Call struct {
	*mock.Call
}

// Debugln is a helper method to define mock.On call
//   - args ...interface{}
func (_e *mockDebugLogger_Expecter) Debugln(args ...interface{}) *mockDebugLogger_Debugln_Call {
	return &mockDebugLogger_Debugln_Call{Call: _e.mock.On("Debugln",
		append([]interface{}{}, args...)...)}
}

func (_c *mockDebugLogger_Debugln_Call) Run(run func(args ...interface{})) *mockDebugLogger_Debugln_Call {
	_c.Call.Run(func(args mock.Arguments) {
		var arg0 []interface{}
		variadicArgs := make([]interface{}, len(args)-0)
		for i, a := range args[0:] {
			if a != nil {
				variadicArgs[i] = a.(interface{})
			}
		}
		arg0 = variadicArgs
		run(
			arg0...,
		)
	})
	return _c
}

func (_c *mockDebugLogger_Debugln_Call) Return() *mockDebugLogger_Debugln_Call {
	_c.Call.Return()
	return _c
}

func (_c *mockDebugLogger_Debugln_Call) RunAndReturn(run func(args ...interface{})) *mockDebugLogger_Debugln_Call {
	_c.Run(run)
	return _c
}

================================================
FILE: executors/docker/internal/volumes/parser/base_parser.go
================================================
package parser

import (
	"regexp"
)

// baseParser holds the pieces shared by the OS-specific volume-spec parsers:
// the platform path helper and the function used to expand variables in
// destination paths.
type baseParser struct {
	path        Path
	varExpander func(string) string
}

// The way how matchesToVolumeSpecParts parses the volume mount specification and assigns
// parts was inspired by how Docker Engine's `windowsParser` is created. The original sources
// can be found at:
//
// https://github.com/docker/engine/blob/a79fabbfe84117696a19671f4aa88b82d0f64fc1/volume/mounts/windows_parser.go
//
// The original source is licensed under Apache License 2.0 and the copyright for it
// goes to Docker, Inc.
//
// It matches spec against specExp and returns the named capture groups
// (source, destination, mode, label, bindPropagation) as a map; groups absent
// from the expression stay as empty strings. A non-matching spec yields an
// InvalidVolumeSpecError.
func (p *baseParser) matchesToVolumeSpecParts(spec string, specExp *regexp.Regexp) (map[string]string, error) {
	match := specExp.FindStringSubmatch(spec)
	if len(match) == 0 {
		return nil, NewInvalidVolumeSpecErr(spec)
	}

	// Map each named subexpression to its matched text. The unnamed whole-match
	// group (index 0, empty name) also lands in the map but is never looked up.
	matchgroups := make(map[string]string)
	for i, name := range specExp.SubexpNames() {
		matchgroups[name] = match[i]
	}

	parts := map[string]string{
		"source":          "",
		"destination":     "",
		"mode":            "",
		"label":           "",
		"bindPropagation": "",
	}

	for group := range parts {
		content, ok := matchgroups[group]
		if !ok {
			continue
		}

		switch group {
		case "destination":
			// We only want to expand destination, and not source or anything else.
			parts[group] = p.varExpander(content)
		default:
			parts[group] = content
		}
	}

	return parts, nil
}

// Path returns the platform path helper this parser was built with.
func (p *baseParser) Path() Path {
	return p.path
}

================================================
FILE: executors/docker/internal/volumes/parser/errors.go
================================================
package parser

import (
	"fmt"
)

// InvalidVolumeSpecError reports a volume mount specification that did not
// match the platform's expected syntax.
type InvalidVolumeSpecError struct {
	spec string
}

func (e *InvalidVolumeSpecError) Error() string {
	return fmt.Sprintf("invalid volume specification: %q", e.spec)
}

// NewInvalidVolumeSpecErr wraps spec in an *InvalidVolumeSpecError.
func NewInvalidVolumeSpecErr(spec string) error {
	return &InvalidVolumeSpecError{
		spec: spec,
	}
}

================================================
FILE: executors/docker/internal/volumes/parser/linux_parser.go
================================================
package parser

import (
	"regexp"

	"gitlab.com/gitlab-org/gitlab-runner/helpers/path"
)

// NOTE(review): the `(?P` fragments below look garbled — the named-capture
// identifiers (e.g. `(?P<source>`, `(?P<destination>`) appear to have been
// stripped during extraction; verify against the upstream file before relying
// on this text.
const (
	linuxDir = `/(?:[^\\/:*?"<>|\r\n ]+/?)*`
	linuxVolumeName = `[^\\/:*?"<>|\r\n]+`
	linuxSource = `((?P((` + linuxDir + `)|(` + linuxVolumeName + `))):)?`
	linuxDestination = `(?P(?:` + linuxDir + `))`
	linuxMode = `(:(?P(?i)(ro|rw|O)))?`
	linuxLabel = `((:|,)(?P